Compare commits

450 commits: b7aa72465d...a1d75625e4

---

@@ -3,8 +3,8 @@
 |gh_actions|
 |docs|

-``tractor`` is a `structured concurrent`_, multi-processing_ runtime
-built on trio_.
+``tractor`` is a `structured concurrent`_, (optionally
+distributed_) multi-processing_ runtime built on trio_.

 Fundamentally, ``tractor`` gives you parallelism via
 ``trio``-"*actors*": independent Python processes (aka
@@ -17,11 +17,20 @@ protocol" constructed on top of multiple Pythons each running a ``trio``
 scheduled runtime - a call to ``trio.run()``.

 We believe the system adheres to the `3 axioms`_ of an "`actor model`_"
-but likely *does not* look like what *you* probably think an "actor
-model" looks like, and that's *intentional*.
+but likely **does not** look like what **you** probably *think* an "actor
+model" looks like, and that's **intentional**.

-The first step to grok ``tractor`` is to get the basics of ``trio`` down.
-A great place to start is the `trio docs`_ and this `blog post`_.
+Where do i start!?
+------------------
+The first step to grok ``tractor`` is to get an intermediate
+knowledge of ``trio`` and **structured concurrency** B)
+
+Some great places to start are,
+- the seminal `blog post`_
+- obviously the `trio docs`_
+- wikipedia's nascent SC_ page
+- the fancy diagrams @ libdill-docs_


 Features
@@ -593,6 +602,7 @@ matrix seems too hip, we're also mostly all in the the `trio gitter
 channel`_!

 .. _structured concurrent: https://trio.discourse.group/t/concise-definition-of-structured-concurrency/228
+.. _distributed: https://en.wikipedia.org/wiki/Distributed_computing
 .. _multi-processing: https://en.wikipedia.org/wiki/Multiprocessing
 .. _trio: https://github.com/python-trio/trio
 .. _nurseries: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/#nurseries-a-structured-replacement-for-go-statements
@@ -611,8 +621,9 @@ channel`_!
 .. _trio docs: https://trio.readthedocs.io/en/latest/
 .. _blog post: https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/
-.. _structured chadcurrency: https://en.wikipedia.org/wiki/Structured_concurrency
+.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
+.. _SC: https://en.wikipedia.org/wiki/Structured_concurrency
+.. _libdill-docs: https://sustrik.github.io/libdill/structured-concurrency.html
 .. _unrequirements: https://en.wikipedia.org/wiki/Actor_model#Direct_communication_and_asynchrony
 .. _async generators: https://www.python.org/dev/peps/pep-0525/
 .. _trio-parallel: https://github.com/richardsheridan/trio-parallel
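To ground the README prose above, here is a minimal, hypothetical sketch of the API it describes — a root actor spawning one subactor and awaiting a remote function's result — mirroring the `open_nursery()` / `run_in_actor()` / `Portal.result()` calls that appear in the example scripts further down this diff; treat it as an illustration, not canonical docs.

```python
import trio
import tractor


async def say_hello() -> str:
    # runs in a separate Python process: a "trio-actor" with its
    # own `trio.run()` scheduled runtime
    return 'hello from a subactor!'


async def main():
    # an actor nursery: like a `trio` task nursery but for processes
    async with tractor.open_nursery() as an:
        portal = await an.run_in_actor(say_hello)
        # the portal's result is the remote function's return value
        print(await portal.result())


if __name__ == '__main__':
    trio.run(main)
```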

---

@@ -6,53 +6,59 @@ been an outage) and we want to ensure that despite being in debug mode
 actor tree will eventually be cancelled without leaving any zombies.

 '''
-import trio
+from contextlib import asynccontextmanager as acm
+from functools import partial

 from tractor import (
     open_nursery,
     context,
     Context,
     ContextCancelled,
     MsgStream,
+    _testing,
 )
+import trio
+import pytest


-async def break_channel_silently_then_error(
+async def break_ipc_then_error(
     stream: MsgStream,
+    break_ipc_with: str|None = None,
+    pre_close: bool = False,
 ):
+    await _testing.break_ipc(
+        stream=stream,
+        method=break_ipc_with,
+        pre_close=pre_close,
+    )
     async for msg in stream:
         await stream.send(msg)
-
-        # XXX: close the channel right after an error is raised
-        # purposely breaking the IPC transport to make sure the parent
-        # doesn't get stuck in debug or hang on the connection join.
-        # this more or less simulates an infinite msg-receive hang on
-        # the other end.
-        await stream._ctx.chan.send(None)
     assert 0


-async def close_stream_and_error(
+async def iter_ipc_stream(
     stream: MsgStream,
+    break_ipc_with: str|None = None,
+    pre_close: bool = False,
 ):
     async for msg in stream:
         await stream.send(msg)

-        # wipe out channel right before raising
-        await stream._ctx.chan.send(None)
-        await stream.aclose()
-        assert 0
-

 @context
 async def recv_and_spawn_net_killers(

     ctx: Context,
-    break_ipc_after: bool | int = False,
+    break_ipc_after: bool|int = False,
+    pre_close: bool = False,

 ) -> None:
     '''
     Receive stream msgs and spawn some IPC killers mid-stream.

     '''
+    broke_ipc: bool = False
     await ctx.started()
     async with (
         ctx.open_stream() as stream,
@@ -60,27 +66,58 @@ async def recv_and_spawn_net_killers(
     ):
         async for i in stream:
             print(f'child echoing {i}')
+            if not broke_ipc:
+                await stream.send(i)
+            else:
+                await trio.sleep(0.01)

             if (
                 break_ipc_after
-                and i > break_ipc_after
+                and
+                i >= break_ipc_after
             ):
                 '#################################\n'
                 'Simulating child-side IPC BREAK!\n'
                 '#################################'
-                n.start_soon(break_channel_silently_then_error, stream)
-                n.start_soon(close_stream_and_error, stream)
+                broke_ipc = True
+                n.start_soon(
+                    iter_ipc_stream,
+                    stream,
+                )
+                n.start_soon(
+                    partial(
+                        break_ipc_then_error,
+                        stream=stream,
+                        pre_close=pre_close,
+                    )
+                )


+@acm
+async def stuff_hangin_ctlc(timeout: float = 1) -> None:
+
+    with trio.move_on_after(timeout) as cs:
+        yield timeout
+
+    if cs.cancelled_caught:
+        # pretend to be a user seeing no streaming action
+        # thinking it's a hang, and then hitting ctl-c..
+        print(
+            f"i'm a user on the PARENT side and thingz hangin "
+            f'after timeout={timeout} ???\n\n'
+            'MASHING CTRL-C..!?\n'
+        )
+        raise KeyboardInterrupt
+
+
 async def main(
     debug_mode: bool = False,
     start_method: str = 'trio',
+    loglevel: str = 'cancel',

     # by default we break the parent IPC first (if configured to break
     # at all), but this can be changed so the child does first (even if
     # both are set to break).
-    break_parent_ipc_after: int | bool = False,
-    break_child_ipc_after: int | bool = False,
+    break_parent_ipc_after: int|bool = False,
+    break_child_ipc_after: int|bool = False,
+    pre_close: bool = False,

 ) -> None:

@@ -91,59 +128,128 @@ async def main(
             # NOTE: even debugger is used we shouldn't get
             # a hang since it never engages due to broken IPC
             debug_mode=debug_mode,
-            loglevel='warning',
+            loglevel=loglevel,

         ) as an,
     ):
+        sub_name: str = 'chitty_hijo'
         portal = await an.start_actor(
-            'chitty_hijo',
+            sub_name,
             enable_modules=[__name__],
         )

-        async with portal.open_context(
+        async with (
+            stuff_hangin_ctlc(timeout=2) as timeout,
+            _testing.expect_ctxc(
+                yay=(
+                    break_parent_ipc_after
+                    or break_child_ipc_after
+                ),
+                # TODO: we CAN'T remove this right?
+                # since we need the ctxc to bubble up from either
+                # the stream API after the `None` msg is sent
+                # (which actually implicitly cancels all remote
+                # tasks in the hijo) or from simulated
+                # KBI-mash-from-user
+                # or should we expect that a KBI triggers the ctxc
+                # and KBI in an eg?
+                reraise=True,
+            ),
+
+            portal.open_context(
                 recv_and_spawn_net_killers,
                 break_ipc_after=break_child_ipc_after,
-        ) as (ctx, sent):
+                pre_close=pre_close,
+            ) as (ctx, sent),
+        ):
+            rx_eoc: bool = False
+            ipc_break_sent: bool = False
             async with ctx.open_stream() as stream:
                 for i in range(1000):

                     if (
                         break_parent_ipc_after
-                        and i > break_parent_ipc_after
+                        and
+                        i > break_parent_ipc_after
+                        and
+                        not ipc_break_sent
                     ):
                         print(
                             '#################################\n'
-                            'Simulating parent-side IPC BREAK!\n'
-                            '#################################'
+                            'Simulating PARENT-side IPC BREAK!\n'
+                            '#################################\n'
                         )
-                        await stream._ctx.chan.send(None)
+
+                        # TODO: other methods? see break func above.
+                        # await stream._ctx.chan.send(None)
+                        # await stream._ctx.chan.transport.stream.send_eof()
+                        await stream._ctx.chan.transport.stream.aclose()
+                        ipc_break_sent = True

                     # it actually breaks right here in the
-                    # mp_spawn/forkserver backends and thus the zombie
-                    # reaper never even kicks in?
+                    # mp_spawn/forkserver backends and thus the
+                    # zombie reaper never even kicks in?
+                    try:
                         print(f'parent sending {i}')
                         await stream.send(i)
+                    except ContextCancelled as ctxc:
+                        print(
+                            'parent received ctxc on `stream.send()`\n'
+                            f'{ctxc}\n'
+                        )
+                        assert 'root' in ctxc.canceller
+                        assert sub_name in ctx.canceller

-                    with trio.move_on_after(2) as cs:
+                        # TODO: is this needed or no?
+                        raise

+                    except trio.ClosedResourceError:
+                        # NOTE: don't send if we already broke the
+                        # connection to avoid raising a closed-error
+                        # such that we drop through to the ctl-c
+                        # mashing by user.
+                        await trio.sleep(0.01)

+                    # timeout: int = 1
+                    # with trio.move_on_after(timeout) as cs:
+                    async with stuff_hangin_ctlc() as timeout:
+                        print(
+                            f'PARENT `stream.receive()` with timeout={timeout}\n'
+                        )
                         # NOTE: in the parent side IPC failure case this
                         # will raise an ``EndOfChannel`` after the child
                         # is killed and sends a stop msg back to its
                         # caller/this-parent.
+                        try:
                             rx = await stream.receive()

-                        print(f"I'm a happy user and echoed to me is {rx}")

-                    if cs.cancelled_caught:
-                        # pretend to be a user seeing no streaming action
-                        # thinking it's a hang, and then hitting ctl-c..
-                        print("YOO i'm a user anddd thingz hangin..")
+                            print(
+                                "I'm a happy PARENT user and echoed to me is\n"
+                                f'{rx}\n'
+                            )
+                        except trio.EndOfChannel:
+                            rx_eoc: bool = True
+                            print('MsgStream got EoC for PARENT')
+                            raise

             print(
-                    "YOO i'm mad send side dun but thingz hangin..\n"
-                    'MASHING CTlR-C Ctl-c..'
+                'Streaming finished and we got Eoc.\n'
+                'Canceling `.open_context()` in root with\n'
+                'CTRL-C..'
             )
+            if rx_eoc:
+                assert stream.closed
+                try:
+                    await stream.send(i)
+                    pytest.fail('stream not closed?')
+                except (
+                    trio.ClosedResourceError,
+                    trio.EndOfChannel,
+                ) as send_err:
+                    if rx_eoc:
+                        assert send_err is stream._eoc
+                    else:
+                        assert send_err is stream._closed

             raise KeyboardInterrupt
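As a usage note, a hedged sketch of a driver for this example's `main()` (defined above); the particular knob values are purely illustrative:

```python
# hypothetical driver for the example above
from functools import partial

import trio

if __name__ == '__main__':
    trio.run(
        partial(
            main,  # the example's entrypoint defined above
            debug_mode=True,
            break_child_ipc_after=5,  # child breaks its transport after msg 5
            pre_close=True,           # close the stream before breaking it
        )
    )
```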

---

@@ -0,0 +1,138 @@
'''
Examples of using the builtin `breakpoint()` from an `asyncio.Task`
running in a subactor spawned with `infect_asyncio=True`.

'''
import asyncio

import trio
import tractor
from tractor import (
    to_asyncio,
    Portal,
)


async def aio_sleep_forever():
    await asyncio.sleep(float('inf'))


async def bp_then_error(
    to_trio: trio.MemorySendChannel,
    from_trio: asyncio.Queue,

    raise_after_bp: bool = True,

) -> None:

    # sync with ``trio``-side (caller) task
    to_trio.send_nowait('start')

    # NOTE: what happens here inside the hook needs some refinement..
    # => seems like it's still `._debug._set_trace()` but
    #    we set `Lock.local_task_in_debug = 'sync'`, we probably want
    #    some further, at least, meta-data about the task/actor in debug
    #    in terms of making it clear it's `asyncio` mucking about.
    breakpoint()

    # short checkpoint / delay
    await asyncio.sleep(0.5)  # asyncio-side

    if raise_after_bp:
        raise ValueError('asyncio side error!')

    # TODO: test case with this so that it gets cancelled?
    else:
        # XXX NOTE: this is required in order to get the SIGINT-ignored
        # hang case documented in the module script section!
        await aio_sleep_forever()


@tractor.context
async def trio_ctx(
    ctx: tractor.Context,
    bp_before_started: bool = False,
):

    # this will block until the ``asyncio`` task sends a "first"
    # message, see first line in above func.
    async with (
        to_asyncio.open_channel_from(
            bp_then_error,
            # raise_after_bp=not bp_before_started,
        ) as (first, chan),

        trio.open_nursery() as tn,
    ):
        assert first == 'start'

        if bp_before_started:
            await tractor.pause()

        await ctx.started(first)  # trio-side

        tn.start_soon(
            to_asyncio.run_task,
            aio_sleep_forever,
        )
        await trio.sleep_forever()


async def main(
    bps_all_over: bool = True,

    # TODO, WHICH OF THESE HAZ BUGZ?
    cancel_from_root: bool = False,
    err_from_root: bool = False,

) -> None:

    async with tractor.open_nursery(
        debug_mode=True,
        maybe_enable_greenback=True,
        # loglevel='devx',
    ) as an:
        ptl: Portal = await an.start_actor(
            'aio_daemon',
            enable_modules=[__name__],
            infect_asyncio=True,
            debug_mode=True,
            # loglevel='cancel',
        )

        async with ptl.open_context(
            trio_ctx,
            bp_before_started=bps_all_over,
        ) as (ctx, first):

            assert first == 'start'

            # pause in parent to ensure no cross-actor
            # locking problems exist!
            await tractor.pause()

            if cancel_from_root:
                await ctx.cancel()

            if err_from_root:
                assert 0
            else:
                await trio.sleep_forever()

        # TODO: case where we cancel from trio-side while asyncio task
        # has debugger lock?
        # await ptl.cancel_actor()


if __name__ == '__main__':

    # works fine B)
    trio.run(main)

    # will hang and ignores SIGINT !!
    # NOTE: you'll need to send a SIGQUIT (via ctl-\) to kill it
    # manually..
    # trio.run(main, True)

---

@@ -0,0 +1,9 @@
'''
Reproduce a bug where enabling debug mode for a sub-actor actually causes
a hang on teardown...

'''
import asyncio

import trio
import tractor

---

@@ -1,5 +1,5 @@
 '''
-Fast fail test with a context.
+Fast fail test with a `Context`.

 Ensure the partially initialized sub-actor process
 doesn't cause a hang on error/cancel of the parent

---

@@ -4,9 +4,15 @@ import trio

 async def breakpoint_forever():
     "Indefinitely re-enter debugger in child actor."
+    try:
+        while True:
+            yield 'yo'
-        await tractor.breakpoint()
+            await tractor.pause()
+    except BaseException:
+        tractor.log.get_console_log().exception(
+            'Cancelled while trying to enter pause point!'
+        )
+        raise


 async def name_error():
@@ -19,7 +25,8 @@ async def main():
     """
     async with tractor.open_nursery(
         debug_mode=True,
-        loglevel='error',
+        # loglevel='cancel',
+        # loglevel='devx',
     ) as n:

         p0 = await n.start_actor('bp_forever', enable_modules=[__name__])
@@ -32,7 +39,7 @@ async def main():
             try:
                 await p1.run(name_error)
             except tractor.RemoteActorError as rae:
-                assert rae.type is NameError
+                assert rae.boxed_type is NameError

             async for i in stream:

---

@@ -10,7 +10,7 @@ async def name_error():
 async def breakpoint_forever():
     "Indefinitely re-enter debugger in child actor."
     while True:
-        await tractor.breakpoint()
+        await tractor.pause()

         # NOTE: if the test never sent 'q'/'quit' commands
         # on the pdb repl, without this checkpoint line the
@@ -45,6 +45,7 @@ async def spawn_until(depth=0):
             )


+# TODO: notes on the new boxed-relayed errors through proxy actors
 async def main():
     """The main ``tractor`` routine.

---

@@ -6,7 +6,7 @@ async def breakpoint_forever():
     "Indefinitely re-enter debugger in child actor."
     while True:
         await trio.sleep(0.1)
-        await tractor.breakpoint()
+        await tractor.pause()


 async def name_error():
@@ -38,6 +38,7 @@ async def main():
     """
     async with tractor.open_nursery(
         debug_mode=True,
+        # loglevel='runtime',
     ) as n:

         # Spawn both actors, don't bother with collecting results

---

@@ -23,5 +23,6 @@ async def main():
             n.start_soon(debug_actor.run, die)
             n.start_soon(crash_boi.run, die)


 if __name__ == '__main__':
     trio.run(main)

---

@@ -0,0 +1,56 @@
import trio
import tractor


@tractor.context
async def name_error(
    ctx: tractor.Context,
):
    '''
    Raise a `NameError`, catch it and enter `.post_mortem()`, then
    expect the `._rpc._invoke()` crash handler to also engage.

    '''
    try:
        getattr(doggypants)  # noqa (on purpose)
    except NameError:
        await tractor.post_mortem()
        raise


async def main():
    '''
    Test 3 `PdbREPL` entries:
      - one in the child due to manual `.post_mortem()`,
      - another in the child due to runtime RPC crash handling.
      - final one here in parent from the RAE.

    '''
    # XXX NOTE: ideally the REPL arrives at this frame in the parent
    # ONE UP FROM the inner ctx block below!
    async with tractor.open_nursery(
        debug_mode=True,
        # loglevel='cancel',
    ) as an:
        p: tractor.Portal = await an.start_actor(
            'child',
            enable_modules=[__name__],
        )

        # XXX should raise `RemoteActorError[NameError]`
        # AND be the active frame when REPL enters!
        try:
            async with p.open_context(name_error) as (ctx, first):
                assert first
        except tractor.RemoteActorError as rae:
            assert rae.boxed_type is NameError

            # manually handle in root's parent task
            await tractor.post_mortem()
            raise
        else:
            raise RuntimeError('IPC ctx should have remote errored!?')


if __name__ == '__main__':
    trio.run(main)

---

@@ -6,19 +6,46 @@ import tractor


 async def main() -> None:
-    async with tractor.open_nursery(debug_mode=True) as an:
-
-        assert os.environ['PYTHONBREAKPOINT'] == 'tractor._debug._set_trace'
+    # initially unset, no entry.
+    orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT')
+    assert orig_pybp_var in {None, "0"}
+
+    async with tractor.open_nursery(
+        debug_mode=True,
+    ) as an:
+        assert an
+        assert (
+            (pybp_var := os.environ['PYTHONBREAKPOINT'])
+            ==
+            'tractor.devx._debug._sync_pause_from_builtin'
+        )

         # TODO: an assert that verifies the hook has indeed been, hooked
         # XD
-        assert sys.breakpointhook is not tractor._debug._set_trace
+        assert (
+            (pybp_hook := sys.breakpointhook)
+            is not tractor.devx._debug._set_trace
+        )
+
+        print(
+            f'$PYTHONBREAKPOINT: {pybp_var!r}\n'
+            f'`sys.breakpointhook`: {pybp_hook!r}\n'
+        )
         breakpoint()
+        pass  # first bp, tractor hook set.

-    # TODO: an assert that verifies the hook is unhooked..
+    # XXX AFTER EXIT (of actor-runtime) verify the hook is unset..
+    #
+    # YES, this is weird but it's how stdlib docs say to do it..
+    # https://docs.python.org/3/library/sys.html#sys.breakpointhook
+    assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var
+    assert sys.breakpointhook

     # now ensure a regular builtin pause still works
     breakpoint()
+    pass  # last bp, stdlib hook restored


 if __name__ == '__main__':
     trio.run(main)
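The example above relies on PEP 553's hook protocol; for reference, a stdlib-only sketch (no `tractor` involved) of installing and then restoring `sys.breakpointhook`, per the `sys` docs linked in the comments:

```python
import sys


def my_hook(*args, **kwargs) -> None:
    # a stand-in debugger entry; real hooks usually start a REPL
    print('breakpoint() was called!')


sys.breakpointhook = my_hook
breakpoint()  # dispatches to my_hook()

# restore the default hook, as the stdlib docs recommend
sys.breakpointhook = sys.__breakpointhook__
breakpoint()  # enters pdb again (unless $PYTHONBREAKPOINT says otherwise)
```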

---

@@ -10,7 +10,7 @@ async def main():

         await trio.sleep(0.1)

-        await tractor.breakpoint()
+        await tractor.pause()

         await trio.sleep(0.1)

---

@@ -2,13 +2,16 @@ import trio
 import tractor


 async def main(
+    registry_addrs: tuple[str, int]|None = None
 ):

     async with tractor.open_root_actor(
         debug_mode=True,
+        # loglevel='runtime',
     ):
         while True:
-            await tractor.breakpoint()
+            await tractor.pause()


 if __name__ == '__main__':

---

@@ -0,0 +1,81 @@
'''
Verify we can dump a `stackscope` tree on a hang.

'''
import os
import signal

import trio
import tractor


@tractor.context
async def start_n_shield_hang(
    ctx: tractor.Context,
):
    # actor: tractor.Actor = tractor.current_actor()

    # sync to parent-side task
    await ctx.started(os.getpid())

    print('Entering shield sleep..')
    with trio.CancelScope(shield=True):
        await trio.sleep_forever()  # in subactor

    # XXX NOTE ^^^ since this shields, we expect
    # the zombie reaper (aka T800) to engage on
    # SIGINT from the user and eventually hard-kill
    # this subprocess!


async def main(
    from_test: bool = False,
) -> None:

    async with (
        tractor.open_nursery(
            debug_mode=True,
            enable_stack_on_sig=True,
            # maybe_enable_greenback=False,
            loglevel='devx',
        ) as an,
    ):
        ptl: tractor.Portal = await an.start_actor(
            'hanger',
            enable_modules=[__name__],
            debug_mode=True,
        )
        async with ptl.open_context(
            start_n_shield_hang,
        ) as (ctx, cpid):

            _, proc, _ = an._children[ptl.chan.uid]
            assert cpid == proc.pid

            print(
                'Yo my child hanging..?\n'
                'Sending SIGUSR1 to see a tree-trace!\n'
            )

            # XXX simulate the wrapping test's "user actions"
            # (i.e. if a human didn't run this manually but wants to
            # know what they should do to reproduce test behaviour)
            if from_test:
                os.kill(
                    cpid,
                    signal.SIGUSR1,
                )

                # simulate user cancelling program
                await trio.sleep(0.5)
                os.kill(
                    os.getpid(),
                    signal.SIGINT,
                )
            else:
                # actually let user send the ctl-c
                await trio.sleep_forever()  # in root


if __name__ == '__main__':
    trio.run(main)
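For readers unfamiliar with the mechanism, a stdlib-only sketch of the same signal-triggered dump idea using `faulthandler` — this is not what `tractor`'s `enable_stack_on_sig` actually uses (that's `stackscope`), just the analogous pattern:

```python
import faulthandler
import signal
import time

# dump tracebacks for all threads to stderr whenever SIGUSR1 arrives
faulthandler.register(signal.SIGUSR1)

print('hanging.. from another shell try: kill -USR1 <this-pid>')
time.sleep(3600)  # simulate the hang
```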

---

@@ -0,0 +1,88 @@
import trio
import tractor


async def cancellable_pause_loop(
    task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED
):
    with trio.CancelScope() as cs:
        task_status.started(cs)
        for _ in range(3):
            try:
                # ON first entry, there is no level triggered
                # cancellation yet, so this cp does a parent task
                # ctx-switch so that this scope raises for the NEXT
                # checkpoint we hit.
                await trio.lowlevel.checkpoint()
                await tractor.pause()

                cs.cancel()

                # parent should have called `cs.cancel()` by now
                await trio.lowlevel.checkpoint()

            except trio.Cancelled:
                print('INSIDE SHIELDED PAUSE')
                await tractor.pause(shield=True)
        else:
            # should raise it again, bubbling up to parent
            print('BUBBLING trio.Cancelled to parent task-nursery')
            await trio.lowlevel.checkpoint()


async def pm_on_cancelled():
    async with trio.open_nursery() as tn:
        tn.cancel_scope.cancel()
        try:
            await trio.sleep_forever()
        except trio.Cancelled:
            # should also raise `Cancelled` since
            # we didn't pass `shield=True`.
            try:
                await tractor.post_mortem(hide_tb=False)
            except trio.Cancelled as taskc:

                # should enter just fine, in fact it should
                # be debugging the internals of the previous
                # sin-shield call above Bo
                await tractor.post_mortem(
                    hide_tb=False,
                    shield=True,
                )
                raise taskc

        else:
            raise RuntimeError('Dint cancel as expected!?')


async def cancelled_before_pause():
    '''
    Verify that using a shielded pause works despite surrounding
    cancellation called state in the calling task.

    '''
    async with trio.open_nursery() as tn:
        cs: trio.CancelScope = await tn.start(cancellable_pause_loop)
        await trio.sleep(0.1)

    assert cs.cancelled_caught

    await pm_on_cancelled()


async def main():
    async with tractor.open_nursery(
        debug_mode=True,
    ) as n:
        portal: tractor.Portal = await n.run_in_actor(
            cancelled_before_pause,
        )
        await portal.result()

        # ensure the same works in the root actor!
        await pm_on_cancelled()


if __name__ == '__main__':
    trio.run(main)
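The loop above depends on `trio`'s shielding semantics (via `pause(shield=True)`); a bare-`trio` sketch of what a shielded scope guarantees once a cancel is already pending:

```python
import trio


async def main():
    with trio.CancelScope() as outer:
        outer.cancel()  # a cancel is now pending at every checkpoint

        with trio.CancelScope(shield=True):
            # shielded: the pending outer cancel cannot interrupt us here
            await trio.sleep(0.1)
            print('shielded work finished despite the pending cancel')

        # first unshielded checkpoint: trio.Cancelled is raised here
        # and absorbed by the (already cancelled) outer scope
        await trio.sleep_forever()

    assert outer.cancelled_caught
    print('outer scope exited via cancellation, as expected')


trio.run(main)
```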

---

@@ -4,9 +4,9 @@ import trio

 async def gen():
     yield 'yo'
-    await tractor.breakpoint()
+    await tractor.pause()
     yield 'yo'
-    await tractor.breakpoint()
+    await tractor.pause()


 @tractor.context
@@ -15,7 +15,7 @@ async def just_bp(
 ) -> None:

     await ctx.started()
-    await tractor.breakpoint()
+    await tractor.pause()

     # TODO: bps and errors in this call..
     async for val in gen():

---

@@ -3,17 +3,20 @@ import tractor

 async def breakpoint_forever():
-    """Indefinitely re-enter debugger in child actor.
-    """
+    '''
+    Indefinitely re-enter debugger in child actor.
+
+    '''
     while True:
         await trio.sleep(0.1)
-        await tractor.breakpoint()
+        await tractor.pause()


 async def main():

     async with tractor.open_nursery(
         debug_mode=True,
+        loglevel='cancel',
     ) as n:

         portal = await n.run_in_actor(

---

@@ -3,16 +3,26 @@ import tractor

 async def name_error():
-    getattr(doggypants)
+    getattr(doggypants)  # noqa (on purpose)


 async def main():
     async with tractor.open_nursery(
         debug_mode=True,
-    ) as n:
+        # loglevel='transport',
+    ) as an:

-        portal = await n.run_in_actor(name_error)
-        await portal.result()
+        # TODO: ideally the REPL arrives at this frame in the parent,
+        # ABOVE the @api_frame of `Portal.run_in_actor()` (which
+        # should eventually not even be a portal method ... XD)
+        # await tractor.pause()
+        p: tractor.Portal = await an.run_in_actor(name_error)
+
+        # with this style, should raise on this line
+        await p.result()
+
+        # with this alt style should raise at `open_nursery()`
+        # return await p.result()


 if __name__ == '__main__':

---

@@ -0,0 +1,169 @@
from functools import partial
import time

import trio
import tractor

# TODO: only import these when not running from test harness?
# can we detect `pexpect` usage maybe?
# from tractor.devx._debug import (
#     get_lock,
#     get_debug_req,
# )


def sync_pause(
    use_builtin: bool = False,
    error: bool = False,
    hide_tb: bool = True,
    pre_sleep: float|None = None,
):
    if pre_sleep:
        time.sleep(pre_sleep)

    if use_builtin:
        breakpoint(hide_tb=hide_tb)

    else:
        # TODO: maybe for testing some kind of cm style interface
        # where the `._set_trace()` call doesn't happen until block
        # exit?
        # assert get_lock().ctx_in_debug is None
        # assert get_debug_req().repl is None
        tractor.pause_from_sync()
        # assert get_debug_req().repl is None

    if error:
        raise RuntimeError('yoyo sync code error')


@tractor.context
async def start_n_sync_pause(
    ctx: tractor.Context,
):
    actor: tractor.Actor = tractor.current_actor()

    # sync to parent-side task
    await ctx.started()

    print(f'Entering `sync_pause()` in subactor: {actor.uid}\n')
    sync_pause()
    print(f'Exited `sync_pause()` in subactor: {actor.uid}\n')


async def main() -> None:
    async with (
        tractor.open_nursery(
            debug_mode=True,
            maybe_enable_greenback=True,
            enable_stack_on_sig=True,
            # loglevel='warning',
            # loglevel='devx',
        ) as an,
        trio.open_nursery() as tn,
    ):
        # just from root task
        sync_pause()

        p: tractor.Portal = await an.start_actor(
            'subactor',
            enable_modules=[__name__],
            # infect_asyncio=True,
            debug_mode=True,
        )

        # TODO: 3 sub-actor usage cases:
        # -[x] via a `.open_context()`
        # -[ ] via a `.run_in_actor()` call
        # -[ ] via a `.run()`
        # -[ ] via a `.to_thread.run_sync()` in subactor
        async with p.open_context(
            start_n_sync_pause,
        ) as (ctx, first):
            assert first is None

            # TODO: handle bg-thread-in-root-actor special cases!
            #
            # there are a couple very subtle situations possible here
            # and they are likely to become more important as cpython
            # moves to support no-GIL.
            #
            # Cases:
            # 1. root-actor bg-threads that call `.pause_from_sync()`
            #   whilst an in-tree subactor also is using ` .pause()`.
            # |_ since the root-actor bg thread can not
            #   `Lock._debug_lock.acquire_nowait()` without running
            #   a `trio.Task`, AND because the
            #   `PdbREPL.set_continue()` is called from that
            #   bg-thread, we can not `._debug_lock.release()`
            #   either!
            #  |_ this results in no actor-tree `Lock` being used
            #    on behalf of the bg-thread and thus the subactor's
            #    task and the thread trying to use stdio
            #    simultaneously which results in the classic TTY
            #    clobbering!
            #
            # 2. multiple sync-bg-threads that call
            #   `.pause_from_sync()` where one is scheduled via
            #   `Nursery.start_soon(to_thread.run_sync)` in a bg
            #   task.
            #
            #   Due to the GIL, the threads never truly try to step
            #   through the REPL simultaneously, BUT their `logging`
            #   and traceback outputs are interleaved since the GIL
            #   (seemingly) on every REPL-input from the user
            #   switches threads..
            #
            #   Soo, the context switching semantics of the GIL
            #   result in a very confusing and messy interaction UX
            #   since eval and (tb) print output is NOT synced to
            #   each REPL-cycle (like we normally make it via
            #   a `.set_continue()` callback triggering the
            #   `Lock.release()`). Ideally we can solve this
            #   usability issue NOW because this will of course be
            #   that much more important when eventually there is no
            #   GIL!

            # XXX should cause double REPL entry and thus TTY
            # clobbering due to case 1. above!
            tn.start_soon(
                partial(
                    trio.to_thread.run_sync,
                    partial(
                        sync_pause,
                        use_builtin=False,
                        # pre_sleep=0.5,
                    ),
                    abandon_on_cancel=True,
                    thread_name='start_soon_root_bg_thread',
                )
            )

            await tractor.pause()

            # XXX should cause double REPL entry and thus TTY
            # clobbering due to case 2. above!
            await trio.to_thread.run_sync(
                partial(
                    sync_pause,
                    # NOTE this already works fine since in the new
                    # thread the `breakpoint()` built-in is never
                    # overloaded, thus NO locking is used, HOWEVER
                    # the case 2. from above still exists!
                    use_builtin=True,
                ),
                # TODO: with this `False` we can hang!??!
                # abandon_on_cancel=False,
                abandon_on_cancel=True,
                thread_name='inline_root_bg_thread',
            )

        await ctx.cancel()

        # TODO: case where we cancel from trio-side while asyncio task
        # has debugger lock?
        await p.cancel_actor()


if __name__ == '__main__':
    trio.run(main)
|  | @ -1,6 +1,11 @@ | |||
| import time | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     ActorNursery, | ||||
|     MsgStream, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # this is the first 2 actors, streamer_1 and streamer_2 | ||||
|  | @ -12,14 +17,18 @@ async def stream_data(seed): | |||
| 
 | ||||
| # this is the third actor; the aggregator | ||||
| async def aggregate(seed): | ||||
|     """Ensure that the two streams we receive match but only stream | ||||
|     ''' | ||||
|     Ensure that the two streams we receive match but only stream | ||||
|     a single set of values to the parent. | ||||
|     """ | ||||
|     async with tractor.open_nursery() as nursery: | ||||
|         portals = [] | ||||
| 
 | ||||
|     ''' | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery() as an: | ||||
|         portals: list[Portal] = [] | ||||
|         for i in range(1, 3): | ||||
|             # fork point | ||||
|             portal = await nursery.start_actor( | ||||
| 
 | ||||
|             # fork/spawn call | ||||
|             portal = await an.start_actor( | ||||
|                 name=f'streamer_{i}', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -43,7 +52,11 @@ async def aggregate(seed): | |||
|         async with trio.open_nursery() as n: | ||||
| 
 | ||||
|             for portal in portals: | ||||
|                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||
|                 n.start_soon( | ||||
|                     push_to_chan, | ||||
|                     portal, | ||||
|                     send_chan.clone(), | ||||
|                 ) | ||||
| 
 | ||||
|             # close this local task's reference to send side | ||||
|             await send_chan.aclose() | ||||
|  | @ -60,26 +73,36 @@ async def aggregate(seed): | |||
| 
 | ||||
|             print("FINISHED ITERATING in aggregator") | ||||
| 
 | ||||
|         await nursery.cancel() | ||||
|         await an.cancel() | ||||
|         print("WAITING on `ActorNursery` to finish") | ||||
|     print("AGGREGATOR COMPLETE!") | ||||
| 
 | ||||
| 
 | ||||
| # this is the main actor and *arbiter* | ||||
| async def main(): | ||||
|     # a nursery which spawns "actors" | ||||
| async def main() -> list[int]: | ||||
|     ''' | ||||
|     This is the "root" actor's main task's entrypoint. | ||||
| 
 | ||||
|     By default (and if not otherwise specified) that root process | ||||
|     also acts as a "registry actor" / "registrar" on the localhost | ||||
|     for the purposes of multi-actor "service discovery". | ||||
| 
 | ||||
|     ''' | ||||
|     # yes, a nursery which spawns `trio`-"actors" B) | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=('127.0.0.1', 1616) | ||||
|     ) as nursery: | ||||
|         loglevel='cancel', | ||||
|         debug_mode=True, | ||||
|     ) as an: | ||||
| 
 | ||||
|         seed = int(1e3) | ||||
|         pre_start = time.time() | ||||
| 
 | ||||
|         portal: Portal = await an.start_actor( | ||||
|             name='aggregator', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         stream: MsgStream | ||||
|         async with portal.open_stream_from( | ||||
|             aggregate, | ||||
|             seed=seed, | ||||
|  | @ -88,11 +111,12 @@ async def main(): | |||
|             start = time.time() | ||||
|             # the portal call returns exactly what you'd expect | ||||
|             # as if the remote "aggregate" function was called locally | ||||
|             result_stream = [] | ||||
|             result_stream: list[int] = [] | ||||
|             async for value in stream: | ||||
|                 result_stream.append(value) | ||||
| 
 | ||||
|         await portal.cancel_actor() | ||||
|         cancelled: bool = await portal.cancel_actor() | ||||
|         assert cancelled | ||||
| 
 | ||||
|         print(f"STREAM TIME = {time.time() - start}") | ||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||
|  |  | |||
|  | @ -8,7 +8,10 @@ This uses no extra threads, fancy semaphores or futures; all we need | |||
| is ``tractor``'s channels. | ||||
| 
 | ||||
| """ | ||||
| from contextlib import asynccontextmanager | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| from typing import Callable | ||||
| import itertools | ||||
| import math | ||||
|  | @ -16,7 +19,6 @@ import time | |||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| PRIMES = [ | ||||
|  | @ -44,7 +46,7 @@ async def is_prime(n): | |||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| @acm | ||||
| async def worker_pool(workers=4): | ||||
|     """Though it's a trivial special case for ``tractor``, the well | ||||
|     known "worker pool" seems to be the defacto "but, I want this | ||||
|  |  | |||
|  | @ -13,7 +13,7 @@ async def simple_rpc( | |||
| 
 | ||||
|     ''' | ||||
|     # signal to parent that we're up much like | ||||
|     # ``trio_typing.TaskStatus.started()`` | ||||
|     # ``trio.TaskStatus.started()`` | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|     async with ctx.open_stream() as stream: | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ async def main(service_name): | |||
|     async with tractor.open_nursery() as an: | ||||
|         await an.start_actor(service_name) | ||||
| 
 | ||||
|         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||
|         async with tractor.get_registry('127.0.0.1', 1616) as portal: | ||||
|             print(f"Arbiter is listening on {portal.channel}") | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||
|  |  | |||
|  | @ -0,0 +1,18 @@ | |||
| First generate a built distribution: | ||||
| 
 | ||||
| ``` | ||||
| python -m pip install --upgrade build | ||||
| python -m build --sdist --outdir dist/alpha5/ | ||||
| ``` | ||||
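| 
 | ||||
| As an optional sanity check before any upload (an editor's | ||||
| suggestion, not part of the original notes), `twine` can validate | ||||
| the built artifacts' metadata: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine check dist/alpha5/* | ||||
| ``` | ||||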
| 
 | ||||
| Then try a test ``pypi`` upload: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload --repository testpypi dist/alpha5/* | ||||
| ``` | ||||
| 
 | ||||
| Then push to `pypi` for realz: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload dist/alpha5/* | ||||
| ``` | ||||
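| 
 | ||||
| To double-check the test upload before the real push (a hypothetical | ||||
| verification step, not in the original notes), install from the | ||||
| TestPyPI index; the extra index lets dependencies resolve from | ||||
| regular PyPI: | ||||
| 
 | ||||
| ``` | ||||
| python -m pip install \ | ||||
|   --index-url https://test.pypi.org/simple/ \ | ||||
|   --extra-index-url https://pypi.org/simple/ \ | ||||
|   tractor | ||||
| ``` | ||||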
pyproject.toml (113)
							|  | @ -1,3 +1,72 @@ | |||
| [build-system] | ||||
| requires = ["poetry-core"] | ||||
| build-backend = "poetry.core.masonry.api" | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [tool.poetry] | ||||
| name = "tractor" | ||||
| version = "0.1.0a6dev0" | ||||
| description='structured concurrent `trio`-"actors"' | ||||
| authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"] | ||||
| license = "AGPlv3" | ||||
| readme = "docs/README.rst" | ||||
| 
 | ||||
| # TODO: do we need this xontrib loader at all given pep420 | ||||
| # and xonsh's xontrib global-autoload-via-setuptools? | ||||
| # https://xon.sh/tutorial_xontrib.html#authoring-xontribs | ||||
| packages = [ | ||||
|   {include = 'tractor' }, | ||||
|   # {include = 'tractor.experimental' }, | ||||
|   # {include = 'tractor.trionics' }, | ||||
|   # {include = 'tractor.msg' }, | ||||
|   # {include = 'tractor.devx' }, | ||||
| ] | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [tool.poetry.dependencies] | ||||
| python = "^3.11" | ||||
| 
 | ||||
| # trio runtime related | ||||
| # proper range spec: | ||||
| # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
| trio='^0.24' | ||||
| tricycle = "^0.4.1" | ||||
| trio-typing = "^0.10.0" | ||||
| 
 | ||||
| msgspec='^0.18.5'  # interchange | ||||
| wrapt = "^1.16.0"  # decorators | ||||
| colorlog = "^6.8.2"  # logging | ||||
| 
 | ||||
| # built-in multi-actor `pdb` REPL | ||||
| pdbp = "^1.5.0" | ||||
| 
 | ||||
| 
 | ||||
| # TODO: distributed transport using | ||||
| # linux kernel networking | ||||
| # 'pyroute2' | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [tool.poetry.group.dev] | ||||
| optional = false | ||||
| [tool.poetry.group.dev.dependencies] | ||||
| # testing | ||||
| pytest = "^8.2.0" | ||||
| pexpect = "^4.9.0" | ||||
| 
 | ||||
| # .devx tooling | ||||
| greenback = "^1.2.1" | ||||
| stackscope = "^0.2.2" | ||||
| 
 | ||||
| # (light) xonsh usage/integration | ||||
| xontrib-vox = "^0.0.1" | ||||
| prompt-toolkit = "^3.0.43" | ||||
| xonsh-vox-tabcomplete = "^0.5" | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [tool.towncrier] | ||||
| package = "tractor" | ||||
| filename = "NEWS.rst" | ||||
|  | @ -26,3 +95,47 @@ all_bullets = true | |||
|   directory = "trivial" | ||||
|   name = "Trivial/Internal Changes" | ||||
|   showcontent = true | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [tool.pytest.ini_options] | ||||
| minversion = '6.0' | ||||
| testpaths = [ | ||||
|   'tests' | ||||
| ] | ||||
| addopts = [ | ||||
|   # TODO: figure out why this isn't working.. | ||||
|   '--rootdir=./tests', | ||||
| 
 | ||||
|   '--import-mode=importlib', | ||||
|   # don't show frickin captured logs AGAIN in the report.. | ||||
|   '--show-capture=no', | ||||
| ] | ||||
| log_cli = false | ||||
| 
 | ||||
| # TODO: maybe some of these layout choices? | ||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||
| # pythonpath = "src" | ||||
| 
 | ||||
| # ------ - ------ | ||||
| 
 | ||||
| [project] | ||||
| keywords = [ | ||||
|   'trio', | ||||
|   'async', | ||||
|   'concurrency', | ||||
|   'structured concurrency', | ||||
|   'actor model', | ||||
|   'distributed', | ||||
|   'multiprocessing' | ||||
| ] | ||||
| classifiers = [ | ||||
|   "Development Status :: 3 - Alpha", | ||||
|   "Operating System :: POSIX :: Linux", | ||||
|   "Framework :: Trio", | ||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||
|   "Programming Language :: Python :: Implementation :: CPython", | ||||
|   "Programming Language :: Python :: 3 :: Only", | ||||
|   "Programming Language :: Python :: 3.11", | ||||
|   "Topic :: System :: Distributed Computing", | ||||
| ] | ||||
|  |  | |||
|  | @ -6,3 +6,4 @@ mypy | |||
| trio_typing | ||||
| pexpect | ||||
| towncrier | ||||
| numpy | ||||
|  |  | |||
setup.py (29)
							|  | @ -26,7 +26,7 @@ with open('docs/README.rst', encoding='utf-8') as f: | |||
| setup( | ||||
|     name="tractor", | ||||
|     version='0.1.0a6dev0',  # alpha zone | ||||
|     description='structured concurrrent `trio`-"actors"', | ||||
|     description='structured concurrent `trio`-"actors"', | ||||
|     long_description=readme, | ||||
|     license='AGPLv3', | ||||
|     author='Tyler Goodlet', | ||||
|  | @ -36,41 +36,44 @@ setup( | |||
|     platforms=['linux', 'windows'], | ||||
|     packages=[ | ||||
|         'tractor', | ||||
|         'tractor.experimental', | ||||
|         'tractor.trionics', | ||||
|         'tractor.experimental',  # wacky ideas | ||||
|         'tractor.trionics',  # trio extensions | ||||
|         'tractor.msg',  # lowlevel data types | ||||
|         'tractor.devx',  # "dev-experience" | ||||
|     ], | ||||
|     install_requires=[ | ||||
| 
 | ||||
|         # trio related | ||||
|         # proper range spec: | ||||
|         # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||
|         'trio >= 0.22', | ||||
|         'async_generator', | ||||
|         'trio_typing', | ||||
|         'exceptiongroup', | ||||
|         'trio >= 0.24', | ||||
| 
 | ||||
|         # 'async_generator',  # in stdlib mostly! | ||||
|         # 'trio_typing',  # trio==0.23.0 has type hints! | ||||
|         # 'exceptiongroup',  # in stdlib as of 3.11! | ||||
| 
 | ||||
|         # tooling | ||||
|         'stackscope', | ||||
|         'tricycle', | ||||
|         'trio_typing', | ||||
|         'colorlog', | ||||
|         'wrapt', | ||||
| 
 | ||||
|         # IPC serialization | ||||
|         'msgspec', | ||||
|         'msgspec>=0.18.5', | ||||
| 
 | ||||
|         # debug mode REPL | ||||
|         'pdbp', | ||||
| 
 | ||||
|         # TODO: distributed transport using | ||||
|         # linux kernel networking | ||||
|         # 'pyroute2', | ||||
| 
 | ||||
|         # pip ref docs on these specs: | ||||
|         # https://pip.pypa.io/en/stable/reference/requirement-specifiers/#examples | ||||
|         # and pep: | ||||
|         # https://peps.python.org/pep-0440/#version-specifiers | ||||
| 
 | ||||
|         # windows deps workaround for ``pdbpp`` | ||||
|         # https://github.com/pdbpp/pdbpp/issues/498 | ||||
|         # https://github.com/pdbpp/fancycompleter/issues/37 | ||||
|         'pyreadline3 ; platform_system == "Windows"', | ||||
| 
 | ||||
|     ], | ||||
|     tests_require=['pytest'], | ||||
|     python_requires=">=3.10", | ||||
|  |  | |||
|  | @ -7,94 +7,19 @@ import os | |||
| import random | ||||
| import signal | ||||
| import platform | ||||
| import pathlib | ||||
| import time | ||||
| import inspect | ||||
| from functools import partial, wraps | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     examples_dir as examples_dir, | ||||
|     tractor_test as tractor_test, | ||||
|     expect_ctxc as expect_ctxc, | ||||
| ) | ||||
| 
 | ||||
| # TODO: include wtv plugin(s) we build in `._testing.pytest`? | ||||
| pytest_plugins = ['pytester'] | ||||
| 
 | ||||
| 
 | ||||
| def tractor_test(fn): | ||||
|     """ | ||||
|     Use: | ||||
| 
 | ||||
|     @tractor_test | ||||
|     async def test_whatever(): | ||||
|         await ... | ||||
| 
 | ||||
|     If fixtures: | ||||
| 
 | ||||
|         - ``arb_addr`` (a socket addr tuple where arbiter is listening) | ||||
|         - ``loglevel`` (logging level passed to tractor internals) | ||||
|         - ``start_method`` (subprocess spawning backend) | ||||
| 
 | ||||
|     are defined in the `pytest` fixture space they will be automatically | ||||
|     injected to tests declaring these funcargs. | ||||
|     """ | ||||
|     @wraps(fn) | ||||
|     def wrapper( | ||||
|         *args, | ||||
|         loglevel=None, | ||||
|         arb_addr=None, | ||||
|         start_method=None, | ||||
|         **kwargs | ||||
|     ): | ||||
|         # __tracebackhide__ = True | ||||
| 
 | ||||
|         if 'arb_addr' in inspect.signature(fn).parameters: | ||||
|             # injects test suite fixture value to test as well | ||||
|             # as `run()` | ||||
|             kwargs['arb_addr'] = arb_addr | ||||
| 
 | ||||
|         if 'loglevel' in inspect.signature(fn).parameters: | ||||
|             # allows test suites to define a 'loglevel' fixture | ||||
|             # that activates the internal logging | ||||
|             kwargs['loglevel'] = loglevel | ||||
| 
 | ||||
|         if start_method is None: | ||||
|             if platform.system() == "Windows": | ||||
|                 start_method = 'trio' | ||||
| 
 | ||||
|         if 'start_method' in inspect.signature(fn).parameters: | ||||
|             # set of subprocess spawning backends | ||||
|             kwargs['start_method'] = start_method | ||||
| 
 | ||||
|         if kwargs: | ||||
| 
 | ||||
|             # use explicit root actor start | ||||
| 
 | ||||
|             async def _main(): | ||||
|                 async with tractor.open_root_actor( | ||||
|                     # **kwargs, | ||||
|                     arbiter_addr=arb_addr, | ||||
|                     loglevel=loglevel, | ||||
|                     start_method=start_method, | ||||
| 
 | ||||
|                     # TODO: only enable when pytest is passed --pdb | ||||
|                     # debug_mode=True, | ||||
| 
 | ||||
|                 ): | ||||
|                     await fn(*args, **kwargs) | ||||
| 
 | ||||
|             main = _main | ||||
| 
 | ||||
|         else: | ||||
|             # use implicit root actor start | ||||
|             main = partial(fn, *args, **kwargs) | ||||
| 
 | ||||
|         return trio.run(main) | ||||
| 
 | ||||
|     return wrapper | ||||
| 
 | ||||
| 
 | ||||
| _arb_addr = '127.0.0.1', random.randint(1000, 9999) | ||||
| 
 | ||||
| 
 | ||||
| # Sending signal.SIGINT on subprocess fails on windows. Use CTRL_* alternatives | ||||
| if platform.system() == 'Windows': | ||||
|     _KILL_SIGNAL = signal.CTRL_BREAK_EVENT | ||||
|  | @ -114,41 +39,45 @@ no_windows = pytest.mark.skipif( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def repodir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the repo directory. | ||||
| 
 | ||||
|     ''' | ||||
|     # 2 parents up to step up through tests/<repo_dir> | ||||
|     return pathlib.Path(__file__).parent.parent.absolute() | ||||
| 
 | ||||
| 
 | ||||
| def examples_dir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the examples directory as `pathlib.Path`. | ||||
| 
 | ||||
|     ''' | ||||
|     return repodir() / 'examples' | ||||
| 
 | ||||
| 
 | ||||
| def pytest_addoption(parser): | ||||
|     parser.addoption( | ||||
|         "--ll", action="store", dest='loglevel', | ||||
|         "--ll", | ||||
|         action="store", | ||||
|         dest='loglevel', | ||||
|         default='ERROR', help="logging level to set when testing" | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--spawn-backend", action="store", dest='spawn_backend', | ||||
|         "--spawn-backend", | ||||
|         action="store", | ||||
|         dest='spawn_backend', | ||||
|         default='trio', | ||||
|         help="Processing spawning backend to use for test run", | ||||
|     ) | ||||
| 
 | ||||
|     parser.addoption( | ||||
|         "--tpdb", "--debug-mode", | ||||
|         action="store_true", | ||||
|         dest='tractor_debug_mode', | ||||
|         # default=False, | ||||
|         help=( | ||||
|             'Enable a flag that can be used by tests to set the ' | ||||
|             '`debug_mode: bool` for engaging the internal ' | ||||
|             'multi-proc debugger sys.' | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def pytest_configure(config): | ||||
|     backend = config.option.spawn_backend | ||||
|     tractor._spawn.try_set_start_method(backend) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def debug_mode(request): | ||||
|     return request.config.option.tractor_debug_mode | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session', autouse=True) | ||||
| def loglevel(request): | ||||
|     orig = tractor.log._default_loglevel | ||||
|  | @ -168,14 +97,35 @@ _ci_env: bool = os.environ.get('CI', False) | |||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def ci_env() -> bool: | ||||
|     """Detect CI envoirment. | ||||
|     """ | ||||
|     ''' | ||||
|     Detect CI environment. | ||||
| 
 | ||||
|     ''' | ||||
|     return _ci_env | ||||
| 
 | ||||
| 
 | ||||
| # TODO: also move this to `._testing` for now? | ||||
| # -[ ] possibly generalize and re-use for multi-tree spawning | ||||
| #    along with the new stuff for multi-addrs in distribute_dis | ||||
| #    branch? | ||||
| # | ||||
| # choose randomly at import time | ||||
| _reg_addr: tuple[str, int] = ( | ||||
|     '127.0.0.1', | ||||
|     random.randint(1000, 9999), | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='session') | ||||
| def arb_addr(): | ||||
|     return _arb_addr | ||||
| def reg_addr() -> tuple[str, int]: | ||||
| 
 | ||||
|     # globally override the runtime to the per-test-session-dynamic | ||||
|     # addr so that all tests never conflict with any other actor | ||||
|     # tree using the default. | ||||
|     from tractor import _root | ||||
|     _root._default_lo_addrs = [_reg_addr] | ||||
| 
 | ||||
|     return _reg_addr | ||||
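| 
 | ||||
| # (editor's sketch) a test can then opt in to the session-dynamic | ||||
| # registry addr explicitly; `registry_addrs` is the same kwarg used | ||||
| # by the commented `open_test_runtime` fixture below: | ||||
| # | ||||
| #   async def main(reg_addr: tuple[str, int]): | ||||
| #       async with tractor.open_nursery( | ||||
| #           registry_addrs=[reg_addr], | ||||
| #       ) as an: | ||||
| #           ... | ||||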
| 
 | ||||
| 
 | ||||
| def pytest_generate_tests(metafunc): | ||||
|  | @ -200,6 +150,18 @@ def pytest_generate_tests(metafunc): | |||
|         metafunc.parametrize("start_method", [spawn_backend], scope='module') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't registry addr collide! | ||||
| # @pytest.fixture | ||||
| # def open_test_runtime( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> AsyncContextManager: | ||||
| #     return partial( | ||||
| #         tractor.open_nursery, | ||||
| #         registry_addrs=[reg_addr], | ||||
| #     ) | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog(proc, sig): | ||||
|     "Kill the actor-process with ``sig``." | ||||
|     proc.send_signal(sig) | ||||
|  | @ -212,34 +174,40 @@ def sig_prog(proc, sig): | |||
|     assert ret | ||||
| 
 | ||||
| 
 | ||||
| # TODO: factor into @cm and move to `._testing`? | ||||
| @pytest.fixture | ||||
| def daemon( | ||||
|     loglevel: str, | ||||
|     testdir, | ||||
|     arb_addr: tuple[str, int], | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     ''' | ||||
|     Run a daemon actor as a "remote arbiter". | ||||
|     Run a daemon root actor as a separate actor-process tree and | ||||
|     "remote registrar" for discovery-protocol related tests. | ||||
| 
 | ||||
|     ''' | ||||
|     if loglevel in ('trace', 'debug'): | ||||
|         # too much logging will lock up the subproc (smh) | ||||
|         loglevel = 'info' | ||||
|         # XXX: too much logging will lock up the subproc (smh) | ||||
|         loglevel: str = 'info' | ||||
| 
 | ||||
|     cmdargs = [ | ||||
|         sys.executable, '-c', | ||||
|         "import tractor; tractor.run_daemon([], registry_addr={}, loglevel={})" | ||||
|         .format( | ||||
|             arb_addr, | ||||
|             "'{}'".format(loglevel) if loglevel else None) | ||||
|     code: str = ( | ||||
|             "import tractor; " | ||||
|             "tractor.run_daemon([], registry_addrs={reg_addrs}, loglevel={ll})" | ||||
|     ).format( | ||||
|         reg_addrs=str([reg_addr]), | ||||
|         ll="'{}'".format(loglevel) if loglevel else None, | ||||
|     ) | ||||
|     cmd: list[str] = [ | ||||
|         sys.executable, | ||||
|         '-c', code, | ||||
|     ] | ||||
|     kwargs = dict() | ||||
|     kwargs = {} | ||||
|     if platform.system() == 'Windows': | ||||
|         # without this, tests hang on windows forever | ||||
|         kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP | ||||
| 
 | ||||
|     proc = testdir.popen( | ||||
|         cmdargs, | ||||
|         cmd, | ||||
|         stdout=subprocess.PIPE, | ||||
|         stderr=subprocess.PIPE, | ||||
|         **kwargs, | ||||
|  |  | |||
|  | @ -0,0 +1,219 @@ | |||
| ''' | ||||
| `tractor.devx.*` tooling sub-pkg test space. | ||||
| 
 | ||||
| ''' | ||||
| import time | ||||
| from typing import ( | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
| ) | ||||
| from pexpect.spawnbase import SpawnBase | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     mk_cmd, | ||||
| ) | ||||
| from tractor.devx._debug import ( | ||||
|     _pause_msg as _pause_msg, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header, | ||||
| ) | ||||
| from conftest import ( | ||||
|     _ci_env, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method, | ||||
|     testdir: pytest.Testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
| ) -> Callable[[str], None]: | ||||
|     ''' | ||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to | ||||
|     run an `./examples/..` script by name. | ||||
| 
 | ||||
|     ''' | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             '`pexpect` based tests only supported on `trio` backend' | ||||
|         ) | ||||
| 
 | ||||
|     def _spawn( | ||||
|         cmd: str, | ||||
|         **mkcmd_kwargs, | ||||
|     ): | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd( | ||||
|                 cmd, | ||||
|                 **mkcmd_kwargs, | ||||
|             ), | ||||
|             expect_timeout=3, | ||||
|         ) | ||||
| 
 | ||||
|     # such that test-dep can pass input script name. | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until' | ||||
|                 ' we solve this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # with the added color-char encoding.. | ||||
|         from tractor.devx._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # normally a `pdb` prompt by default | ||||
|     patt: str, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def in_prompt_msg( | ||||
|     child: SpawnBase, | ||||
|     parts: list[str], | ||||
| 
 | ||||
|     pause_on_false: bool = False, | ||||
|     err_on_false: bool = False, | ||||
|     print_prompt_on_false: bool = True, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate check if (the prompt's) std-streams output has all | ||||
|     `str`-parts in it. | ||||
| 
 | ||||
|     Can be used in test asserts for bulk matching expected | ||||
|     log/REPL output for a given `pdb` interact point. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     before: str = str(child.before.decode()) | ||||
|     for part in parts: | ||||
|         if part not in before: | ||||
|             if pause_on_false: | ||||
|                 import pdbp | ||||
|                 pdbp.set_trace() | ||||
| 
 | ||||
|             if print_prompt_on_false: | ||||
|                 print(before) | ||||
| 
 | ||||
|             if err_on_false: | ||||
|                 raise ValueError( | ||||
|                     f'Could not find pattern in `before` output?\n' | ||||
|                     f'part: {part!r}\n' | ||||
|                 ) | ||||
|             return False | ||||
| 
 | ||||
|     return True | ||||
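| 
 | ||||
| # (editor's sketch) typical usage inside a test body, assuming | ||||
| # `child` is a `pexpect` spawn handle sitting at a `(Pdb+)` prompt: | ||||
| # | ||||
| #   assert in_prompt_msg( | ||||
| #       child, | ||||
| #       [_crash_msg, "('root'"], | ||||
| #       err_on_false=True, | ||||
| #   ) | ||||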
| 
 | ||||
| 
 | ||||
| # TODO: support terminal color-char stripping so we can match | ||||
| # against call stack frame output from the 'll' command and the like! | ||||
| # -[ ] SO answer for stripping ANSI codes: https://stackoverflow.com/a/14693789 | ||||
| def assert_before( | ||||
|     child: SpawnBase, | ||||
|     patts: list[str], | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child=child, | ||||
|         parts=patts, | ||||
| 
 | ||||
|         # since this is an "assert" helper ;) | ||||
|         err_on_false=True, | ||||
|         **kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: str|None = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> str|None: | ||||
| 
 | ||||
|     before: str|None = None | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
|     # return the console content up to the final prompt | ||||
|     return before | ||||
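| 
 | ||||
| # (editor's sketch) e.g. checking that a burst of interrupts only | ||||
| # re-prints the prompt while the last console line still matches, | ||||
| # hypothetically: | ||||
| # | ||||
| #   do_ctlc( | ||||
| #       child, | ||||
| #       count=3, | ||||
| #       patt='AssertionError', | ||||
| #   ) | ||||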
|  | @ -10,24 +10,28 @@ TODO: | |||
|     - wonder if any of it'll work on OS X? | ||||
| 
 | ||||
| """ | ||||
| from functools import partial | ||||
| import itertools | ||||
| from os import path | ||||
| from typing import Optional | ||||
| import platform | ||||
| import pathlib | ||||
| import sys | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| import pexpect | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _repl_fail_msg, | ||||
| ) | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
| ) | ||||
| 
 | ||||
| # TODO: The next great debugger audit could be done by you! | ||||
|  | @ -47,15 +51,6 @@ if platform.system() == 'Windows': | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd(ex_name: str) -> str: | ||||
|     ''' | ||||
|     Generate a command suitable to pass to ``pexpect.spawn()``. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = examples_dir() / 'debugging' / f'{ex_name}.py' | ||||
|     return ' '.join(['python', str(script_path)]) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: was trying to use this xfail style but some weird bug i see in CI | ||||
| # that's happening at collect time.. pretty soon gonna dump actions i'm | ||||
| # thinkin... | ||||
|  | @ -74,104 +69,6 @@ has_nested_actors = pytest.mark.has_nested_actors | |||
| # ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method, | ||||
|     testdir, | ||||
|     arb_addr, | ||||
| ) -> 'pexpect.spawn': | ||||
| 
 | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             "Debugger tests are only supported on the trio backend" | ||||
|         ) | ||||
| 
 | ||||
|     def _spawn(cmd): | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd(cmd), | ||||
|             expect_timeout=3, | ||||
|         ) | ||||
| 
 | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # prompt by default | ||||
|     patt: str = PROMPT, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| def assert_before( | ||||
|     child, | ||||
|     patts: list[str], | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for patt in patts: | ||||
|         try: | ||||
|             assert patt in before | ||||
|         except AssertionError: | ||||
|             print(before) | ||||
|             raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until' | ||||
|                 ' we solve this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # with the added color-char encoding.. | ||||
|         from tractor._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'user_in_out', | ||||
|     [ | ||||
|  | @ -180,7 +77,10 @@ def ctlc( | |||
|     ], | ||||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_error(spawn, user_in_out): | ||||
| def test_root_actor_error( | ||||
|     spawn, | ||||
|     user_in_out, | ||||
| ): | ||||
|     ''' | ||||
|     Demonstrate crash handler entering pdb from basic error in root actor. | ||||
| 
 | ||||
|  | @ -192,11 +92,15 @@ def test_root_actor_error(spawn, user_in_out): | |||
|     # scan for the prompt | ||||
|     expect(child, PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     # make sure expected logging and error arrives | ||||
|     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||
|     assert 'AssertionError' in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('root'", | ||||
|             'AssertionError', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # send user command | ||||
|     child.sendline(user_input) | ||||
|  | @ -215,8 +119,10 @@ def test_root_actor_error(spawn, user_in_out): | |||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_bp(spawn, user_in_out): | ||||
|     """Demonstrate breakpoint from in root actor. | ||||
|     """ | ||||
|     ''' | ||||
|     Demonstrate a breakpoint from within the root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     user_input, expect_err_str = user_in_out | ||||
|     child = spawn('root_actor_breakpoint') | ||||
| 
 | ||||
|  | @ -230,7 +136,7 @@ def test_root_actor_bp(spawn, user_in_out): | |||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     if expect_err_str is None: | ||||
|         assert 'Error' not in str(child.before) | ||||
|  | @ -238,38 +144,6 @@ def test_root_actor_bp(spawn, user_in_out): | |||
|         assert expect_err_str in str(child.before) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: Optional[str] = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
| 
 | ||||
| def test_root_actor_bp_forever( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
|  | @ -309,7 +183,7 @@ def test_root_actor_bp_forever( | |||
| 
 | ||||
|     # quit out of the loop | ||||
|     child.sendline('q') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -331,8 +205,13 @@ def test_subactor_error( | |||
|     # scan for the prompt | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('name_error'" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('name_error'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if do_next: | ||||
|         child.sendline('n') | ||||
|  | @ -350,12 +229,16 @@ def test_subactor_error( | |||
|         child.sendline('continue') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             # root actor gets debugger engaged | ||||
|     assert "Attaching to pdb in crashed actor: ('root'" in before | ||||
|             "('root'", | ||||
|             # error is a remote error propagated from the subactor | ||||
|     assert "RemoteActorError: ('name_error'" in before | ||||
|             "('name_error'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # another round | ||||
|     if ctlc: | ||||
|  | @ -365,7 +248,7 @@ def test_subactor_error( | |||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_subactor_breakpoint( | ||||
|  | @ -375,12 +258,12 @@ def test_subactor_breakpoint( | |||
|     "Single subactor with an infinite breakpoint loop" | ||||
| 
 | ||||
|     child = spawn('subactor_breakpoint') | ||||
| 
 | ||||
|     # scan for the prompt | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [_pause_msg, | ||||
|          "('breakpoint_forever'",] | ||||
|     ) | ||||
| 
 | ||||
|     # do some "next" commands to demonstrate recurrent breakpoint | ||||
|     # entries | ||||
|  | @ -395,8 +278,10 @@ def test_subactor_breakpoint( | |||
|     for _ in range(5): | ||||
|         child.sendline('continue') | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
|         assert in_prompt_msg( | ||||
|             child, | ||||
|             [_pause_msg, "('breakpoint_forever'"] | ||||
|         ) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc(child) | ||||
|  | @ -407,9 +292,12 @@ def test_subactor_breakpoint( | |||
|     # child process should exit but parent will capture pdb.BdbQuit | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         ['RemoteActorError:', | ||||
|          "('breakpoint_forever'", | ||||
|          'bdb.BdbQuit',] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -418,11 +306,14 @@ def test_subactor_breakpoint( | |||
|     child.sendline('c') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         ['RemoteActorError:', | ||||
|          "('breakpoint_forever'", | ||||
|          'bdb.BdbQuit',] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @has_nested_actors | ||||
|  | @ -441,7 +332,10 @@ def test_multi_subactors( | |||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [_pause_msg, "('breakpoint_forever'"] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -460,9 +354,14 @@ def test_multi_subactors( | |||
| 
 | ||||
|     # first name_error failure | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('name_error'" in before | ||||
|     assert "NameError" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('name_error'", | ||||
|             "NameError", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -486,8 +385,10 @@ def test_multi_subactors( | |||
|     # breakpoint loop should re-engage | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching pdb to actor: ('breakpoint_forever'" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [_pause_msg, "('breakpoint_forever'"] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -527,9 +428,12 @@ def test_multi_subactors( | |||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     assert_before(child, [ | ||||
|     assert_before( | ||||
|         child, [ | ||||
|             # debugger attaches to root | ||||
|         "Attaching to pdb in crashed actor: ('root'", | ||||
|             # "Attaching to pdb in crashed actor: ('root'", | ||||
|             _crash_msg, | ||||
|             "('root'", | ||||
| 
 | ||||
|             # expect a multierror with exceptions for each sub-actor | ||||
|             "RemoteActorError: ('breakpoint_forever'", | ||||
|  | @ -537,14 +441,15 @@ def test_multi_subactors( | |||
|             "RemoteActorError: ('spawn_error'", | ||||
|             "RemoteActorError: ('name_error_1'", | ||||
|             'bdb.BdbQuit', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     # repeat of previous multierror for final output | ||||
|     assert_before(child, [ | ||||
|  | @ -574,18 +479,28 @@ def test_multi_daemon_subactors( | |||
|     # the root's tty lock first so anticipate either crash | ||||
|     # message on the first entry. | ||||
| 
 | ||||
|     bp_forever_msg = "Attaching pdb to actor: ('bp_forever'" | ||||
|     name_error_msg = "NameError: name 'doggypants' is not defined" | ||||
|     bp_forev_parts = [ | ||||
|         _pause_msg, | ||||
|         "('bp_forever'", | ||||
|     ] | ||||
|     bp_forev_in_msg = partial( | ||||
|         in_prompt_msg, | ||||
|         parts=bp_forev_parts, | ||||
|     ) | ||||
| 
 | ||||
|     name_error_msg: str = "NameError: name 'doggypants' is not defined" | ||||
|     name_error_parts: list[str] = [name_error_msg] | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     if bp_forever_msg in before: | ||||
|         next_msg = name_error_msg | ||||
| 
 | ||||
|     if bp_forev_in_msg(child=child): | ||||
|         next_parts = name_error_parts | ||||
| 
 | ||||
|     elif name_error_msg in before: | ||||
|         next_msg = bp_forever_msg | ||||
|         next_parts = bp_forev_parts | ||||
| 
 | ||||
|     else: | ||||
|         raise ValueError("Neither log msg was found !?") | ||||
|         raise ValueError('Neither log msg was found !?') | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -599,7 +514,10 @@ def test_multi_daemon_subactors( | |||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     assert_before(child, [next_msg]) | ||||
|     assert_before( | ||||
|         child, | ||||
|         next_parts, | ||||
|     ) | ||||
| 
 | ||||
|     # XXX: hooray the root clobbering the child here was fixed! | ||||
|     # IMO, this demonstrates the true power of SC system design. | ||||
|  | @ -607,7 +525,7 @@ def test_multi_daemon_subactors( | |||
|     # now the root actor won't clobber the bp_forever child | ||||
|     # during it's first access to the debug lock, but will instead | ||||
|     # wait for the lock to release, by the edge triggered | ||||
|     # ``_debug.Lock.no_remote_has_tty`` event before sending cancel messages | ||||
|     # ``devx._debug.Lock.no_remote_has_tty`` event before sending cancel messages | ||||
|     # (via portals) to its underlings B) | ||||
| 
 | ||||
|     # at some point here there should have been some warning msg from | ||||
|  | @ -623,9 +541,15 @@ def test_multi_daemon_subactors( | |||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     try: | ||||
|         assert_before(child, [bp_forever_msg]) | ||||
|         assert_before( | ||||
|             child, | ||||
|             bp_forev_parts, | ||||
|         ) | ||||
|     except AssertionError: | ||||
|         assert_before(child, [name_error_msg]) | ||||
|         assert_before( | ||||
|             child, | ||||
|             name_error_parts, | ||||
|         ) | ||||
| 
 | ||||
|     else: | ||||
|         if ctlc: | ||||
|  | @ -637,32 +561,36 @@ def test_multi_daemon_subactors( | |||
| 
 | ||||
|         child.sendline('c') | ||||
|         child.expect(PROMPT) | ||||
|         assert_before(child, [name_error_msg]) | ||||
|         assert_before( | ||||
|             child, | ||||
|             name_error_parts, | ||||
|         ) | ||||
| 
 | ||||
|     # wait for final error in root | ||||
|     # where it crashs with boxed error | ||||
|     while True: | ||||
|         child.sendline('c') | ||||
|         child.expect(PROMPT) | ||||
|         if not in_prompt_msg( | ||||
|             child, | ||||
|             bp_forev_parts | ||||
|         ): | ||||
|             break | ||||
| 
 | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # boxed error raised in root task | ||||
|             "Attaching to pdb in crashed actor: ('root'", | ||||
|             "_exceptions.RemoteActorError: ('name_error'", | ||||
|             # "Attaching to pdb in crashed actor: ('root'", | ||||
|             _crash_msg, | ||||
|             "('root'",  # should attach in root | ||||
|             "_exceptions.RemoteActorError:",  # with an embedded RAE for.. | ||||
|             "('name_error'",  # the src subactor which raised | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| @has_nested_actors | ||||
|  | @ -738,7 +666,7 @@ def test_multi_subactors_root_errors( | |||
|     ]) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     assert_before(child, [ | ||||
|         # "Attaching to pdb in crashed actor: ('root'", | ||||
|  | @ -758,10 +686,11 @@ def test_multi_nested_subactors_error_through_nurseries( | |||
|     # https://github.com/goodboy/tractor/issues/320 | ||||
|     # ctlc: bool, | ||||
| ): | ||||
|     """Verify deeply nested actors that error trigger debugger entries | ||||
|     ''' | ||||
|     Verify deeply nested actors that error trigger debugger entries | ||||
|     at each actor nursery (level) all the way up the tree. | ||||
| 
 | ||||
|     """ | ||||
|     ''' | ||||
|     # NOTE: previously, inside this script was a bug where if the | ||||
|     # parent errors before a 2-levels-lower actor has released the lock, | ||||
|     # the parent tries to cancel it but it's stuck in the debugger? | ||||
|  | @ -770,7 +699,7 @@ def test_multi_nested_subactors_error_through_nurseries( | |||
| 
 | ||||
|     child = spawn('multi_nested_subactors_error_up_through_nurseries') | ||||
| 
 | ||||
|     timed_out_early: bool = False | ||||
|     # timed_out_early: bool = False | ||||
| 
 | ||||
|     for send_char in itertools.cycle(['c', 'q']): | ||||
|         try: | ||||
|  | @ -781,22 +710,31 @@ def test_multi_nested_subactors_error_through_nurseries( | |||
|         except EOF: | ||||
|             break | ||||
| 
 | ||||
| 
 | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ # boxed source errors | ||||
|             "NameError: name 'doggypants' is not defined", | ||||
|         "tractor._exceptions.RemoteActorError: ('name_error'", | ||||
|             "tractor._exceptions.RemoteActorError:", | ||||
|             "('name_error'", | ||||
|             "bdb.BdbQuit", | ||||
| 
 | ||||
|             # first level subtrees | ||||
|         "tractor._exceptions.RemoteActorError: ('spawner0'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawner0'", | ||||
|             "src_uid=('spawner0'", | ||||
| 
 | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawner1'", | ||||
| 
 | ||||
|             # propagation of errors up through nested subtrees | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_0'", | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_1'", | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_2'", | ||||
|     ]) | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_0'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_1'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_2'", | ||||
|             # ^-NOTE-^ old RAE repr, new one is below with a field | ||||
|             # showing the src actor's uid. | ||||
|             "src_uid=('spawn_until_0'", | ||||
|             "relay_uid=('spawn_until_1'", | ||||
|             "src_uid=('spawn_until_2'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.timeout(15) | ||||
|  | @ -817,10 +755,13 @@ def test_root_nursery_cancels_before_child_releases_tty_lock( | |||
|     child = spawn('root_cancelled_but_child_is_in_tty_lock') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "NameError: name 'doggypants' is not defined" in before | ||||
|     assert "tractor._exceptions.RemoteActorError: ('name_error'" not in before | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "NameError: name 'doggypants' is not defined", | ||||
|             "tractor._exceptions.RemoteActorError: ('name_error'", | ||||
|         ], | ||||
|     ) | ||||
|     time.sleep(0.5) | ||||
| 
 | ||||
|     if ctlc: | ||||
|  | @ -858,7 +799,7 @@ def test_root_nursery_cancels_before_child_releases_tty_lock( | |||
| 
 | ||||
|     for i in range(3): | ||||
|         try: | ||||
|             child.expect(pexpect.EOF, timeout=0.5) | ||||
|             child.expect(EOF, timeout=0.5) | ||||
|             break | ||||
|         except TIMEOUT: | ||||
|             child.sendline('c') | ||||
|  | @ -871,11 +812,14 @@ def test_root_nursery_cancels_before_child_releases_tty_lock( | |||
| 
 | ||||
|     if not timed_out_early: | ||||
|         before = str(child.before.decode()) | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 "tractor._exceptions.RemoteActorError: ('spawner0'", | ||||
|                 "tractor._exceptions.RemoteActorError: ('name_error'", | ||||
|                 "NameError: name 'doggypants' is not defined", | ||||
|             ], | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def test_root_cancels_child_context_during_startup( | ||||
|  | @ -897,7 +841,7 @@ def test_root_cancels_child_context_during_startup( | |||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_different_debug_mode_per_actor( | ||||
|  | @ -908,26 +852,249 @@ def test_different_debug_mode_per_actor( | |||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # only one actor should enter the debugger | ||||
|     before = str(child.before.decode()) | ||||
|     assert "Attaching to pdb in crashed actor: ('debugged_boi'" in before | ||||
|     assert "RuntimeError" in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [_crash_msg, "('debugged_boi'", "RuntimeError"], | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     # NOTE: this debugged actor error currently WON'T show up since the | ||||
|     # root will actually cancel and terminate the nursery before the error | ||||
|     # msg reported back from the debug mode actor is processed. | ||||
|     # assert "tractor._exceptions.RemoteActorError: ('debugged_boi'" in before | ||||
| 
 | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
| 
 | ||||
|     # the crash boi should not have made a debugger request but | ||||
|     # instead crashed completely | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
|     assert "RuntimeError" in before | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "tractor._exceptions.RemoteActorError:", | ||||
|             "src_uid=('crash_boi'", | ||||
|             "RuntimeError", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_post_mortem_api( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.post_mortem()` API works in an exception | ||||
|     handler block. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('pm_in_subactor') | ||||
| 
 | ||||
|     # First entry is via manual `.post_mortem()` | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task 'name_error'", | ||||
|             "NameError", | ||||
|             "('child'", | ||||
|             "tractor.post_mortem()", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 2nd is RPC crash handler | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task 'name_error'", | ||||
|             "NameError", | ||||
|             "('child'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 3rd is via RAE bubbled to root's parent ctx task and | ||||
|     # crash-handled via another manual pm call. | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "NameError", | ||||
|             "tractor.post_mortem()", | ||||
|             "src_uid=('child'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 4th and FINAL is via RAE bubbled to root's parent ctx task and | ||||
|     # crash-handled via another manual pm call. | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "NameError", | ||||
|             "src_uid=('child'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
| 
 | ||||
|     # TODO: ensure we're stopped and showing the right call stack frame | ||||
|     # -[ ] need a way to strip the terminal color chars in order to | ||||
|     #    pattern match... see TODO around `assert_before()` above! | ||||
|     # child.sendline('w') | ||||
|     # child.expect(PROMPT) | ||||
|     # assert_before( | ||||
|     #     child, | ||||
|     #     [ | ||||
|     #         # error src block annot at ctx open | ||||
|     #         '-> async with p.open_context(name_error) as (ctx, first):', | ||||
|     #     ] | ||||
|     # ) | ||||
| 
 | ||||
|     # # step up a frame to ensure the it's the root's nursery | ||||
|     # child.sendline('u') | ||||
|     # child.expect(PROMPT) | ||||
|     # assert_before( | ||||
|     #     child, | ||||
|     #     [ | ||||
|     #         # handler block annotation | ||||
|     #         '-> async with tractor.open_nursery(', | ||||
|     #     ] | ||||
|     # ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|     already cancelled `trio.CancelScope` and that you can step to the | ||||
|     next checkpoint wherein the `Cancelled` will be raised. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('shielded_pause') | ||||
| 
 | ||||
|     # First entry is via manual `.post_mortem()` | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "cancellable_pause_loop'", | ||||
|             "('cancelled_before_pause'",  # actor name | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # continue for each of the 3 tries in the example's shield-pause loop | ||||
|     for i in range(3): | ||||
|         child.sendline('c') | ||||
|         child.expect(PROMPT) | ||||
|         assert_before( | ||||
|             child, | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "INSIDE SHIELDED PAUSE", | ||||
|                 "('cancelled_before_pause'",  # actor name | ||||
|             ] | ||||
|         ) | ||||
| 
 | ||||
|     # back inside parent task that opened nursery | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('cancelled_before_pause'",  # actor name | ||||
|             _repl_fail_msg, | ||||
|             "trio.Cancelled", | ||||
|             "raise Cancelled._create()", | ||||
| 
 | ||||
|             # we should be handling a taskc inside | ||||
|             # the first `.post_mortem()` sans shield! | ||||
|             'await DebugStatus.req_finished.wait()', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # same as above but in the root actor's task | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('root'",  # actor name | ||||
|             _repl_fail_msg, | ||||
|             "trio.Cancelled", | ||||
|             "raise Cancelled._create()", | ||||
| 
 | ||||
|             # handling a taskc inside the first unshielded | ||||
|             # `.post_mortem()`. | ||||
|             # BUT in this case in the root-proc path ;) | ||||
|             'wait Lock._debug_lock.acquire()', | ||||
|         ] | ||||
|     ) | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: better error for "non-ideal" usage from the root actor. | ||||
| # -[ ] if called from an async scope emit a message that suggests | ||||
| #    using `await tractor.pause()` instead since it's less overhead | ||||
| #    (in terms of `greenback` and/or extra threads) and if it's from | ||||
| #    a sync scope suggest that usage must first call | ||||
| #    `ensure_portal()` in the (eventual parent) async calling scope? | ||||
| def test_sync_pause_from_bg_task_in_root_actor_(): | ||||
|     ''' | ||||
|     When used from the root actor, normally we can only implicitly | ||||
|     support `.pause_from_sync()` from the main-parent-task (that | ||||
|     opens the runtime via `open_root_actor()`) since `greenback` | ||||
|     requires a `.ensure_portal()` call per `trio.Task` where it is | ||||
|     used. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
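|     # XXX a rough, hypothetical sketch of what this test might | ||||
|     # eventually drive; assumes `greenback`/`trio`/`tractor` | ||||
|     # imports and that the bg task must itself first run | ||||
|     # `greenback.ensure_portal()` per the docstring above. | ||||
|     # | ||||
|     # async def bg_task(): | ||||
|     #     await greenback.ensure_portal()  # per-`trio.Task` req | ||||
|     #     tractor.pause_from_sync()  # should then work from here | ||||
|     # | ||||
|     # async def main(): | ||||
|     #     async with ( | ||||
|     #         tractor.open_root_actor(debug_mode=True), | ||||
|     #         trio.open_nursery() as tn, | ||||
|     #     ): | ||||
|     #         tn.start_soon(bg_task) | ||||
|     # | ||||
|     # trio.run(main) | ||||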
| 
 | ||||
| # TODO: needs ANSI code stripping tho, see `assert_before()` above! | ||||
| def test_correct_frames_below_hidden(): | ||||
|     ''' | ||||
|     Ensure that once a `tractor.pause()` engages, a "next"/"n" | ||||
|     command steps to the actual next line, and that a "step"/"s" | ||||
|     into the next LOC, particularly `tractor` APIs, steps down | ||||
|     into that code. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
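|     # A possible helper for the ANSI-stripping TODOs (hypothetical, | ||||
|     # not yet in `conftest.py`): drop SGR color/style sequences so | ||||
|     # that `assert_before()`-style matching works on colorized REPL | ||||
|     # output. | ||||
|     # | ||||
|     # import re | ||||
|     # _sgr_pat = re.compile(r'\x1b\[[0-9;]*m') | ||||
|     # | ||||
|     # def strip_sgr(text: str) -> str: | ||||
|     #     # remove terminal color/style escape codes | ||||
|     #     return _sgr_pat.sub('', text) | ||||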
| 
 | ||||
| 
 | ||||
| def test_cant_pause_from_paused_task(): | ||||
|     ''' | ||||
|     Pausing from within an already paused task should raise an error. | ||||
| 
 | ||||
|     Normally this should only happen in practice while debugging | ||||
|     the call stack of `tractor.pause()` itself, likely via | ||||
|     a `.pause()` line somewhere inside our runtime. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
|  | @ -0,0 +1,350 @@ | |||
| ''' | ||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! | ||||
| 
 | ||||
| Same as `test_native_pause.py`. | ||||
| All these tests can be understood (somewhat) by running the | ||||
| equivalent `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| ''' | ||||
| # from functools import partial | ||||
| # import itertools | ||||
| import time | ||||
| # from typing import ( | ||||
| #     Iterator, | ||||
| # ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     # _ci_env, | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     # expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _ctlc_ignore_header, | ||||
|     # _repl_fail_msg, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def test_pause_from_sync( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from sync functions AND from | ||||
|     any thread spawned with `trio.to_thread.run_sync()`. | ||||
| 
 | ||||
|     `examples/debugging/sync_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('sync_bp') | ||||
| 
 | ||||
|     # first `sync_pause()` after nurseries open | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # pre-prompt line | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|         # ^NOTE^ subactor not spawned yet; don't need extra delay. | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # first `await tractor.pause()` inside `p.open_context()` body | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! | ||||
|     assert not in_prompt_msg( | ||||
|         child, | ||||
|         ['`greenback` portal opened!'], | ||||
|     ) | ||||
|     # should be same root task | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     # XXX, fwiw without a brief sleep here the SIGINT might actually | ||||
|     # trigger "subactor" cancellation by its parent  before the | ||||
|     # shield-handler is engaged. | ||||
|     # | ||||
|     # => similar to the `delay` input to `do_ctlc()` below, setting | ||||
|     # this too low can cause the test to fail since the `subactor` | ||||
|     # suffers a race where the root/parent sends an actor-cancel | ||||
|     # prior to the context task hitting its pause point (and thus | ||||
|     # engaging the `sigint_shield()` handler in time); this value | ||||
|     # seems to be good enuf? | ||||
|     time.sleep(0.6) | ||||
| 
 | ||||
|     # one of the bg thread or subactor should have | ||||
|     # `Lock.acquire()`-ed | ||||
|     # (NOT both, which will result in REPL clobbering!) | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
|         'subactor': [ | ||||
|             "'start_n_sync_pause'", | ||||
|             "('subactor'", | ||||
|         ], | ||||
|         'inline_root_bg_thread': [ | ||||
|             "<Thread(inline_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|         'start_soon_root_bg_thread': [ | ||||
|             "<Thread(start_soon_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     conts: int = 0  # for debugging below matching logic on failure | ||||
|     while attach_patts: | ||||
|         child.sendline('c') | ||||
|         conts += 1 | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         for key in attach_patts: | ||||
|             if key in before: | ||||
|                 attach_key: str = key | ||||
|                 expected_patts: list[str] = attach_patts.pop(key) | ||||
|                 assert_before( | ||||
|                     child, | ||||
|                     [_pause_msg] | ||||
|                     + | ||||
|                     expected_patts | ||||
|                 ) | ||||
|                 break | ||||
|         else: | ||||
|             pytest.fail( | ||||
|                 f'No keys found?\n\n' | ||||
|                 f'{attach_patts.keys()}\n\n' | ||||
|                 f'{before}\n' | ||||
|             ) | ||||
| 
 | ||||
|         # ensure no other task/threads engaged a REPL | ||||
|         # at the same time as the one that was detected above. | ||||
|         for key, other_patts in attach_patts.copy().items(): | ||||
|             assert not in_prompt_msg( | ||||
|                 child, | ||||
|                 other_patts, | ||||
|             ) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc( | ||||
|                 child, | ||||
|                 patt=attach_key, | ||||
|                 # NOTE same as comment above | ||||
|                 delay=0.4, | ||||
|             ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def expect_any_of( | ||||
|     attach_patts: dict[str, list[str]], | ||||
|     child,   # what type? | ||||
|     ctlc: bool = False, | ||||
|     prompt: str = _ctlc_ignore_header, | ||||
|     ctlc_delay: float = .4, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Receive any of a `list[str]` of patterns provided in | ||||
|     `attach_patts`. | ||||
| 
 | ||||
|     Used to test racing prompts from multiple actors and/or | ||||
|     tasks using a common root process' `pdbp` REPL. | ||||
| 
 | ||||
|     ''' | ||||
|     assert attach_patts | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for attach_key in attach_patts: | ||||
|         if attach_key in before: | ||||
|             expected_patts: list[str] = attach_patts.pop(attach_key) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 expected_patts | ||||
|             ) | ||||
|             break  # from for | ||||
|     else: | ||||
|         pytest.fail( | ||||
|             f'No keys found?\n\n' | ||||
|             f'{attach_patts.keys()}\n\n' | ||||
|             f'{before}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ensure no other task/threads engaged a REPL | ||||
|     # at the same time as the one that was detected above. | ||||
|     for key, other_patts in attach_patts.copy().items(): | ||||
|         assert not in_prompt_msg( | ||||
|             child, | ||||
|             other_patts, | ||||
|         ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             patt=prompt, | ||||
|             # NOTE same as comment above | ||||
|             delay=ctlc_delay, | ||||
|         ) | ||||
| 
 | ||||
|     return expected_patts | ||||
| 
 | ||||
| 
 | ||||
| def test_sync_pause_from_aio_task( | ||||
|     spawn, | ||||
|     ctlc: bool | ||||
|     # ^TODO, fix for `asyncio`!! | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using | ||||
|     APIs in `.to_asyncio`. | ||||
| 
 | ||||
|     `examples/debugging/asyncio_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('asyncio_bp') | ||||
| 
 | ||||
|     # RACE on whether trio/asyncio task bps first | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # first pause in guest-mode (aka "infecting") | ||||
|         # `trio.Task`. | ||||
|         'trio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # `breakpoint()` from `asyncio.Task`. | ||||
|         'asyncio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task pending name='Task-2' coro=<greenback_shim()", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
|     } | ||||
| 
 | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     # NOW in race order, | ||||
|     # - the asyncio-task will error | ||||
|     # - the root-actor parent task will pause | ||||
|     # | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # error raised in `asyncio.Task` | ||||
|         "raise ValueError('asyncio side error!')": [ | ||||
|             _crash_msg, | ||||
|             'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|             "<Task 'trio_ctx'", | ||||
|             "@ ('aio_daemon'", | ||||
|             "ValueError: asyncio side error!", | ||||
|         ], | ||||
| 
 | ||||
|         # parent-side propagation via actor-nursery/portal | ||||
|         # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [ | ||||
|         "remote task raised a 'ValueError'": [ | ||||
|             _crash_msg, | ||||
|             "src_uid=('aio_daemon'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # a final pause in root-actor | ||||
|         "<Task '__main__.main'": [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     assert not attach_patts | ||||
| 
 | ||||
|     # final boxed error propagates to root | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "remote task raised a 'ValueError'", | ||||
|             "ValueError: asyncio side error!", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_sync_pause_from_non_greenbacked_aio_task(): | ||||
|     ''' | ||||
|     Where the `breakpoint()` caller task is NOT spawned by | ||||
|     `tractor.to_asyncio` and thus never activates | ||||
|     a `greenback.ensure_portal()` beforehand, presumably because | ||||
|     the task was started by some lib/dep, as is often seen in the | ||||
|     field. | ||||
| 
 | ||||
|     Ensure sync pausing works when the pause is in, | ||||
| 
 | ||||
|     - the root actor running in infected-mode? | ||||
|       |_ since we don't need any IPC to acquire the debug lock? | ||||
|       |_ is there some way to handle this like the non-main-thread case? | ||||
| 
 | ||||
|     All other cases need to error out appropriately right? | ||||
| 
 | ||||
|     - for any subactor we can't avoid needing the repl lock.. | ||||
|       |_ is there a way to hook into `asyncio.ensure_future(obj)`? | ||||
| 
 | ||||
|     ''' | ||||
|     pass | ||||
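|     # Hypothetical shape of the scenario described above (assumes | ||||
|     # `asyncio`/`tractor` imports): | ||||
|     # | ||||
|     # async def foreign_task(): | ||||
|     #     # spawned by some dep via `asyncio.ensure_future()` so | ||||
|     #     # NO `greenback.ensure_portal()` ever ran for this task.. | ||||
|     #     tractor.pause_from_sync()  # <- should error (or work | ||||
|     #     # only in the root-actor-in-infected-mode case?) | ||||
|     # | ||||
|     # asyncio.ensure_future(foreign_task()) | ||||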
|  | @ -0,0 +1,120 @@ | |||
| ''' | ||||
| That "native" runtime-hackin toolset better be dang useful! | ||||
| 
 | ||||
| Verify the function of a variety of "developer-experience" tools we | ||||
| offer from the `.devx` sub-pkg: | ||||
| 
 | ||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees | ||||
|   during operation and hangs. | ||||
| 
 | ||||
| TODO: | ||||
| - demonstration of `CallerInfo` call stack frame filtering such that | ||||
|   for logging and REPL purposes a user sees exactly the layers needed | ||||
|   when debugging a problem inside the stack vs. in their app. | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| import signal | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     assert_before, | ||||
|     # in_prompt_msg, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|     already cancelled `trio.CancelScope` and that you can step to the | ||||
|     next checkpoint wherein the `Cancelled` will be raised. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn( | ||||
|         'shield_hang_in_sub' | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         'Yo my child hanging..?', | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'Entering shield sleep..', | ||||
|             'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     print( | ||||
|         'Sending SIGUSR1 to see a tree-trace!', | ||||
|     ) | ||||
|     os.kill( | ||||
|         child.pid, | ||||
|         signal.SIGUSR1, | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "------ \('root', ", | ||||
|     ) | ||||
| 
 | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'Trying to dump `stackscope` tree..', | ||||
|             'Dumping `stackscope` tree for actor', | ||||
|             "('root'",  # uid line | ||||
| 
 | ||||
|             # parent block point (non-shielded) | ||||
|             'await trio.sleep_forever()  # in root', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # expect( | ||||
|     #     child, | ||||
|     #     # relay to the sub should be reported | ||||
|     #     'Relaying `SIGUSR1`[10] to sub-actor', | ||||
|     # ) | ||||
| 
 | ||||
|     expect( | ||||
|         child, | ||||
|         # end-of-tree delimiter | ||||
|         "------ \('hanger', ", | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # relay to the sub should be reported | ||||
|             'Relaying `SIGUSR1`[10] to sub-actor', | ||||
| 
 | ||||
|             "('hanger'",  # uid line | ||||
| 
 | ||||
|             # hanger LOC where it's shield-halted | ||||
|             'await trio.sleep_forever()  # in subactor', | ||||
|         ] | ||||
|     ) | ||||
|     # breakpoint() | ||||
| 
 | ||||
|     # simulate the user sending a ctl-c to the hanging program. | ||||
|     # this should result in the terminator kicking in since | ||||
|     # the sub is shield blocking and can't respond to SIGINT. | ||||
|     os.kill( | ||||
|         child.pid, | ||||
|         signal.SIGINT, | ||||
|     ) | ||||
|     expect( | ||||
|         child, | ||||
|         'Shutting down actor runtime', | ||||
|         timeout=6, | ||||
|     ) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             'raise KeyboardInterrupt', | ||||
|             # 'Shutting down actor runtime', | ||||
|             '#T-800 deployed to collect zombie B0', | ||||
|             "'--uid', \"('hanger',", | ||||
|         ] | ||||
|     ) | ||||
|  | @ -3,22 +3,30 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | |||
| cancelacion?.. | ||||
| 
 | ||||
| ''' | ||||
| import itertools | ||||
| from functools import partial | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import pytest | ||||
| from _pytest.pathlib import import_path | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from conftest import ( | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
|     break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'debug_mode', | ||||
|     [False, True], | ||||
|     ids=['no_debug_mode', 'debug_mode'], | ||||
|     'pre_aclose_msgstream', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'no_msgstream_aclose', | ||||
|         'pre_aclose_msgstream', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'ipc_break', | ||||
|  | @ -63,8 +71,10 @@ from conftest import ( | |||
| ) | ||||
| def test_ipc_channel_break_during_stream( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     spawn_backend: str, | ||||
|     ipc_break: dict | None, | ||||
|     ipc_break: dict|None, | ||||
|     pre_aclose_msgstream: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure we can have an IPC channel break its connection during | ||||
|  | @ -81,72 +91,152 @@ def test_ipc_channel_break_during_stream( | |||
| 
 | ||||
|         # non-`trio` spawners should never hit the hang condition that | ||||
|         # requires the user to do ctl-c to cancel the actor tree. | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     mod = import_path( | ||||
|         examples_dir() / 'advanced_faults' / 'ipc_failure_during_stream.py', | ||||
|     mod: ModuleType = import_path( | ||||
|         examples_dir() / 'advanced_faults' | ||||
|         / 'ipc_failure_during_stream.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
| 
 | ||||
|     # by def we expect KBI from user after a simulated "hang | ||||
|     # period" wherein the user eventually hits ctl-c to kill the | ||||
|     # root-actor tree. | ||||
|     expect_final_exc: BaseException = KeyboardInterrupt | ||||
|     if ( | ||||
|         # only expect EoC if trans is broken on the child side, | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|         # AND we tell the child to call `MsgStream.aclose()`. | ||||
|         and pre_aclose_msgstream | ||||
|     ): | ||||
|         # expect_final_exc = trio.EndOfChannel | ||||
|         # ^XXX NOPE! XXX^ since now `.open_stream()` absorbs this | ||||
|         # gracefully! | ||||
|         expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|     # when ONLY the child breaks we expect the parent to get a closed | ||||
|     # resource error on the next `MsgStream.receive()` and then fail out | ||||
|     # and cancel the child from there. | ||||
|     # NOTE when ONLY the child breaks or it breaks BEFORE the | ||||
|     # parent we expect the parent to get a closed resource error | ||||
|     # on the next `MsgStream.receive()` and then fail out and | ||||
|     # cancel the child from there. | ||||
|     # | ||||
|     # ONLY CHILD breaks | ||||
|     if ( | ||||
| 
 | ||||
|         # only child breaks | ||||
|         ( | ||||
|         ipc_break['break_child_ipc_after'] | ||||
|             and ipc_break['break_parent_ipc_after'] is False | ||||
|         ) | ||||
|         and | ||||
|         ipc_break['break_parent_ipc_after'] is False | ||||
|     ): | ||||
|         # NOTE: we DO NOT expect this any more since | ||||
|         # the child side's channel will be broken silently | ||||
|         # and nothing on the parent side will indicate this! | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
| 
 | ||||
|         # both break but, parent breaks first | ||||
|         or ( | ||||
|         # NOTE: child will send a 'stop' msg before it breaks | ||||
|         # the transport channel BUT, that will be absorbed by the | ||||
|         # `ctx.open_stream()` block and thus the `.open_context()` | ||||
|         # should hang, after which the test script simulates | ||||
|         # a user sending ctl-c by raising a KBI. | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|             # XXX OLD XXX | ||||
|             # if child calls `MsgStream.aclose()` then expect EoC. | ||||
|             # ^ XXX not any more ^ since eoc is always absorbed | ||||
|             # gracefully and NOT bubbled to the `.open_context()` | ||||
|             # block! | ||||
|             # expect_final_exc = trio.EndOfChannel | ||||
| 
 | ||||
|     # BOTH but, CHILD breaks FIRST | ||||
|     elif ( | ||||
|         ipc_break['break_child_ipc_after'] is not False | ||||
|         and ( | ||||
|             ipc_break['break_parent_ipc_after'] | ||||
|             > ipc_break['break_child_ipc_after'] | ||||
|         ) | ||||
|         ) | ||||
| 
 | ||||
|     ): | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|     # when the parent IPC side dies (even if the child's does as well | ||||
|     # but the child fails BEFORE the parent) we expect the channel to be | ||||
|     # sent a stop msg from the child at some point which will signal the | ||||
|     # parent that the stream has been terminated. | ||||
|     # NOTE: when the parent breaks "after" the child you get this same | ||||
|     # case as well, the child breaks the IPC channel with a stop msg | ||||
|     # before any closure takes place. | ||||
|     # NOTE when the parent IPC side dies (even if the child does as well | ||||
|     # but the child fails BEFORE the parent) we always expect the | ||||
|     # IPC layer to raise a closed-resource, NEVER do we expect | ||||
|     # a stop msg since the parent-side ctx apis will error out | ||||
|     # IMMEDIATELY before the child ever sends any 'stop' msg. | ||||
|     # | ||||
|     # ONLY PARENT breaks | ||||
|     elif ( | ||||
|         # only parent breaks | ||||
|         ( | ||||
|         ipc_break['break_parent_ipc_after'] | ||||
|             and ipc_break['break_child_ipc_after'] is False | ||||
|         ) | ||||
|         and | ||||
|         ipc_break['break_child_ipc_after'] is False | ||||
|     ): | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|         # both break but, child breaks first | ||||
|         or ( | ||||
|     # BOTH but, PARENT breaks FIRST | ||||
|     elif ( | ||||
|         ipc_break['break_parent_ipc_after'] is not False | ||||
|         and ( | ||||
|             ipc_break['break_child_ipc_after'] | ||||
|                 > ipc_break['break_parent_ipc_after'] | ||||
|             ) | ||||
|             > | ||||
|             ipc_break['break_parent_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         expect_final_exc = trio.EndOfChannel | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     with pytest.raises(expect_final_exc): | ||||
|     with pytest.raises( | ||||
|         expected_exception=( | ||||
|             expect_final_exc, | ||||
|             ExceptionGroup, | ||||
|         ), | ||||
|     ) as excinfo: | ||||
|         try: | ||||
|             trio.run( | ||||
|                 partial( | ||||
|                     mod.main, | ||||
|                     debug_mode=debug_mode, | ||||
|                     start_method=spawn_backend, | ||||
|                     loglevel=loglevel, | ||||
|                     pre_close=pre_aclose_msgstream, | ||||
|                     **ipc_break, | ||||
|                 ) | ||||
|             ) | ||||
|         except KeyboardInterrupt as _kbi: | ||||
|             kbi = _kbi | ||||
|             if expect_final_exc is not KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Rxed unexpected KBI !?\n' | ||||
|                     f'{repr(kbi)}' | ||||
|                 ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|         except tractor.TransportClosed as _tc: | ||||
|             tc = _tc | ||||
|             if expect_final_exc is KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Unexpected transport failure !?\n' | ||||
|                     f'{repr(tc)}' | ||||
|                 ) | ||||
|             cause: Exception = tc.__cause__ | ||||
|             assert ( | ||||
|                 type(cause) is trio.ClosedResourceError | ||||
|                 and | ||||
|                 cause.args[0] == 'another task closed this fd' | ||||
|             ) | ||||
|             raise | ||||
| 
 | ||||
|     # get raw instance from pytest wrapper | ||||
|     value = excinfo.value | ||||
|     if isinstance(value, ExceptionGroup): | ||||
|         value = next( | ||||
|             itertools.dropwhile( | ||||
|                 lambda exc: not isinstance(exc, expect_final_exc), | ||||
|                 value.exceptions, | ||||
|             ) | ||||
|         ) | ||||
|         assert value | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -155,9 +245,15 @@ async def break_ipc_after_started( | |||
| ) -> None: | ||||
|     await ctx.started() | ||||
|     async with ctx.open_stream() as stream: | ||||
|         await stream.aclose() | ||||
|         await trio.sleep(0.2) | ||||
|         await ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: make a test which verifies the error | ||||
|         # for this, i.e. raises a `MsgTypeError` | ||||
|         # await ctx.chan.send(None) | ||||
| 
 | ||||
|         await break_ipc( | ||||
|             stream=stream, | ||||
|             pre_close=True, | ||||
|         ) | ||||
|         print('child broke IPC and terminating') | ||||
| 
 | ||||
| 
 | ||||
|  | @ -169,6 +265,7 @@ def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | |||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         with trio.fail_after(3): | ||||
|             async with tractor.open_nursery() as n: | ||||
|                 portal = await n.start_actor( | ||||
|                     'ipc_breaker', | ||||
|  | @ -186,7 +283,10 @@ def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | |||
| 
 | ||||
|                         print('parent waiting on context') | ||||
| 
 | ||||
|             print('parent exited context') | ||||
|                 print( | ||||
|                     'parent exited context\n' | ||||
|                     'parent raising KBI..\n' | ||||
|                 ) | ||||
|                 raise KeyboardInterrupt | ||||
| 
 | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|  |  | |||
|  | @ -6,6 +6,7 @@ from collections import Counter | |||
| import itertools | ||||
| import platform | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
|  | @ -143,8 +144,16 @@ def test_dynamic_pub_sub(): | |||
| 
 | ||||
|     try: | ||||
|         trio.run(main) | ||||
|     except trio.TooSlowError: | ||||
|         pass | ||||
|     except ( | ||||
|         trio.TooSlowError, | ||||
|         ExceptionGroup, | ||||
|     ) as err: | ||||
|         if isinstance(err, ExceptionGroup): | ||||
|             for suberr in err.exceptions: | ||||
|                 if isinstance(suberr, trio.TooSlowError): | ||||
|                     break | ||||
|             else: | ||||
|                 pytest.fail('Never got a `TooSlowError` ?') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -298,44 +307,69 @@ async def inf_streamer( | |||
| 
 | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
|         trio.open_nursery() as n, | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         async def bail_on_sentinel(): | ||||
|         async def close_stream_on_sentinel(): | ||||
|             async for msg in stream: | ||||
|                 if msg == 'done': | ||||
|                     print( | ||||
|                         'streamer RXed "done" sentinel msg!\n' | ||||
|                         'CLOSING `MsgStream`!' | ||||
|                     ) | ||||
|                     await stream.aclose() | ||||
|                 else: | ||||
|                     print(f'streamer received {msg}') | ||||
|             else: | ||||
|                 print('streamer exited recv loop') | ||||
| 
 | ||||
|         # start termination detector | ||||
|         n.start_soon(bail_on_sentinel) | ||||
|         tn.start_soon(close_stream_on_sentinel) | ||||
| 
 | ||||
|         for val in itertools.count(): | ||||
|         cap: int = 10000  # so that we don't spin forever when bug.. | ||||
|         for val in range(cap): | ||||
|             try: | ||||
|                 print(f'streamer sending {val}') | ||||
|                 await stream.send(val) | ||||
|             except trio.ClosedResourceError: | ||||
|                 if val > cap: | ||||
|                     raise RuntimeError( | ||||
|                         'Streamer never cancelled by sentinel?' | ||||
|                     ) | ||||
|                 await trio.sleep(0.001) | ||||
| 
 | ||||
|             # close out the stream gracefully | ||||
|             except trio.ClosedResourceError: | ||||
|                 print('transport closed on streamer side!') | ||||
|                 assert stream.closed | ||||
|                 break | ||||
|         else: | ||||
|             raise RuntimeError( | ||||
|                 'Streamer not cancelled before finished sending?' | ||||
|             ) | ||||
| 
 | ||||
|     print('terminating streamer') | ||||
|     print('streamer exited .open_streamer() block') | ||||
| 
 | ||||
| 
 | ||||
| def test_local_task_fanout_from_stream(): | ||||
| def test_local_task_fanout_from_stream( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Single stream with multiple local consumer tasks using the | ||||
|     ``MsgStream.subscribe()` api. | ||||
| 
 | ||||
|     Ensure all tasks receive all values after stream completes sending. | ||||
|     Ensure all tasks receive all values after stream completes | ||||
|     sending. | ||||
| 
 | ||||
|     ''' | ||||
|     consumers = 22 | ||||
|     consumers: int = 22 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         counts = Counter() | ||||
| 
 | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             p = await tn.start_actor( | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as tn: | ||||
|             p: tractor.Portal = await tn.start_actor( | ||||
|                 'inf_streamer', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -343,7 +377,6 @@ def test_local_task_fanout_from_stream(): | |||
|                 p.open_context(inf_streamer) as (ctx, _), | ||||
|                 ctx.open_stream() as stream, | ||||
|             ): | ||||
| 
 | ||||
|                 async def pull_and_count(name: str): | ||||
|                     # name = trio.lowlevel.current_task().name | ||||
|                     async with stream.subscribe() as recver: | ||||
|  | @ -352,7 +385,7 @@ def test_local_task_fanout_from_stream(): | |||
|                             tractor.trionics.BroadcastReceiver | ||||
|                         ) | ||||
|                         async for val in recver: | ||||
|                             # print(f'{name}: {val}') | ||||
|                             print(f'bx {name} rx: {val}') | ||||
|                             counts[name] += 1 | ||||
| 
 | ||||
|                         print(f'{name} bcaster ended') | ||||
|  | @ -362,10 +395,14 @@ def test_local_task_fanout_from_stream(): | |||
|                 with trio.fail_after(3): | ||||
|                     async with trio.open_nursery() as nurse: | ||||
|                         for i in range(consumers): | ||||
|                             nurse.start_soon(pull_and_count, i) | ||||
|                             nurse.start_soon( | ||||
|                                 pull_and_count, | ||||
|                                 i, | ||||
|                             ) | ||||
| 
 | ||||
|                         # delay to let bcast consumers pull msgs | ||||
|                         await trio.sleep(0.5) | ||||
|                         print('\nterminating') | ||||
|                         print('terminating nursery of bcast rxer consumers!') | ||||
|                         await stream.send('done') | ||||
| 
 | ||||
|             print('closed stream connection') | ||||
|  |  | |||
|  | @ -8,15 +8,13 @@ import platform | |||
| import time | ||||
| from itertools import repeat | ||||
| 
 | ||||
| from exceptiongroup import ( | ||||
|     BaseExceptionGroup, | ||||
|     ExceptionGroup, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from conftest import tractor_test, no_windows | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from conftest import no_windows | ||||
| 
 | ||||
| 
 | ||||
| def is_win(): | ||||
|  | @ -47,17 +45,19 @@ async def do_nuthin(): | |||
|     ], | ||||
|     ids=['no_args', 'unexpected_args'], | ||||
| ) | ||||
| def test_remote_error(arb_addr, args_err): | ||||
|     """Verify an error raised in a subactor that is propagated | ||||
| def test_remote_error(reg_addr, args_err): | ||||
|     ''' | ||||
|     Verify an error raised in a subactor that is propagated | ||||
|     to the parent nursery, contains the underlying boxed builtin | ||||
|     error type info and causes cancellation and reraising all the | ||||
|     way up the stack. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     args, errtype = args_err | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             # on a remote type error caused by bad input args | ||||
|  | @ -65,7 +65,9 @@ def test_remote_error(arb_addr, args_err): | |||
|             # an exception group outside the nursery since the error | ||||
|             # here and the far end task error are one in the same? | ||||
|             portal = await nursery.run_in_actor( | ||||
|                 assert_err, name='errorer', **args | ||||
|                 assert_err, | ||||
|                 name='errorer', | ||||
|                 **args | ||||
|             ) | ||||
| 
 | ||||
|             # get result(s) from main task | ||||
|  | @ -75,7 +77,7 @@ def test_remote_error(arb_addr, args_err): | |||
|                 # of this actor nursery. | ||||
|                 await portal.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.type == errtype | ||||
|                 assert err.boxed_type == errtype | ||||
|                 print("Look Maa that actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -84,20 +86,33 @@ def test_remote_error(arb_addr, args_err): | |||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.type == errtype | ||||
|         assert excinfo.value.boxed_type == errtype | ||||
| 
 | ||||
|     else: | ||||
|         # the root task will also error on the `.result()` call | ||||
|         # so we expect an error from there AND the child. | ||||
|         with pytest.raises(BaseExceptionGroup) as excinfo: | ||||
|         # the root task will also error on the `Portal.result()` | ||||
|         # call so we expect an error from there AND the child. | ||||
|         # |_ tho seems like on new `trio` this doesn't always | ||||
|         #    happen? | ||||
|         with pytest.raises(( | ||||
|             BaseExceptionGroup, | ||||
|             tractor.RemoteActorError, | ||||
|         )) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # ensure boxed errors | ||||
|         for exc in excinfo.value.exceptions: | ||||
|             assert exc.type == errtype | ||||
|         # ensure boxed errors are `errtype` | ||||
|         err: BaseException = excinfo.value | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             suberrs: list[BaseException] = err.exceptions | ||||
|         else: | ||||
|             suberrs: list[BaseException] = [err] | ||||
| 
 | ||||
|         for exc in suberrs: | ||||
|             assert exc.boxed_type == errtype | ||||
| 
 | ||||
| 
 | ||||
| def test_multierror(arb_addr): | ||||
| def test_multierror( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     ''' | ||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
|     more then one actor errors. | ||||
|  | @ -105,7 +120,7 @@ def test_multierror(arb_addr): | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             await nursery.run_in_actor(assert_err, name='errorer1') | ||||
|  | @ -115,7 +130,7 @@ def test_multierror(arb_addr): | |||
|             try: | ||||
|                 await portal2.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.type == AssertionError | ||||
|                 assert err.boxed_type == AssertionError | ||||
|                 print("Look Maa that first actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -130,14 +145,14 @@ def test_multierror(arb_addr): | |||
| @pytest.mark.parametrize( | ||||
|     'num_subactors', range(25, 26), | ||||
| ) | ||||
| def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay): | ||||
| def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | ||||
|     """Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
|     more then one actor errors and also with a delay before failure | ||||
|     to test failure during an ongoing spawning. | ||||
|     """ | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             for i in range(num_subactors): | ||||
|  | @ -167,7 +182,7 @@ def test_multierror_fast_nursery(arb_addr, start_method, num_subactors, delay): | |||
| 
 | ||||
|     for exc in exceptions: | ||||
|         assert isinstance(exc, tractor.RemoteActorError) | ||||
|         assert exc.type == AssertionError | ||||
|         assert exc.boxed_type == AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def do_nothing(): | ||||
|  | @ -175,15 +190,20 @@ async def do_nothing(): | |||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize('mechanism', ['nursery_cancel', KeyboardInterrupt]) | ||||
| def test_cancel_single_subactor(arb_addr, mechanism): | ||||
|     """Ensure a ``ActorNursery.start_actor()`` spawned subactor | ||||
| def test_cancel_single_subactor(reg_addr, mechanism): | ||||
|     ''' | ||||
|     Ensure a ``ActorNursery.start_actor()`` spawned subactor | ||||
|     cancels when the nursery is cancelled. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     async def spawn_actor(): | ||||
|         """Spawn an actor that blocks indefinitely. | ||||
|         """ | ||||
|         ''' | ||||
|         Spawn an actor that blocks indefinitely then cancel via | ||||
|         either `ActorNursery.cancel()` or an exception raise. | ||||
| 
 | ||||
|         ''' | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as nursery: | ||||
| 
 | ||||
|             portal = await nursery.start_actor( | ||||
|  | @ -303,7 +323,7 @@ async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | |||
|                         await portal.run(func, **kwargs) | ||||
| 
 | ||||
|                     except tractor.RemoteActorError as err: | ||||
|                         assert err.type == err_type | ||||
|                         assert err.boxed_type == err_type | ||||
|                         # we only expect this first error to propagate | ||||
|                         # (all other daemons are cancelled before they | ||||
|                         # can be scheduled) | ||||
|  | @ -322,11 +342,11 @@ async def test_some_cancels_all(num_actors_and_errs, start_method, loglevel): | |||
|             assert len(err.exceptions) == num_actors | ||||
|             for exc in err.exceptions: | ||||
|                 if isinstance(exc, tractor.RemoteActorError): | ||||
|                     assert exc.type == err_type | ||||
|                     assert exc.boxed_type == err_type | ||||
|                 else: | ||||
|                     assert isinstance(exc, trio.Cancelled) | ||||
|         elif isinstance(err, tractor.RemoteActorError): | ||||
|             assert err.type == err_type | ||||
|             assert err.boxed_type == err_type | ||||
| 
 | ||||
|         assert n.cancelled is True | ||||
|         assert not n._children | ||||
|  | @ -405,7 +425,7 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     elif isinstance(subexc, tractor.RemoteActorError): | ||||
|                         # on windows it seems we can't exactly be sure wtf | ||||
|                         # will happen.. | ||||
|                         assert subexc.type in ( | ||||
|                         assert subexc.boxed_type in ( | ||||
|                             tractor.RemoteActorError, | ||||
|                             trio.Cancelled, | ||||
|                             BaseExceptionGroup, | ||||
|  | @ -415,7 +435,7 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                         for subsub in subexc.exceptions: | ||||
| 
 | ||||
|                             if subsub in (tractor.RemoteActorError,): | ||||
|                                 subsub = subsub.type | ||||
|                                 subsub = subsub.boxed_type | ||||
| 
 | ||||
|                             assert type(subsub) in ( | ||||
|                                 trio.Cancelled, | ||||
|  | @ -430,16 +450,16 @@ async def test_nested_multierrors(loglevel, start_method): | |||
|                     # we get back the (sent) cancel signal instead | ||||
|                     if is_win(): | ||||
|                         if isinstance(subexc, tractor.RemoteActorError): | ||||
|                             assert subexc.type in ( | ||||
|                             assert subexc.boxed_type in ( | ||||
|                                 BaseExceptionGroup, | ||||
|                                 tractor.RemoteActorError | ||||
|                             ) | ||||
|                         else: | ||||
|                             assert isinstance(subexc, BaseExceptionGroup) | ||||
|                     else: | ||||
|                         assert subexc.type is ExceptionGroup | ||||
|                         assert subexc.boxed_type is ExceptionGroup | ||||
|                 else: | ||||
|                     assert subexc.type in ( | ||||
|                     assert subexc.boxed_type in ( | ||||
|                         tractor.RemoteActorError, | ||||
|                         trio.Cancelled | ||||
|                     ) | ||||
|  |  | |||
|  | @ -0,0 +1,917 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     structs, | ||||
|     msgpack, | ||||
|     Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     mk_codec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     _payload_msgs, | ||||
|     log, | ||||
|     PayloadMsg, | ||||
|     Started, | ||||
|     mk_msg_spec, | ||||
| ) | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| def mk_custom_codec( | ||||
|     pld_spec: Union[Type]|Any, | ||||
|     add_hooks: bool, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` | ||||
|     which only loads `pld_spec` (like `NamespacePath`) types. | ||||
| 
 | ||||
|     ''' | ||||
|     uid: tuple[str, str] = tractor.current_actor().uid | ||||
| 
 | ||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type | ||||
|     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair | ||||
|     # to cast to/from that type on the wire. See the docs: | ||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
| 
 | ||||
|     def enc_nsp(obj: Any) -> Any: | ||||
|         print(f'{uid} ENC HOOK') | ||||
|         match obj: | ||||
|             case NamespacePath(): | ||||
|                 print( | ||||
|                     f'{uid}: `NamespacePath`-Only ENCODE?\n' | ||||
|                     f'obj-> `{obj}`: {type(obj)}\n' | ||||
|                 ) | ||||
|                 # if type(obj) != NamespacePath: | ||||
|                 #     breakpoint() | ||||
|                 return str(obj) | ||||
| 
 | ||||
|         print( | ||||
|             f'{uid}\n' | ||||
|             'CUSTOM ENCODE\n' | ||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||
|         ) | ||||
|         logmsg: str = ( | ||||
|             f'{uid}\n' | ||||
|             'FAILED ENCODE\n' | ||||
|             f'obj-> `{obj}: {type(obj)}`\n' | ||||
|         ) | ||||
|         raise NotImplementedError(logmsg) | ||||
| 
 | ||||
|     def dec_nsp( | ||||
|         obj_type: Type, | ||||
|         obj: Any, | ||||
| 
 | ||||
|     ) -> Any: | ||||
|         print( | ||||
|             f'{uid}\n' | ||||
|             'CUSTOM DECODE\n' | ||||
|             f'type-arg-> {obj_type}\n' | ||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||
|         ) | ||||
|         nsp = None | ||||
| 
 | ||||
|         if ( | ||||
|             obj_type is NamespacePath | ||||
|             and isinstance(obj, str) | ||||
|             and ':' in obj | ||||
|         ): | ||||
|             nsp = NamespacePath(obj) | ||||
|             # TODO: we could built a generic handler using | ||||
|             # JUST matching the obj_type part? | ||||
|             # nsp = obj_type(obj) | ||||
| 
 | ||||
|         if nsp: | ||||
|             print(f'Returning NSP instance: {nsp}') | ||||
|             return nsp | ||||
| 
 | ||||
|         logmsg: str = ( | ||||
|             f'{uid}\n' | ||||
|             'FAILED DECODE\n' | ||||
|             f'type-> {obj_type}\n' | ||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n\n' | ||||
|             f'current codec:\n' | ||||
|             f'{current_codec()}\n' | ||||
|         ) | ||||
|         # TODO: figure out the ignore subsys for this! | ||||
|         # -[ ] option whether to defense-relay backc the msg | ||||
|         #   inside an `Invalid`/`Ignore` | ||||
|         # -[ ] how to make this handling pluggable such that a | ||||
|         #   `Channel`/`MsgTransport` can intercept and process | ||||
|         #   back msgs either via exception handling or some other | ||||
|         #   signal? | ||||
|         log.warning(logmsg) | ||||
|         # NOTE: this delivers the invalid | ||||
|         # value up to `msgspec`'s decoding | ||||
|         # machinery for error raising. | ||||
|         return obj | ||||
|         # raise NotImplementedError(logmsg) | ||||
| 
 | ||||
|     nsp_codec: MsgCodec = mk_codec( | ||||
|         ipc_pld_spec=pld_spec, | ||||
| 
 | ||||
|         # NOTE XXX: the encode hook MUST be used no matter what since | ||||
|         # our `NamespacePath` is not any of a `Any` native type nor | ||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||
|         # how to encode it unless we provide the custom hook. | ||||
|         # | ||||
|         # AGAIN that is, regardless of whether we spec an | ||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||
|         # how to enc `NamespacePath` (nsp), so we add a custom | ||||
|         # hook to do that ALWAYS. | ||||
|         enc_hook=enc_nsp if add_hooks else None, | ||||
| 
 | ||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||
|         # `Decoder`? so it won't work in tandem with the | ||||
|         # `ipc_pld_spec` passed above? | ||||
|         dec_hook=dec_nsp if add_hooks else None, | ||||
|     ) | ||||
|     return nsp_codec | ||||
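| # Usage sketch (hypothetical, must run inside a `trio`/`tractor` | ||||
| # runtime and assumes `apply_codec()` is entered as a ctx-mngr | ||||
| # yielding the applied codec): | ||||
| # | ||||
| # nsp_codec: MsgCodec = mk_custom_codec( | ||||
| #     pld_spec=NamespacePath, | ||||
| #     add_hooks=True, | ||||
| # ) | ||||
| # with apply_codec(nsp_codec) as codec: | ||||
| #     chk_codec_applied(expect_codec=codec) | ||||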
| 
 | ||||
| 
 | ||||
| def chk_codec_applied( | ||||
|     expect_codec: MsgCodec, | ||||
|     enter_value: MsgCodec|None = None, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     Buncha sanity checks ensuring that the IPC channel's | ||||
|     context-vars are set to the expected codec and that our | ||||
|     ctx-var wrapper APIs match the same. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: play with tricycle again, bc this is supposed to work | ||||
|     # the way we want? | ||||
|     # | ||||
|     # TreeVar | ||||
|     # task: trio.Task = trio.lowlevel.current_task() | ||||
|     # curr_codec = _ctxvar_MsgCodec.get_in(task) | ||||
| 
 | ||||
|     # ContextVar | ||||
|     # task_ctx: Context = task.context | ||||
|     # assert _ctxvar_MsgCodec in task_ctx | ||||
|     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] | ||||
| 
 | ||||
|     # NOTE: currently we use this! | ||||
|     # RunVar | ||||
|     curr_codec: MsgCodec = current_codec() | ||||
|     last_read_codec = _ctxvar_MsgCodec.get() | ||||
|     # assert curr_codec is last_read_codec | ||||
| 
 | ||||
|     assert ( | ||||
|         (same_codec := expect_codec) is | ||||
|         # returned from `mk_codec()` | ||||
| 
 | ||||
|         # yielded value from `apply_codec()` | ||||
| 
 | ||||
|         # read from current task's `contextvars.Context` | ||||
|         curr_codec is | ||||
|         last_read_codec | ||||
| 
 | ||||
|         # the default `msgspec` settings | ||||
|         is not _codec._def_msgspec_codec | ||||
|         is not _codec._def_tractor_codec | ||||
|     ) | ||||
| 
 | ||||
|     if enter_value: | ||||
|         assert enter_value is same_codec | ||||
| 
 | ||||
| 
 | ||||
| def iter_maybe_sends( | ||||
|     send_items: dict[Union[Type], Any] | list[tuple], | ||||
|     ipc_pld_spec: Union[Type] | Any, | ||||
|     add_codec_hooks: bool, | ||||
| 
 | ||||
|     codec: MsgCodec|None = None, | ||||
| 
 | ||||
| ) -> tuple[str, Any, bool]:  # yielded per send-item | ||||
| 
 | ||||
|     if isinstance(send_items, dict): | ||||
|         send_items = send_items.items() | ||||
| 
 | ||||
|     for ( | ||||
|         send_type_spec, | ||||
|         send_value, | ||||
|     ) in send_items: | ||||
| 
 | ||||
|         expect_roundtrip: bool = False | ||||
| 
 | ||||
|         # values-to-typespec sanity | ||||
|         send_type = type(send_value) | ||||
|         assert send_type == send_type_spec or ( | ||||
|             (subtypes := getattr(send_type_spec, '__args__', None)) | ||||
|             and send_type in subtypes | ||||
|         ) | ||||
| 
 | ||||
|         spec_subtypes: set[Union[Type]] = ( | ||||
|              getattr( | ||||
|                  ipc_pld_spec, | ||||
|                  '__args__', | ||||
|                  {ipc_pld_spec,}, | ||||
|              ) | ||||
|         ) | ||||
|         send_in_spec: bool = ( | ||||
|             send_type == ipc_pld_spec | ||||
|             or ( | ||||
|                 ipc_pld_spec != Any | ||||
|                 and  # presume `Union` of types | ||||
|                 send_type in spec_subtypes | ||||
|             ) | ||||
|             or ( | ||||
|                 ipc_pld_spec == Any | ||||
|                 and | ||||
|                 send_type != NamespacePath | ||||
|             ) | ||||
|         ) | ||||
|         expect_roundtrip = ( | ||||
|             send_in_spec | ||||
|             # any spec should support all other | ||||
|             # builtin py values that we send | ||||
|             # except our custom nsp type which | ||||
|             # we should be able to send as long | ||||
|             # as we provide the custom codec hooks. | ||||
|             or ( | ||||
|                 ipc_pld_spec == Any | ||||
|                 and | ||||
|                 send_type == NamespacePath | ||||
|                 and | ||||
|                 add_codec_hooks | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         if codec is not None: | ||||
|             # XXX FIRST XXX ensure roundtripping works | ||||
|             # before touching any IPC primitives/APIs. | ||||
|             wire_bytes: bytes = codec.encode( | ||||
|                 Started( | ||||
|                     cid='blahblah', | ||||
|                     pld=send_value, | ||||
|                 ) | ||||
|             ) | ||||
|             # NOTE: demonstrates decoding via our native SCIPP | ||||
|             # (structured-conc-inter-proc-protocol) msg-spec. | ||||
|             try: | ||||
|                 msg: Started = codec.decode(wire_bytes) | ||||
|                 if not expect_roundtrip: | ||||
|                     pytest.fail( | ||||
|                         f'NOT-EXPECTED able to roundtrip value given spec:\n' | ||||
|                         f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                         f'value -> {send_value}: {send_type}\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 pld = msg.pld | ||||
|                 assert pld == send_value | ||||
| 
 | ||||
|             except ValidationError: | ||||
|                 if expect_roundtrip: | ||||
|                     pytest.fail( | ||||
|                         f'EXPECTED to roundtrip value given spec:\n' | ||||
|                         f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                         f'value -> {send_value}: {send_type}\n' | ||||
|                     ) | ||||
| 
 | ||||
|         yield ( | ||||
|             str(send_type), | ||||
|             send_value, | ||||
|             expect_roundtrip, | ||||
|         ) | ||||
| 
 | ||||
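| # The spec-membership checks above lean on `typing` introspection; | ||||
| # e.g. with py3.10+ union syntax, | ||||
| # | ||||
| #   (NamespacePath | None).__args__ == (NamespacePath, type(None)) | ||||
| #   getattr(int, '__args__', {int,}) == {int,}  # non-union fallback | ||||
| # | ||||
| # so a `None` pld is "in spec" for `NamespacePath|None` while a plain | ||||
| # `str` is not. | ||||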
| 
 | ||||
| def dec_type_union( | ||||
|     type_names: list[str], | ||||
| ) -> Type: | ||||
|     ''' | ||||
|     Look up types by name, compile into a list and then create and | ||||
|     return a `typing.Union` from the full set. | ||||
| 
 | ||||
|     ''' | ||||
|     import importlib | ||||
|     types: list[Type] = [] | ||||
|     for type_name in type_names: | ||||
|         for mod in [ | ||||
|             typing, | ||||
|             importlib.import_module(__name__), | ||||
|         ]: | ||||
|             if type_ref := getattr( | ||||
|                 mod, | ||||
|                 type_name, | ||||
|                 False, | ||||
|             ): | ||||
|                 types.append(type_ref) | ||||
| 
 | ||||
|     # special case handling only.. | ||||
|     # ipc_pld_spec: Union[Type] = eval( | ||||
|     #     pld_spec_str, | ||||
|     #     {},  # globals | ||||
|     #     {'typing': typing},  # locals | ||||
|     # ) | ||||
| 
 | ||||
|     return Union[*types] | ||||
| 
 | ||||
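| # NOTE: the `Union[*types]` subscript-unpack above requires py3.11+ | ||||
| # (PEP 646); `Union[tuple(types)]` is the pre-3.11 spelling. Also, | ||||
| # names only resolve from `typing` or this module's namespace, so | ||||
| # builtins like `str` will NOT; e.g. (sketch), | ||||
| # | ||||
| #   dec_type_union(['NamespacePath'])  # -> NamespacePath | ||||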
| 
 | ||||
| def enc_type_union( | ||||
|     union_or_type: Union[Type]|Type, | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Encode a type-union or single type to a list of type-name-strings | ||||
|     ready for IPC interchange. | ||||
| 
 | ||||
|     ''' | ||||
|     type_strs: list[str] = [] | ||||
|     for typ in getattr( | ||||
|         union_or_type, | ||||
|         '__args__', | ||||
|         {union_or_type,}, | ||||
|     ): | ||||
|         type_strs.append(typ.__qualname__) | ||||
| 
 | ||||
|     return type_strs | ||||
| 
 | ||||
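| # Round-trip sketch pairing this with `dec_type_union()` above: | ||||
| # | ||||
| #   enc_type_union(NamespacePath | None)  # -> ['NamespacePath', 'NoneType'] | ||||
| # | ||||
| # noting that `'NoneType'` only decodes back if that name is | ||||
| # resolvable in the receiving namespace. | ||||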
| 
 | ||||
| @tractor.context | ||||
| async def send_back_values( | ||||
|     ctx: Context, | ||||
|     expect_debug: bool, | ||||
|     pld_spec_type_strs: list[str], | ||||
|     add_hooks: bool, | ||||
|     started_msg_bytes: bytes, | ||||
|     expect_ipc_send: dict[str, tuple[Any, bool]], | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Set up a custom codec to load instances of `NamespacePath` | ||||
|     and ensure we can round trip a func ref with our parent. | ||||
| 
 | ||||
|     ''' | ||||
|     uid: tuple = tractor.current_actor().uid | ||||
| 
 | ||||
|     # debug mode sanity check (prolly superfluous but, meh) | ||||
|     assert expect_debug == _state.debug_mode() | ||||
| 
 | ||||
|     # init state in sub-actor should be default | ||||
|     chk_codec_applied( | ||||
|         expect_codec=_codec._def_tractor_codec, | ||||
|     ) | ||||
| 
 | ||||
|     # load pld spec from input str | ||||
|     ipc_pld_spec = dec_type_union( | ||||
|         pld_spec_type_strs, | ||||
|     ) | ||||
|     pld_spec_str = str(ipc_pld_spec) | ||||
| 
 | ||||
|     # same config as on the parent side. | ||||
|     nsp_codec: MsgCodec = mk_custom_codec( | ||||
|         pld_spec=ipc_pld_spec, | ||||
|         add_hooks=add_hooks, | ||||
|     ) | ||||
|     with ( | ||||
|         apply_codec(nsp_codec) as codec, | ||||
|     ): | ||||
|         chk_codec_applied( | ||||
|             expect_codec=nsp_codec, | ||||
|             enter_value=codec, | ||||
|         ) | ||||
| 
 | ||||
|         print( | ||||
|             f'{uid}: attempting `Started`-bytes DECODE..\n' | ||||
|         ) | ||||
|         try: | ||||
|             msg: Started = nsp_codec.decode(started_msg_bytes) | ||||
|             expected_pld_spec_str: str = msg.pld | ||||
|             assert pld_spec_str == expected_pld_spec_str | ||||
| 
 | ||||
|         # TODO: maybe we should add our own wrapper error so as to | ||||
|         # be interchange-lib agnostic? | ||||
|         # -[ ] the error type is wtv is raised from the hook so we | ||||
|         #   could also require a type-class of errors for | ||||
|         #   indicating whether the hook-failure can be handled by | ||||
|         #   a nasty-dialog-unprot sub-sys? | ||||
|         except ValidationError: | ||||
| 
 | ||||
|             # NOTE: only in the `Any` spec case do we expect this to | ||||
|             # work since otherwise no spec covers a plain-ol' | ||||
|             # `.pld: str` | ||||
|             if pld_spec_str == 'Any': | ||||
|                 raise | ||||
|             else: | ||||
|                 print( | ||||
|                     f'{uid}: (correctly) unable to DECODE `Started`-bytes\n' | ||||
|                     f'{started_msg_bytes}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         iter_send_val_items = iter(expect_ipc_send.values()) | ||||
|         sent: list[Any] = [] | ||||
|         for send_value, expect_send in iter_send_val_items: | ||||
|             try: | ||||
|                 print( | ||||
|                     f'{uid}: attempting to `.started({send_value})`\n' | ||||
|                     f'=> expect_send: {expect_send}\n' | ||||
|                     f'SINCE, ipc_pld_spec: {ipc_pld_spec}\n' | ||||
|                     f'AND, codec: {codec}\n' | ||||
|                 ) | ||||
|                 await ctx.started(send_value) | ||||
|                 sent.append(send_value) | ||||
|                 if not expect_send: | ||||
| 
 | ||||
|                     # XXX NOTE XXX THIS WON'T WORK WITHOUT SPECIAL | ||||
|                     # `str` handling! or special debug mode IPC | ||||
|                     # msgs! | ||||
|                     await tractor.pause() | ||||
| 
 | ||||
|                     raise RuntimeError( | ||||
|                         f'NOT-EXPECTED able to roundtrip value given spec:\n' | ||||
|                         f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                         f'value -> {send_value}: {type(send_value)}\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 break  # move on to streaming block.. | ||||
| 
 | ||||
|             except tractor.MsgTypeError: | ||||
|                 await tractor.pause() | ||||
| 
 | ||||
|                 if expect_send: | ||||
|                     raise RuntimeError( | ||||
|                         f'EXPECTED to `.started()` value given spec:\n' | ||||
|                         f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                         f'value -> {send_value}: {type(send_value)}\n' | ||||
|                     ) | ||||
| 
 | ||||
|         async with ctx.open_stream() as ipc: | ||||
|             print( | ||||
|                 f'{uid}: Entering streaming block to send remaining values..' | ||||
|             ) | ||||
| 
 | ||||
|             for send_value, expect_send in iter_send_val_items: | ||||
|                 send_type: Type = type(send_value) | ||||
|                 print( | ||||
|                     '------ - ------\n' | ||||
|                     f'{uid}: SENDING NEXT VALUE\n' | ||||
|                     f'ipc_pld_spec: {ipc_pld_spec}\n' | ||||
|                     f'expect_send: {expect_send}\n' | ||||
|                     f'val: {send_value}\n' | ||||
|                     '------ - ------\n' | ||||
|                 ) | ||||
|                 try: | ||||
|                     await ipc.send(send_value) | ||||
|                     print(f'***\n{uid}-CHILD sent {send_value!r}\n***\n') | ||||
|                     sent.append(send_value) | ||||
| 
 | ||||
|                     # NOTE: should only raise above on | ||||
|                     # `.started()` or a `Return` | ||||
|                     # if not expect_send: | ||||
|                     #     raise RuntimeError( | ||||
|                     #         f'NOT-EXPECTED able to roundtrip value given spec:\n' | ||||
|                     #         f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                     #         f'value -> {send_value}: {send_type}\n' | ||||
|                     #     ) | ||||
| 
 | ||||
|                 except ValidationError: | ||||
|                     print(f'{uid} FAILED TO SEND {send_value}!') | ||||
| 
 | ||||
|                     # await tractor.pause() | ||||
|                     if expect_send: | ||||
|                         raise RuntimeError( | ||||
|                             f'EXPECTED to roundtrip value given spec:\n' | ||||
|                             f'ipc_pld_spec -> {ipc_pld_spec}\n' | ||||
|                             f'value -> {send_value}: {send_type}\n' | ||||
|                         ) | ||||
|                     # continue | ||||
| 
 | ||||
|             else: | ||||
|                 print( | ||||
|                     f'{uid}: finished sending all values\n' | ||||
|                     'Should be exiting stream block!\n' | ||||
|                 ) | ||||
| 
 | ||||
|         print(f'{uid}: exited streaming block!') | ||||
| 
 | ||||
|         # TODO: this won't be true bc in streaming phase we DO NOT | ||||
|         # msgspec check outbound msgs! | ||||
|         # -[ ] once we implement the receiver side `InvalidMsg` | ||||
|         #   then we can expect it here? | ||||
|         # assert ( | ||||
|         #     len(sent) | ||||
|         #     == | ||||
|         #     len([val | ||||
|         #          for val, expect in | ||||
|         #          expect_ipc_send.values() | ||||
|         #          if expect is True]) | ||||
|         # ) | ||||
| 
 | ||||
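| # A self-contained mini-repro (hypothetical, NOT part of this suite's | ||||
| # run) of the spec-limited decode failure mode exercised above, using | ||||
| # plain `msgspec` without any `tractor` machinery: | ||||
| def _spec_limited_decode_sketch() -> None: | ||||
|     import msgspec | ||||
| 
 | ||||
|     class _BoxSketch(msgspec.Struct): | ||||
|         cid: str | ||||
|         pld: int  # pld-spec limited to `int` | ||||
| 
 | ||||
|     wire: bytes = msgspec.msgpack.encode({'cid': 'x', 'pld': 'nope'}) | ||||
|     try: | ||||
|         msgspec.msgpack.decode(wire, type=_BoxSketch) | ||||
|     except msgspec.ValidationError: | ||||
|         pass  # expected: a `str` pld isn't covered by the `int` spec | ||||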
| 
 | ||||
| def ex_func(*args): | ||||
|     print(f'ex_func({args})') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'ipc_pld_spec', | ||||
|     [ | ||||
|         Any, | ||||
|         NamespacePath, | ||||
|         NamespacePath|None,  # the "maybe" spec Bo | ||||
|     ], | ||||
|     ids=[ | ||||
|         'any_type', | ||||
|         'nsp_type', | ||||
|         'maybe_nsp_type', | ||||
|     ] | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'add_codec_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||
| ) | ||||
| def test_codec_hooks_mod( | ||||
|     debug_mode: bool, | ||||
|     ipc_pld_spec: Union[Type]|Any, | ||||
|     # send_value: None|str|NamespacePath, | ||||
|     add_codec_hooks: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Audit the `.msg.MsgCodec` override API details given our impl | ||||
|     uses `contextvars` to accomplish per-`trio`-task codec | ||||
|     application around an inter-proc-task-comms context. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         nsp = NamespacePath.from_ref(ex_func) | ||||
|         send_items: dict[Union, Any] = { | ||||
|             Union[None]: None, | ||||
|             Union[NamespacePath]: nsp, | ||||
|             Union[str]: str(nsp), | ||||
|         } | ||||
| 
 | ||||
|         # init default state for actor | ||||
|         chk_codec_applied( | ||||
|             expect_codec=_codec._def_tractor_codec, | ||||
|         ) | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             p: tractor.Portal = await an.start_actor( | ||||
|                 'sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: 2 cases: | ||||
|             # - codec not modified -> decode nsp as `str` | ||||
|             # - codec modified with hooks -> decode nsp as | ||||
|             #   `NamespacePath` | ||||
|             nsp_codec: MsgCodec = mk_custom_codec( | ||||
|                 pld_spec=ipc_pld_spec, | ||||
|                 add_hooks=add_codec_hooks, | ||||
|             ) | ||||
|             with apply_codec(nsp_codec) as codec: | ||||
|                 chk_codec_applied( | ||||
|                     expect_codec=nsp_codec, | ||||
|                     enter_value=codec, | ||||
|                 ) | ||||
| 
 | ||||
|                 expect_ipc_send: dict[str, tuple[Any, bool]] = {} | ||||
| 
 | ||||
|                 report: str = ( | ||||
|                     'Parent report on send values with\n' | ||||
|                     f'ipc_pld_spec: {ipc_pld_spec}\n' | ||||
|                     '       ------ - ------\n' | ||||
|                 ) | ||||
|                 for val_type_str, val, expect_send in iter_maybe_sends( | ||||
|                     send_items, | ||||
|                     ipc_pld_spec, | ||||
|                     add_codec_hooks=add_codec_hooks, | ||||
|                 ): | ||||
|                     report += ( | ||||
|                         f'send_value: {val}: {type(val)} ' | ||||
|                         f'=> expect_send: {expect_send}\n' | ||||
|                     ) | ||||
|                     expect_ipc_send[val_type_str] = (val, expect_send) | ||||
| 
 | ||||
|                 print( | ||||
|                     report + | ||||
|                     '       ------ - ------\n' | ||||
|                 ) | ||||
|                 assert len(expect_ipc_send) == len(send_items) | ||||
|                 # now try over real IPC with the subactor | ||||
|                 # expect_ipc_roundtrip: bool = True | ||||
|                 expected_started = Started( | ||||
|                     cid='cid', | ||||
|                     pld=str(ipc_pld_spec), | ||||
|                 ) | ||||
|                 # build list of values we expect to receive from | ||||
|                 # the subactor. | ||||
|                 expect_to_send: list[Any] = [ | ||||
|                     val | ||||
|                     for val, expect_send in expect_ipc_send.values() | ||||
|                     if expect_send | ||||
|                 ] | ||||
| 
 | ||||
|                 pld_spec_type_strs: list[str] = enc_type_union(ipc_pld_spec) | ||||
| 
 | ||||
|                 # XXX should raise an mte (`MsgTypeError`) | ||||
|                 # when `add_codec_hooks == False` bc the input | ||||
|                 # `expect_ipc_send` kwarg has a nsp which can't be | ||||
|                 # serialized! | ||||
|                 # | ||||
|                 # TODO: can we ensure this happens from the | ||||
|                 # `Return`-side (aka the sub) as well? | ||||
|                 if not add_codec_hooks: | ||||
|                     try: | ||||
|                         async with p.open_context( | ||||
|                             send_back_values, | ||||
|                             expect_debug=debug_mode, | ||||
|                             pld_spec_type_strs=pld_spec_type_strs, | ||||
|                             add_hooks=add_codec_hooks, | ||||
|                             started_msg_bytes=nsp_codec.encode(expected_started), | ||||
| 
 | ||||
|                             # XXX NOTE bc we send a `NamespacePath` in this kwarg | ||||
|                             expect_ipc_send=expect_ipc_send, | ||||
| 
 | ||||
|                         ) as (ctx, first): | ||||
|                             pytest.fail('ctx should fail to open without custom enc_hook!?') | ||||
| 
 | ||||
|                     # this test passes bc we can go no further! | ||||
|                     except MsgTypeError: | ||||
|                         # teardown nursery | ||||
|                         await p.cancel_actor() | ||||
|                         return | ||||
| 
 | ||||
|                 # TODO: send the original nsp here and | ||||
|                 # test with `limit_msg_spec()` above? | ||||
|                 # await tractor.pause() | ||||
|                 print('PARENT opening IPC ctx!\n') | ||||
|                 async with ( | ||||
| 
 | ||||
|                     # XXX should raise an mte (`MsgTypeError`) | ||||
|                     # when `add_codec_hooks == False`.. | ||||
|                     p.open_context( | ||||
|                         send_back_values, | ||||
|                         expect_debug=debug_mode, | ||||
|                         pld_spec_type_strs=pld_spec_type_strs, | ||||
|                         add_hooks=add_codec_hooks, | ||||
|                         started_msg_bytes=nsp_codec.encode(expected_started), | ||||
|                         expect_ipc_send=expect_ipc_send, | ||||
|                     ) as (ctx, first), | ||||
| 
 | ||||
|                     ctx.open_stream() as ipc, | ||||
|                 ): | ||||
|                     # ensure codec is still applied across | ||||
|                     # `tractor.Context` + its embedded nursery. | ||||
|                     chk_codec_applied( | ||||
|                         expect_codec=nsp_codec, | ||||
|                         enter_value=codec, | ||||
|                     ) | ||||
|                     print( | ||||
|                         'root: ENTERING CONTEXT BLOCK\n' | ||||
|                         f'type(first): {type(first)}\n' | ||||
|                         f'first: {first}\n' | ||||
|                     ) | ||||
|                     expect_to_send.remove(first) | ||||
| 
 | ||||
|                     # TODO: explicit values we expect depending on | ||||
|                     # codec config! | ||||
|                     # assert first == first_val | ||||
|                     # assert first == f'{__name__}:ex_func' | ||||
| 
 | ||||
|                     async for next_sent in ipc: | ||||
|                         print( | ||||
|                             'Parent: child sent next value\n' | ||||
|                             f'{next_sent}: {type(next_sent)}\n' | ||||
|                         ) | ||||
|                         if expect_to_send: | ||||
|                             expect_to_send.remove(next_sent) | ||||
|                         else: | ||||
|                             print('PARENT should terminate stream loop + block!') | ||||
| 
 | ||||
|                     # all sent values should have arrived! | ||||
|                     assert not expect_to_send | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def chk_pld_type( | ||||
|     payload_spec: Type[Struct]|Any, | ||||
|     pld: Any, | ||||
| 
 | ||||
|     expect_roundtrip: bool|None = None, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     pld_val_type: Type = type(pld) | ||||
| 
 | ||||
|     # TODO: verify that the overridden subtypes | ||||
|     # DO NOT have modified type-annots from original! | ||||
|     # 'Start',  .pld: FuncSpec | ||||
|     # 'StartAck',  .pld: IpcCtxSpec | ||||
|     # 'Stop',  .pld: UNSET | ||||
|     # 'Error',  .pld: ErrorData | ||||
| 
 | ||||
|     codec: MsgCodec = mk_codec( | ||||
|         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||
|         # type union. | ||||
|         ipc_pld_spec=payload_spec, | ||||
|     ) | ||||
| 
 | ||||
|     # make a one-off dec to compare with our `MsgCodec` instance | ||||
|     # which does the below `mk_msg_spec()` call internally | ||||
|     ipc_msg_spec: Union[Type[Struct]] | ||||
|     msg_types: list[PayloadMsg[payload_spec]] | ||||
|     ( | ||||
|         ipc_msg_spec, | ||||
|         msg_types, | ||||
|     ) = mk_msg_spec( | ||||
|         payload_type_union=payload_spec, | ||||
|     ) | ||||
|     _enc = msgpack.Encoder() | ||||
|     _dec = msgpack.Decoder( | ||||
|         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||
|     ) | ||||
| 
 | ||||
|     assert ( | ||||
|         payload_spec | ||||
|         == | ||||
|         codec.pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # assert codec.dec == dec | ||||
|     # | ||||
|     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||
|     # to `str` they seem to match ?? .. kk | ||||
| 
 | ||||
|     assert ( | ||||
|         str(ipc_msg_spec) | ||||
|         == | ||||
|         str(codec.msg_spec) | ||||
|         == | ||||
|         str(_dec.type) | ||||
|         == | ||||
|         str(codec.dec.type) | ||||
|     ) | ||||
| 
 | ||||
|     # verify the boxed-type for all variable payload-type msgs. | ||||
|     if not msg_types: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     roundtrip: bool|None = None | ||||
|     pld_spec_msg_names: list[str] = [ | ||||
|         td.__name__ for td in _payload_msgs | ||||
|     ] | ||||
|     for typedef in msg_types: | ||||
| 
 | ||||
|         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||
|         if skip_runtime_msg: | ||||
|             continue | ||||
| 
 | ||||
|         pld_field = structs.fields(typedef)[1] | ||||
|         # TODO: does this need to work to get all subtypes to adhere? | ||||
|         assert pld_field.type is payload_spec | ||||
| 
 | ||||
|         kwargs: dict[str, Any] = { | ||||
|             'cid': '666', | ||||
|             'pld': pld, | ||||
|         } | ||||
|         enc_msg: PayloadMsg = typedef(**kwargs) | ||||
| 
 | ||||
|         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||
|         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||
|         assert _wire_bytes == wire_bytes | ||||
| 
 | ||||
|         ve: ValidationError|None = None | ||||
|         try: | ||||
|             dec_msg = codec.dec.decode(wire_bytes) | ||||
|             _dec_msg = _dec.decode(wire_bytes) | ||||
| 
 | ||||
|             # decoded msg and thus payload should be exactly same! | ||||
|             assert (roundtrip := ( | ||||
|                 _dec_msg | ||||
|                 == | ||||
|                 dec_msg | ||||
|                 == | ||||
|                 enc_msg | ||||
|             )) | ||||
| 
 | ||||
|             if ( | ||||
|                 expect_roundtrip is not None | ||||
|                 and expect_roundtrip != roundtrip | ||||
|             ): | ||||
|                 breakpoint() | ||||
| 
 | ||||
|             assert ( | ||||
|                 pld | ||||
|                 == | ||||
|                 dec_msg.pld | ||||
|                 == | ||||
|                 enc_msg.pld | ||||
|             ) | ||||
|             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||
| 
 | ||||
|         except ValidationError as _ve: | ||||
|             ve = _ve | ||||
|             roundtrip: bool = False | ||||
|             if pld_val_type is payload_spec: | ||||
|                 raise ValueError( | ||||
|                     'Got `ValidationError` despite type-var match!?\n' | ||||
|                     f'pld_val_type: {pld_val_type}\n' | ||||
|                     f'payload_type: {payload_spec}\n' | ||||
|                 ) from ve | ||||
| 
 | ||||
|             else: | ||||
|                 # otherwise we're good cuz the pld spec mismatched. | ||||
|                 print( | ||||
|                     'Got expected `ValidationError` since,\n' | ||||
|                     f'{pld_val_type} is not {payload_spec}\n' | ||||
|                 ) | ||||
|         else: | ||||
|             if ( | ||||
|                 payload_spec is not Any | ||||
|                 and | ||||
|                 pld_val_type is not payload_spec | ||||
|             ): | ||||
|                 raise ValueError( | ||||
|                     'DID NOT `ValidationError` despite expected type match!?\n' | ||||
|                     f'pld_val_type: {pld_val_type}\n' | ||||
|                     f'payload_type: {payload_spec}\n' | ||||
|                 ) | ||||
| 
 | ||||
|     # a full codec decode should always have been attempted! | ||||
|     if roundtrip is None: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     return roundtrip | ||||
| 
 | ||||
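| # NOTE: the boxed-field introspection above relies on | ||||
| # `msgspec.structs.fields()` which returns per-field metadata; e.g. | ||||
| # (sketch) `structs.fields(Started)[1]` is the `pld` field record | ||||
| # whose `.type` reflects the applied payload-spec. | ||||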
| 
 | ||||
| def test_limit_msgspec(): | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_root_actor( | ||||
|             debug_mode=True | ||||
|         ): | ||||
| 
 | ||||
|             # ensure we can round-trip a boxing `PayloadMsg` | ||||
|             assert chk_pld_type( | ||||
|                 payload_spec=Any, | ||||
|                 pld=None, | ||||
|                 expect_roundtrip=True, | ||||
|             ) | ||||
| 
 | ||||
|             # verify that a mis-typed payload value won't decode | ||||
|             assert not chk_pld_type( | ||||
|                 payload_spec=int, | ||||
|                 pld='doggy', | ||||
|             ) | ||||
| 
 | ||||
|             # parametrize the boxed `.pld` type as a custom-struct | ||||
|             # and ensure that parametrization propagates | ||||
|             # to all payload-msg-spec-able subtypes! | ||||
|             class CustomPayload(Struct): | ||||
|                 name: str | ||||
|                 value: Any | ||||
| 
 | ||||
|             assert not chk_pld_type( | ||||
|                 payload_spec=CustomPayload, | ||||
|                 pld='doggy', | ||||
|             ) | ||||
| 
 | ||||
|             assert chk_pld_type( | ||||
|                 payload_spec=CustomPayload, | ||||
|                 pld=CustomPayload(name='doggy', value='urmom') | ||||
|             ) | ||||
| 
 | ||||
|             # yah, we can `.pause_from_sync()` now! | ||||
|             # breakpoint() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -6,14 +6,15 @@ sub-sub-actor daemons. | |||
| ''' | ||||
| from typing import Optional | ||||
| import asyncio | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     aclosing, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from tractor import RemoteActorError | ||||
| from async_generator import aclosing | ||||
| 
 | ||||
| 
 | ||||
| async def aio_streamer( | ||||
|  | @ -141,7 +142,7 @@ async def open_actor_local_nursery( | |||
| ) | ||||
| def test_actor_managed_trio_nursery_task_error_cancels_aio( | ||||
|     asyncio_mode: bool, | ||||
|     arb_addr | ||||
|     reg_addr: tuple, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that a ``trio`` nursery created managed in a child actor | ||||
|  | @ -170,4 +171,4 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | |||
| 
 | ||||
|     # verify boxed error | ||||
|     err = excinfo.value | ||||
|     assert isinstance(err.type(), NameError) | ||||
|     assert err.boxed_type is NameError | ||||
|  |  | |||
|  | @ -5,9 +5,7 @@ import trio | |||
| import tractor | ||||
| from tractor import open_actor_cluster | ||||
| from tractor.trionics import gather_contexts | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| MESSAGE = 'tractoring at full speed' | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,24 +1,36 @@ | |||
| ''' | ||||
| ``async with ():`` inlined context-stream cancellation testing. | ||||
| 
 | ||||
| Verify the we raise errors when streams are opened prior to sync-opening | ||||
| a ``tractor.Context`` beforehand. | ||||
| Verify that we raise errors when streams are opened prior to | ||||
| sync-opening a ``tractor.Context``. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from itertools import count | ||||
| import math | ||||
| import platform | ||||
| from typing import Optional | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Actor, | ||||
|     Context, | ||||
|     current_actor, | ||||
| ) | ||||
| from tractor._exceptions import ( | ||||
|     StreamOverrun, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from tractor._state import current_ipc_ctx | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
|     expect_ctxc, | ||||
| ) | ||||
| 
 | ||||
| # ``Context`` semantics are as follows, | ||||
| #  ------------------------------------ | ||||
|  | @ -64,7 +76,7 @@ _state: bool = False | |||
| 
 | ||||
| @tractor.context | ||||
| async def too_many_starteds( | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Call ``Context.started()`` more than once (an error). | ||||
|  | @ -79,7 +91,7 @@ async def too_many_starteds( | |||
| 
 | ||||
| @tractor.context | ||||
| async def not_started_but_stream_opened( | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Enter ``Context.open_stream()`` without calling ``.started()``. | ||||
|  | @ -100,11 +112,15 @@ async def not_started_but_stream_opened( | |||
|     ], | ||||
|     ids='misuse_type={}'.format, | ||||
| ) | ||||
| def test_started_misuse(target): | ||||
| 
 | ||||
| def test_started_misuse( | ||||
|     target: Callable, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 target.__name__, | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -119,7 +135,7 @@ def test_started_misuse(target): | |||
| @tractor.context | ||||
| async def simple_setup_teardown( | ||||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
|     data: int, | ||||
|     block_forever: bool = False, | ||||
| 
 | ||||
|  | @ -129,6 +145,8 @@ async def simple_setup_teardown( | |||
|     global _state | ||||
|     _state = True | ||||
| 
 | ||||
|     assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|     # signal to parent that we're up | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|  | @ -165,6 +183,7 @@ def test_simple_context( | |||
|     error_parent, | ||||
|     callee_blocks_forever, | ||||
|     pointlessly_open_stream, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
| 
 | ||||
|     timeout = 1.5 if not platform.system() == 'Windows' else 4 | ||||
|  | @ -172,20 +191,23 @@ def test_simple_context( | |||
|     async def main(): | ||||
| 
 | ||||
|         with trio.fail_after(timeout): | ||||
|             async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|                 portal = await nursery.start_actor( | ||||
|             async with tractor.open_nursery( | ||||
|                 debug_mode=debug_mode, | ||||
|             ) as an: | ||||
|                 portal = await an.start_actor( | ||||
|                     'simple_context', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
| 
 | ||||
|                 try: | ||||
|                     async with portal.open_context( | ||||
|                     async with ( | ||||
|                         portal.open_context( | ||||
|                             simple_setup_teardown, | ||||
|                             data=10, | ||||
|                             block_forever=callee_blocks_forever, | ||||
|                     ) as (ctx, sent): | ||||
| 
 | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         assert current_ipc_ctx() is ctx | ||||
|                         assert sent == 11 | ||||
| 
 | ||||
|                         if callee_blocks_forever: | ||||
|  | @ -193,9 +215,6 @@ def test_simple_context( | |||
|                         else: | ||||
|                             assert await ctx.result() == 'yo' | ||||
| 
 | ||||
|                         if not error_parent: | ||||
|                             await ctx.cancel() | ||||
| 
 | ||||
|                         if pointlessly_open_stream: | ||||
|                             async with ctx.open_stream(): | ||||
|                                 if error_parent: | ||||
|  | @ -208,10 +227,15 @@ def test_simple_context( | |||
|                                     # 'stop' msg to the far end which needs | ||||
|                                     # to be ignored | ||||
|                                     pass | ||||
| 
 | ||||
|                         else: | ||||
|                             if error_parent: | ||||
|                                 raise error_parent | ||||
| 
 | ||||
|                             # cancel AFTER we open a stream | ||||
|                             # to avoid a cancel raised inside | ||||
|                             # `.open_stream()` | ||||
|                             await ctx.cancel() | ||||
|                 finally: | ||||
| 
 | ||||
|                     # after cancellation | ||||
|  | @ -226,10 +250,10 @@ def test_simple_context( | |||
|             trio.run(main) | ||||
|         except error_parent: | ||||
|             pass | ||||
|         except trio.MultiError as me: | ||||
|         except BaseExceptionGroup as beg: | ||||
|             # XXX: on windows it seems we may have to expect the group error | ||||
|             from tractor._exceptions import is_multi_cancelled | ||||
|             assert is_multi_cancelled(me) | ||||
|             assert is_multi_cancelled(beg) | ||||
|     else: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|  | @ -253,6 +277,7 @@ def test_caller_cancels( | |||
|     cancel_method: str, | ||||
|     chk_ctx_result_before_exit: bool, | ||||
|     callee_returns_early: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that when the opening side of a context (aka the caller) | ||||
|  | @ -261,37 +286,100 @@ def test_caller_cancels( | |||
| 
 | ||||
|     ''' | ||||
|     async def check_canceller( | ||||
|         ctx: tractor.Context, | ||||
|         ctx: Context, | ||||
|     ) -> None: | ||||
|         # should not raise, but instead return the remote | ||||
|         # context-cancelled error. | ||||
|         res = await ctx.result() | ||||
|         actor: Actor = current_actor() | ||||
|         uid: tuple = actor.uid | ||||
|         _ctxc: ContextCancelled|None = None | ||||
| 
 | ||||
|         if ( | ||||
|             cancel_method == 'portal' | ||||
|             and not callee_returns_early | ||||
|         ): | ||||
|             try: | ||||
|                 res = await ctx.result() | ||||
|                 assert 0, 'Portal cancel should raise!' | ||||
| 
 | ||||
|             except ContextCancelled as ctxc: | ||||
|                 # with trio.CancelScope(shield=True): | ||||
|                 #     await tractor.pause() | ||||
|                 _ctxc = ctxc | ||||
|                 assert ctx.chan._cancel_called | ||||
|                 assert ctxc.canceller == uid | ||||
|                 assert ctxc is ctx.maybe_error | ||||
| 
 | ||||
|         # NOTE: should not ever raise even in the `ctx` | ||||
|         # case since self-cancellation should swallow the ctxc | ||||
|         # silently! | ||||
|         else: | ||||
|             try: | ||||
|                 res = await ctx.result() | ||||
|             except ContextCancelled as ctxc: | ||||
|                 pytest.fail(f'should not have raised ctxc\n{ctxc}') | ||||
| 
 | ||||
|         # we actually get a result | ||||
|         if callee_returns_early: | ||||
|             assert res == 'yo' | ||||
|             assert ctx.outcome is res | ||||
|             assert ctx.maybe_error is None | ||||
| 
 | ||||
|         else: | ||||
|             err = res | ||||
|             err: Exception = ctx.outcome | ||||
|             assert isinstance(err, ContextCancelled) | ||||
|             assert ( | ||||
|                 tuple(err.canceller) | ||||
|                 == | ||||
|                 tractor.current_actor().uid | ||||
|                 uid | ||||
|             ) | ||||
|             assert ( | ||||
|                 err | ||||
|                 is ctx.maybe_error | ||||
|                 is ctx._remote_error | ||||
|             ) | ||||
|             if le := ctx._local_error: | ||||
|                 assert err is le | ||||
| 
 | ||||
|             # else: | ||||
|                 # TODO: what should this be then? | ||||
|                 # not defined until block closes right? | ||||
|                 # | ||||
|                 # await tractor.pause() | ||||
|                 # assert ctx._local_error is None | ||||
| 
 | ||||
|         # TODO: don't need this right? | ||||
|         # if _ctxc: | ||||
|         #     raise _ctxc | ||||
| 
 | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as nursery: | ||||
|             portal = await nursery.start_actor( | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'simple_context', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|             timeout = 0.5 if not callee_returns_early else 2 | ||||
|             timeout: float = ( | ||||
|                 0.5 | ||||
|                 if not callee_returns_early | ||||
|                 else 2 | ||||
|             ) | ||||
|             with trio.fail_after(timeout): | ||||
|                 async with portal.open_context( | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         yay=( | ||||
|                             not callee_returns_early | ||||
|                             and cancel_method == 'portal' | ||||
|                         ) | ||||
|                     ), | ||||
| 
 | ||||
|                     portal.open_context( | ||||
|                         simple_setup_teardown, | ||||
|                         data=10, | ||||
|                         block_forever=not callee_returns_early, | ||||
|                 ) as (ctx, sent): | ||||
|                     ) as (ctx, sent), | ||||
|                 ): | ||||
| 
 | ||||
|                     if callee_returns_early: | ||||
|                         # ensure we block long enough before sending | ||||
|  | @ -300,10 +388,18 @@ def test_caller_cancels( | |||
|                         await trio.sleep(0.5) | ||||
| 
 | ||||
|                     if cancel_method == 'ctx': | ||||
|                         print('cancelling with `Context.cancel()`') | ||||
|                         await ctx.cancel() | ||||
|                     else: | ||||
| 
 | ||||
|                     elif cancel_method == 'portal': | ||||
|                         print('cancelling with `Portal.cancel_actor()`') | ||||
|                         await portal.cancel_actor() | ||||
| 
 | ||||
|                     else: | ||||
|                         pytest.fail( | ||||
|                             f'Unknown `cancel_method={cancel_method}` ?' | ||||
|                         ) | ||||
| 
 | ||||
|                     if chk_ctx_result_before_exit: | ||||
|                         await check_canceller(ctx) | ||||
| 
 | ||||
|  | @ -313,6 +409,23 @@ def test_caller_cancels( | |||
|             if cancel_method != 'portal': | ||||
|                 await portal.cancel_actor() | ||||
| 
 | ||||
|             # XXX NOTE XXX: non-normal yet purposeful | ||||
|             # test-specific ctxc suppression is implemented! | ||||
|             # | ||||
|             # WHY: the `.cancel_actor()` case (cancel_method='portal') | ||||
|             # will cause both: | ||||
|             #  * the `ctx.result()` inside `.open_context().__aexit__()` | ||||
|             #  * AND the `ctx.result()` inside `check_canceller()` | ||||
|             # to raise ctxc. | ||||
|             # | ||||
|             #   which should in turn cause `ctx._scope` to | ||||
|             # catch any cancellation? | ||||
|             if ( | ||||
|                 not callee_returns_early | ||||
|                 and cancel_method != 'portal' | ||||
|             ): | ||||
|                 assert not ctx._scope.cancelled_caught | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
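| # XXX summary of the outcome-resolution invariants exercised above: | ||||
| # once the ctx closes, `ctx.outcome` is either the final result or | ||||
| # `ctx.maybe_error`, and a self-requested `ctx.cancel()` from the | ||||
| # opener is absorbed silently instead of raising a ctxc. | ||||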
| 
 | ||||
|  | @ -331,7 +444,7 @@ def test_caller_cancels( | |||
| @tractor.context | ||||
| async def close_ctx_immediately( | ||||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -343,17 +456,33 @@ async def close_ctx_immediately( | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_callee_closes_ctx_after_stream_open(): | ||||
|     'callee context closes without using stream' | ||||
| async def test_callee_closes_ctx_after_stream_open( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     callee context closes without using stream. | ||||
| 
 | ||||
|     async with tractor.open_nursery() as n: | ||||
|     This should result in a msg sequence | ||||
|     |_<root>_ | ||||
|              |_<fast_stream_closer> | ||||
| 
 | ||||
|         portal = await n.start_actor( | ||||
|              <= {'started': <Any>, 'cid': <str>} | ||||
|              <= {'stop': True, 'cid': <str>} | ||||
|              <= {'result': Any, ..} | ||||
| 
 | ||||
|      (ignored by child) | ||||
|     => {'stop': True, 'cid': <str>} | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
|         portal = await an.start_actor( | ||||
|             'fast_stream_closer', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         with trio.fail_after(2): | ||||
|         with trio.fail_after(0.5): | ||||
|             async with portal.open_context( | ||||
|                 close_ctx_immediately, | ||||
| 
 | ||||
|  | @ -361,10 +490,9 @@ async def test_callee_closes_ctx_after_stream_open(): | |||
|                 # cancel_on_exit=True, | ||||
| 
 | ||||
|             ) as (ctx, sent): | ||||
| 
 | ||||
|                 assert sent is None | ||||
| 
 | ||||
|                 with trio.fail_after(0.5): | ||||
|                 with trio.fail_after(0.4): | ||||
|                     async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|                         # should fall through since ``StopAsyncIteration`` | ||||
|  | @ -372,11 +500,14 @@ async def test_callee_closes_ctx_after_stream_open(): | |||
|                         # a ``trio.EndOfChannel`` by | ||||
|                         # ``trio.abc.ReceiveChannel.__anext__()`` | ||||
|                         async for _ in stream: | ||||
|                             # trigger failure if we DO NOT | ||||
|                             # get an EOC! | ||||
|                             assert 0 | ||||
|                         else: | ||||
| 
 | ||||
|                             # verify stream is now closed | ||||
|                             try: | ||||
|                                 with trio.fail_after(0.3): | ||||
|                                     await stream.receive() | ||||
|                             except trio.EndOfChannel: | ||||
|                                 pass | ||||
|  | @ -397,8 +528,7 @@ async def test_callee_closes_ctx_after_stream_open(): | |||
| 
 | ||||
| @tractor.context | ||||
| async def expect_cancelled( | ||||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _state | ||||
|  | @ -412,12 +542,29 @@ async def expect_cancelled( | |||
|                 await stream.send(msg)  # echo server | ||||
| 
 | ||||
|     except trio.Cancelled: | ||||
| 
 | ||||
|         # on ctx.cancel() the internal RPC scope is cancelled but | ||||
|         # never caught until the func exits. | ||||
|         assert ctx._scope.cancel_called | ||||
|         assert not ctx._scope.cancelled_caught | ||||
| 
 | ||||
|         # should be the RPC cmd request for `._cancel_task()` | ||||
|         assert ctx._cancel_msg | ||||
|         # which has not yet resolved to an error outcome | ||||
|         # since this rpc func has not yet exited. | ||||
|         assert not ctx.maybe_error | ||||
|         assert not ctx._final_result_is_set() | ||||
| 
 | ||||
|         # debug REPL if needed | ||||
|         # with trio.CancelScope(shield=True): | ||||
|         #     await tractor.pause() | ||||
| 
 | ||||
|         # expected case | ||||
|         _state = False | ||||
|         raise | ||||
| 
 | ||||
|     else: | ||||
|         assert 0, "Wasn't cancelled!?" | ||||
|         assert 0, "callee wasn't cancelled !?" | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -427,12 +574,18 @@ async def expect_cancelled( | |||
| @tractor_test | ||||
| async def test_caller_closes_ctx_after_callee_opens_stream( | ||||
|     use_ctx_cancel_method: bool, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     'caller context closes without using stream' | ||||
|     ''' | ||||
|     caller context closes without using/opening stream | ||||
| 
 | ||||
|     async with tractor.open_nursery() as n: | ||||
|     ''' | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
| 
 | ||||
|         portal = await n.start_actor( | ||||
|         root: Actor = current_actor() | ||||
|         portal = await an.start_actor( | ||||
|             'ctx_cancelled', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
|  | @ -440,22 +593,37 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | |||
|         async with portal.open_context( | ||||
|             expect_cancelled, | ||||
|         ) as (ctx, sent): | ||||
|             await portal.run(assert_state, value=True) | ||||
| 
 | ||||
|             assert sent is None | ||||
| 
 | ||||
|             # call cancel explicitly | ||||
|             if use_ctx_cancel_method: | ||||
|             await portal.run(assert_state, value=True) | ||||
| 
 | ||||
|             # call `ctx.cancel()` explicitly | ||||
|             if use_ctx_cancel_method: | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|                 # NOTE: means the local side `ctx._scope` will | ||||
|                 # have been cancelled by a ctxc ack and thus | ||||
|                 # `._scope.cancelled_caught` should be set. | ||||
|                 try: | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         async for msg in stream: | ||||
|                             pass | ||||
| 
 | ||||
|                 except tractor.ContextCancelled: | ||||
|                     raise  # XXX: must be propagated to __aexit__ | ||||
|                 except tractor.ContextCancelled as ctxc: | ||||
|                     # XXX: the cause is US since we call | ||||
|                     # `Context.cancel()` just above! | ||||
|                     assert ( | ||||
|                         ctxc.canceller | ||||
|                         == | ||||
|                         current_actor().uid | ||||
|                         == | ||||
|                         root.uid | ||||
|                     ) | ||||
| 
 | ||||
|                     # XXX: must be propagated to __aexit__ | ||||
|                     # and should be silently absorbed there | ||||
|                     # since we called `.cancel()` just above ;) | ||||
|                     raise | ||||
| 
 | ||||
|                 else: | ||||
|                     assert 0, "Should have context cancelled?" | ||||
|  | @ -464,7 +632,10 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | |||
|                 assert portal.channel.connected() | ||||
| 
 | ||||
|                 # ctx is closed here | ||||
|                 await portal.run(assert_state, value=False) | ||||
|                 await portal.run( | ||||
|                     assert_state, | ||||
|                     value=False, | ||||
|                 ) | ||||
| 
 | ||||
|             else: | ||||
|                 try: | ||||
|  | @ -472,7 +643,25 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | |||
|                         await ctx.result() | ||||
|                         assert 0, "Callee should have blocked!?" | ||||
|                 except trio.TooSlowError: | ||||
|                     # NO-OP -> since already called above | ||||
|                     await ctx.cancel() | ||||
| 
 | ||||
|         # NOTE: the local scope should have absorbed the cancellation | ||||
|         # since in this case we call `ctx.cancel()` ourselves; the | ||||
|         # local `._scope` never gets `.cancel_called` and thus | ||||
|         # `.cancelled_caught` will never be set. | ||||
|         if use_ctx_cancel_method: | ||||
|             assert not ctx._scope.cancelled_caught | ||||
| 
 | ||||
|         # rxed ctxc response from far end | ||||
|         assert ctx.cancel_acked | ||||
|         assert ( | ||||
|             ctx._remote_error | ||||
|             is ctx._local_error | ||||
|             is ctx.maybe_error | ||||
|             is ctx.outcome | ||||
|         ) | ||||
| 
 | ||||
|         try: | ||||
|             async with ctx.open_stream() as stream: | ||||
|                 async for msg in stream: | ||||
|  | @ -494,11 +683,13 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_multitask_caller_cancels_from_nonroot_task(): | ||||
| 
 | ||||
|     async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|         portal = await n.start_actor( | ||||
| async def test_multitask_caller_cancels_from_nonroot_task( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
|         portal = await an.start_actor( | ||||
|             'ctx_cancelled', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
|  | @ -545,25 +736,31 @@ async def test_multitask_caller_cancels_from_nonroot_task(): | |||
| @tractor.context | ||||
| async def cancel_self( | ||||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     ctx: Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _state | ||||
|     _state = True | ||||
| 
 | ||||
|     # since we call this, the below `.open_stream()` should always | ||||
|     # error! | ||||
|     await ctx.cancel() | ||||
| 
 | ||||
|     # should inline raise immediately | ||||
|     try: | ||||
|         async with ctx.open_stream(): | ||||
|             pass | ||||
|     except tractor.ContextCancelled: | ||||
|     # except tractor.ContextCancelled: | ||||
|     except RuntimeError: | ||||
|         # suppress for now so we can do checkpoint tests below | ||||
|         pass | ||||
|         print('Got expected runtime error for stream-after-cancel') | ||||
| 
 | ||||
|     else: | ||||
|         raise RuntimeError("Context didn't cancel itself?!") | ||||
| 
 | ||||
|     # check a real ``trio.Cancelled`` is raised on a checkpoint | ||||
|     # check that ``trio.Cancelled`` is now raised on any further | ||||
|     # checkpoints since the self cancel above will have cancelled | ||||
|     # the `Context._scope.cancel_scope: trio.CancelScope` | ||||
|     try: | ||||
|         with trio.fail_after(0.1): | ||||
|             await trio.sleep_forever() | ||||
|  | @ -574,17 +771,22 @@ async def cancel_self( | |||
|         # should never get here | ||||
|         assert 0 | ||||
| 
 | ||||
|     raise RuntimeError('Context didnt cancel itself?!') | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_callee_cancels_before_started(): | ||||
| async def test_callee_cancels_before_started( | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Callee calls `Context.cancel()` while streaming and caller | ||||
|     sees stream terminated in `ContextCancelled`. | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|         portal = await n.start_actor( | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=debug_mode, | ||||
|     ) as an: | ||||
|         portal = await an.start_actor( | ||||
|             'cancels_self', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
|  | @ -598,10 +800,12 @@ async def test_callee_cancels_before_started(): | |||
| 
 | ||||
|         # raises a special cancel signal | ||||
|         except tractor.ContextCancelled as ce: | ||||
|             ce.type == trio.Cancelled | ||||
|             _ce = ce  # for debug on crash | ||||
|             assert ce.boxed_type == trio.Cancelled | ||||
| 
 | ||||
|             # the traceback should be informative | ||||
|             assert 'cancelled itself' in ce.msgdata['tb_str'] | ||||
|             assert 'itself' in ce.tb_str | ||||
|             assert ce.tb_str == ce.msgdata['tb_str'] | ||||
| 
 | ||||
|         # teardown the actor | ||||
|         await portal.cancel_actor() | ||||
|  | @ -610,7 +814,7 @@ async def test_callee_cancels_before_started(): | |||
| @tractor.context | ||||
| async def never_open_stream( | ||||
| 
 | ||||
|     ctx:  tractor.Context, | ||||
|     ctx:  Context, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -624,8 +828,8 @@ async def never_open_stream( | |||
| @tractor.context | ||||
| async def keep_sending_from_callee( | ||||
| 
 | ||||
|     ctx:  tractor.Context, | ||||
|     msg_buffer_size: Optional[int] = None, | ||||
|     ctx:  Context, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -648,9 +852,15 @@ async def keep_sending_from_callee( | |||
|         ('caller', 1, never_open_stream), | ||||
|         ('callee', 0, keep_sending_from_callee), | ||||
|     ], | ||||
|     ids='overrun_condition={}'.format, | ||||
|     ids=[ | ||||
|         'caller_1buf_never_open_stream', | ||||
|         'callee_0buf_keep_sending_from_callee', | ||||
|     ] | ||||
| ) | ||||
| def test_one_end_stream_not_opened(overrun_by): | ||||
| def test_one_end_stream_not_opened( | ||||
|     overrun_by: tuple[str, int, Callable], | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     This should exemplify the bug from: | ||||
|     https://github.com/goodboy/tractor/issues/265 | ||||
|  | @ -661,12 +871,15 @@ def test_one_end_stream_not_opened(overrun_by): | |||
|     buf_size = buf_size_increase + Actor.msg_buffer_size | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 entrypoint.__name__, | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             with trio.fail_after(1): | ||||
|                 async with portal.open_context( | ||||
|                     entrypoint, | ||||
|                 ) as (ctx, sent): | ||||
|  | @ -701,7 +914,7 @@ def test_one_end_stream_not_opened(overrun_by): | |||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         assert excinfo.value.type == StreamOverrun | ||||
|         assert excinfo.value.boxed_type == StreamOverrun | ||||
| 
 | ||||
|     elif overrunner == 'callee': | ||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|  | @ -710,7 +923,7 @@ def test_one_end_stream_not_opened(overrun_by): | |||
|         # TODO: embedded remote errors so that we can verify the source | ||||
|         # error? the callee delivers an error which is an overrun | ||||
|         # wrapped in a remote actor error. | ||||
|         assert excinfo.value.type == tractor.RemoteActorError | ||||
|         assert excinfo.value.boxed_type == tractor.RemoteActorError | ||||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
|  | @ -719,7 +932,7 @@ def test_one_end_stream_not_opened(overrun_by): | |||
| @tractor.context | ||||
| async def echo_back_sequence( | ||||
| 
 | ||||
|     ctx:  tractor.Context, | ||||
|     ctx:  Context, | ||||
|     seq: list[int], | ||||
|     wait_for_cancel: bool, | ||||
|     allow_overruns_side: str, | ||||
|  | @ -736,10 +949,13 @@ async def echo_back_sequence( | |||
|     # NOTE: ensure that if the caller is expecting to cancel this task | ||||
|     # that we stay echoing much longer than they are so we don't | ||||
|     # return early instead of receiving the cancel msg. | ||||
|     total_batches: int = 1000 if wait_for_cancel else 6 | ||||
|     total_batches: int = ( | ||||
|         1000 if wait_for_cancel | ||||
|         else 6 | ||||
|     ) | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     # await tractor.breakpoint() | ||||
|     # await tractor.pause() | ||||
|     async with ctx.open_stream( | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
| 
 | ||||
|  | @ -755,8 +971,23 @@ async def echo_back_sequence( | |||
|         ) | ||||
| 
 | ||||
|         seq = list(seq)  # bleh, msgpack sometimes ain't decoded right | ||||
|         for _ in range(total_batches): | ||||
|         for i in range(total_batches): | ||||
|             print(f'starting new stream batch {i} iter in child') | ||||
|             batch = [] | ||||
| 
 | ||||
|             # EoC case, delay a little instead of hot | ||||
|             # iter-stopping (since apparently py3.11+ can do that | ||||
|             # faster than a ctxc can be sent) on the async for | ||||
|             # loop when child was requested to ctxc. | ||||
|             if ( | ||||
|                 stream.closed | ||||
|                 or | ||||
|                 ctx.cancel_called | ||||
|             ): | ||||
|                 print('child stream already closed!?!') | ||||
|                 await trio.sleep(0.05) | ||||
|                 continue | ||||
| 
 | ||||
|             async for msg in stream: | ||||
|                 batch.append(msg) | ||||
|                 if batch == seq: | ||||
|  | @ -767,15 +998,18 @@ async def echo_back_sequence( | |||
| 
 | ||||
|                 print('callee waiting on next') | ||||
| 
 | ||||
|             print(f'callee echoing back latest batch\n{batch}') | ||||
|             for msg in batch: | ||||
|                 print(f'callee sending {msg}') | ||||
|                 print(f'callee sending msg\n{msg}') | ||||
|                 await stream.send(msg) | ||||
| 
 | ||||
|     print( | ||||
|         'EXITING CALLEE:\n' | ||||
|         f'{ctx.cancel_called_remote}' | ||||
|     ) | ||||
|     try: | ||||
|         return 'yo' | ||||
|     finally: | ||||
|         print( | ||||
|             'exiting callee with context:\n' | ||||
|             f'{pformat(ctx)}\n' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -801,7 +1035,10 @@ def test_maybe_allow_overruns_stream( | |||
|     cancel_ctx: bool, | ||||
|     slow_side: str, | ||||
|     allow_overruns_side: str, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demonstrate small overruns of each task back and forth | ||||
|  | @ -820,23 +1057,34 @@ def test_maybe_allow_overruns_stream( | |||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             portal = await an.start_actor( | ||||
|                 'callee_sends_forever', | ||||
|                 enable_modules=[__name__], | ||||
|                 loglevel=loglevel, | ||||
| 
 | ||||
|                 # debug_mode=True, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
|             seq = list(range(10)) | ||||
| 
 | ||||
|             # stream-sequence batch info with send delay used to | ||||
|             # compute an approx timeout for detecting a hung test. | ||||
|             total_batches: int = 2 | ||||
|             num_items: int = 10 | ||||
|             seq = list(range(num_items)) | ||||
|             parent_send_delay: float = 0.16 | ||||
|             timeout: float = math.ceil( | ||||
|                 total_batches * num_items * parent_send_delay | ||||
|             ) | ||||
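|             # e.g. with the values above this gives, | ||||
|             #   math.ceil(2 * 10 * 0.16) == math.ceil(3.2) == 4 | ||||
|             # (seconds) before the test is deemed hung. | ||||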
|             with trio.fail_after(timeout): | ||||
|                 async with portal.open_context( | ||||
|                     echo_back_sequence, | ||||
|                     seq=seq, | ||||
|                     wait_for_cancel=cancel_ctx, | ||||
|                     be_slow=(slow_side == 'child'), | ||||
|                     allow_overruns_side=allow_overruns_side, | ||||
|             ) as (ctx, sent): | ||||
| 
 | ||||
|                 ) as (ctx, sent): | ||||
|                     assert sent is None | ||||
| 
 | ||||
|                     async with ctx.open_stream( | ||||
|  | @ -844,7 +1092,6 @@ def test_maybe_allow_overruns_stream( | |||
|                         allow_overruns=(allow_overruns_side in {'parent', 'both'}), | ||||
|                     ) as stream: | ||||
| 
 | ||||
|                     total_batches: int = 2 | ||||
|                         for _ in range(total_batches): | ||||
|                             for msg in seq: | ||||
|                                 # print(f'root tx {msg}') | ||||
|  | @ -853,7 +1100,7 @@ def test_maybe_allow_overruns_stream( | |||
|                                     # NOTE: we make the parent slightly | ||||
|                                     # slower, when it is slow, to make sure | ||||
|                                     # that in the overruns everywhere case | ||||
|                                 await trio.sleep(0.16) | ||||
|                                     await trio.sleep(parent_send_delay) | ||||
| 
 | ||||
|                             batch = [] | ||||
|                             async for msg in stream: | ||||
|  | @ -864,14 +1111,14 @@ def test_maybe_allow_overruns_stream( | |||
| 
 | ||||
|                     if cancel_ctx: | ||||
|                         # cancel the remote task | ||||
|                     print('sending root side cancel') | ||||
|                         print('Requesting `ctx.cancel()` in parent!') | ||||
|                         await ctx.cancel() | ||||
| 
 | ||||
|             res = await ctx.result() | ||||
|                 res: str|ContextCancelled = await ctx.result() | ||||
| 
 | ||||
|                 if cancel_ctx: | ||||
|                     assert isinstance(res, ContextCancelled) | ||||
|                 assert tuple(res.canceller) == tractor.current_actor().uid | ||||
|                     assert tuple(res.canceller) == current_actor().uid | ||||
| 
 | ||||
|                 else: | ||||
|                     print(f'RX ROOT SIDE RESULT {res}') | ||||
|  | @ -904,7 +1151,7 @@ def test_maybe_allow_overruns_stream( | |||
|             # NOTE: i tried to isolate to a deterministic case here | ||||
|             # based on timing, but i was kinda wasted, and i don't | ||||
|             # think it's sane to catch them.. | ||||
|             assert err.type in ( | ||||
|             assert err.boxed_type in ( | ||||
|                 tractor.RemoteActorError, | ||||
|                 StreamOverrun, | ||||
|             ) | ||||
|  | @ -912,11 +1159,12 @@ def test_maybe_allow_overruns_stream( | |||
|         elif ( | ||||
|             slow_side == 'child' | ||||
|         ): | ||||
|             assert err.type == StreamOverrun | ||||
|             assert err.boxed_type == StreamOverrun | ||||
| 
 | ||||
|         elif slow_side == 'parent': | ||||
|             assert err.type == tractor.RemoteActorError | ||||
|             assert 'StreamOverrun' in err.msgdata['tb_str'] | ||||
|             assert err.boxed_type == tractor.RemoteActorError | ||||
|             assert 'StreamOverrun' in err.tb_str | ||||
|             assert err.tb_str == err.msgdata['tb_str'] | ||||
| 
 | ||||
|     else: | ||||
|         # if this hits the logic blocks from above are not | ||||
|  | @ -924,91 +1172,50 @@ def test_maybe_allow_overruns_stream( | |||
|         pytest.fail('PARAMETRIZED CASE GEN PROBLEM YO') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
|     await ctx.started() | ||||
|     async with ctx.open_stream(): | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def attach_to_sleep_forever(): | ||||
| def test_ctx_with_self_actor( | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Cancel a context **before** any underlying error is raised in order | ||||
|     to trigger a local reception of a ``ContextCancelled`` which **should not** | ||||
|     be re-raised in the local surrounding ``Context`` *iff* the cancel was | ||||
|     requested by **this** side of the context. | ||||
|     NOTE: for now this is an INVALID OP! | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.wait_for_actor('sleeper') as p2: | ||||
|         async with ( | ||||
|             p2.open_context(sleep_forever) as (peer_ctx, first), | ||||
|             peer_ctx.open_stream(), | ||||
|         ): | ||||
|             try: | ||||
|                 yield | ||||
|             finally: | ||||
|                 # XXX: previously this would trigger local | ||||
|                 # ``ContextCancelled`` to be received and raised in the | ||||
|                 # local context overriding any local error due to logic | ||||
|                 # inside ``_invoke()`` which checked for an error set on | ||||
|                 # ``Context._error`` and raised it in a cancellation | ||||
|                 # scenario. | ||||
|                 # ------ | ||||
|                 # The problem is you can have a remote cancellation that | ||||
|                 # is part of a local error and we shouldn't raise | ||||
|                 # ``ContextCancelled`` **iff** we **were not** the side | ||||
|                 # of the context to initiate it, i.e. | ||||
|                 # ``Context._cancel_called`` should **NOT** have been | ||||
|                 # set. The special logic to handle this case is now | ||||
|                 # inside ``Context._maybe_raise_from_remote_msg()`` XD | ||||
|                 await peer_ctx.cancel() | ||||
|     BUT, eventually presuming we add a "side" key to `Actor.get_context()`, | ||||
|     we might be able to get this working symmetrically, but should we?? | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def error_before_started( | ||||
|     ctx: tractor.Context, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     This simulates exactly an original bug discovered in: | ||||
|     https://github.com/pikers/piker/issues/244 | ||||
| 
 | ||||
|     ''' | ||||
|     async with attach_to_sleep_forever(): | ||||
|         # send an unserializable type which should raise a type error | ||||
|         # here and **NOT BE SWALLOWED** by the surrounding acm!!?! | ||||
|         await ctx.started(object()) | ||||
| 
 | ||||
| 
 | ||||
| def test_do_not_swallow_error_before_started_by_remote_contextcancelled(): | ||||
|     ''' | ||||
|     Verify that an error raised in a remote context which itself opens | ||||
|     another remote context, which it cancels, does not override the | ||||
|     original error that caused the cancellation of the secondary | ||||
|     context. | ||||
|     Open a context back to the same actor and ensure all cancellation | ||||
|     and error semantics hold the same. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.start_actor( | ||||
|                 'errorer', | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             enable_modules=[__name__], | ||||
|             ) | ||||
|             await n.start_actor( | ||||
|                 'sleeper', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|         ) as an: | ||||
|             assert an | ||||
|             async with ( | ||||
|                 tractor.find_actor('root') as portal, | ||||
|                 portal.open_context( | ||||
|                     error_before_started | ||||
|                 ) as (ctx, sent), | ||||
|             ): | ||||
|                 await trio.sleep_forever() | ||||
|                     expect_cancelled, | ||||
|                     # echo_back_sequence, | ||||
|                     # seq=seq, | ||||
|                     # wait_for_cancel=cancel_ctx, | ||||
|                     # be_slow=(slow_side == 'child'), | ||||
|                     # allow_overruns_side=allow_overruns_side, | ||||
| 
 | ||||
|     with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||
|                 ) as (ctx, sent), | ||||
|                 ctx.open_stream() as ipc, | ||||
|             ): | ||||
|                 assert sent is None | ||||
| 
 | ||||
|                 seq = list(range(10)) | ||||
|                 for i in seq: | ||||
|                     await ipc.send(i) | ||||
|                     rx: int = await ipc.receive() | ||||
|                     assert rx == i | ||||
| 
 | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|     with pytest.raises(RuntimeError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     assert excinfo.value.type == TypeError | ||||
|     assert 'Invalid Operation' in repr(excinfo.value) | ||||
|  |  | |||
|  | @ -9,25 +9,24 @@ import itertools | |||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| from tractor._testing import tractor_test | ||||
| import trio | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_reg_then_unreg(arb_addr): | ||||
| async def test_reg_then_unreg(reg_addr): | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     assert len(actor._registry) == 1  # only self is registered | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=arb_addr, | ||||
|         registry_addrs=[reg_addr], | ||||
|     ) as n: | ||||
| 
 | ||||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||
|         uid = portal.channel.uid | ||||
| 
 | ||||
|         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             # this local actor should be the arbiter | ||||
|             assert actor is aportal.actor | ||||
| 
 | ||||
|  | @ -53,15 +52,27 @@ async def hi(): | |||
|     return the_line.format(tractor.current_actor().name) | ||||
| 
 | ||||
| 
 | ||||
| async def say_hello(other_actor): | ||||
| async def say_hello( | ||||
|     other_actor: str, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     await trio.sleep(1)  # wait for other actor to spawn | ||||
|     async with tractor.find_actor(other_actor) as portal: | ||||
|     async with tractor.find_actor( | ||||
|         other_actor, | ||||
|         registry_addrs=[reg_addr], | ||||
|     ) as portal: | ||||
|         assert portal is not None | ||||
|         return await portal.run(__name__, 'hi') | ||||
| 
 | ||||
| 
 | ||||
| async def say_hello_use_wait(other_actor): | ||||
|     async with tractor.wait_for_actor(other_actor) as portal: | ||||
| async def say_hello_use_wait( | ||||
|     other_actor: str, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     async with tractor.wait_for_actor( | ||||
|         other_actor, | ||||
|         registry_addr=reg_addr, | ||||
|     ) as portal: | ||||
|         assert portal is not None | ||||
|         result = await portal.run(__name__, 'hi') | ||||
|         return result | ||||
|  | @ -69,21 +80,29 @@ async def say_hello_use_wait(other_actor): | |||
| 
 | ||||
| @tractor_test | ||||
| @pytest.mark.parametrize('func', [say_hello, say_hello_use_wait]) | ||||
| async def test_trynamic_trio(func, start_method, arb_addr): | ||||
|     """Main tractor entry point, the "master" process (for now | ||||
|     acts as the "director"). | ||||
|     """ | ||||
| async def test_trynamic_trio( | ||||
|     func, | ||||
|     start_method, | ||||
|     reg_addr, | ||||
| ): | ||||
|     ''' | ||||
|     Root actor acting as the "director" and running one-shot-task-actors | ||||
|     for the directed subs. | ||||
| 
 | ||||
|     ''' | ||||
|     async with tractor.open_nursery() as n: | ||||
|         print("Alright... Action!") | ||||
| 
 | ||||
|         donny = await n.run_in_actor( | ||||
|             func, | ||||
|             other_actor='gretchen', | ||||
|             reg_addr=reg_addr, | ||||
|             name='donny', | ||||
|         ) | ||||
|         gretchen = await n.run_in_actor( | ||||
|             func, | ||||
|             other_actor='donny', | ||||
|             reg_addr=reg_addr, | ||||
|             name='gretchen', | ||||
|         ) | ||||
|         print(await gretchen.result()) | ||||
|  | @ -131,7 +150,7 @@ async def unpack_reg(actor_or_portal): | |||
| 
 | ||||
| 
 | ||||
| async def spawn_and_check_registry( | ||||
|     arb_addr: tuple, | ||||
|     reg_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     remote_arbiter: bool = False, | ||||
|     with_streaming: bool = False, | ||||
|  | @ -139,9 +158,9 @@ async def spawn_and_check_registry( | |||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=arb_addr, | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             # runtime needs to be up to call this | ||||
|             actor = tractor.current_actor() | ||||
| 
 | ||||
|  | @ -213,17 +232,19 @@ async def spawn_and_check_registry( | |||
| def test_subactors_unregister_on_cancel( | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     with_streaming, | ||||
| ): | ||||
|     """Verify that cancelling a nursery results in all subactors | ||||
|     ''' | ||||
|     Verify that cancelling a nursery results in all subactors | ||||
|     deregistering themselves with the arbiter. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     with pytest.raises(KeyboardInterrupt): | ||||
|         trio.run( | ||||
|             partial( | ||||
|                 spawn_and_check_registry, | ||||
|                 arb_addr, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=False, | ||||
|                 with_streaming=with_streaming, | ||||
|  | @ -237,7 +258,7 @@ def test_subactors_unregister_on_cancel_remote_daemon( | |||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     with_streaming, | ||||
| ): | ||||
|     """Verify that cancelling a nursery results in all subactors | ||||
|  | @ -248,7 +269,7 @@ def test_subactors_unregister_on_cancel_remote_daemon( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 spawn_and_check_registry, | ||||
|                 arb_addr, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=True, | ||||
|                 with_streaming=with_streaming, | ||||
|  | @ -262,7 +283,7 @@ async def streamer(agen): | |||
| 
 | ||||
| 
 | ||||
| async def close_chans_before_nursery( | ||||
|     arb_addr: tuple, | ||||
|     reg_addr: tuple, | ||||
|     use_signal: bool, | ||||
|     remote_arbiter: bool = False, | ||||
| ) -> None: | ||||
|  | @ -275,9 +296,9 @@ async def close_chans_before_nursery( | |||
|         entries_at_end = 1 | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=arb_addr, | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_arbiter(*arb_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             try: | ||||
|                 get_reg = partial(unpack_reg, aportal) | ||||
| 
 | ||||
|  | @ -329,7 +350,7 @@ async def close_chans_before_nursery( | |||
| def test_close_channel_explicit( | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
| ): | ||||
|     """Verify that closing a stream explicitly and killing the actor's | ||||
|     "root nursery" **before** the containing nursery tears down also | ||||
|  | @ -339,7 +360,7 @@ def test_close_channel_explicit( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 close_chans_before_nursery, | ||||
|                 arb_addr, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=False, | ||||
|             ), | ||||
|  | @ -351,7 +372,7 @@ def test_close_channel_explicit_remote_arbiter( | |||
|     daemon, | ||||
|     start_method, | ||||
|     use_signal, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
| ): | ||||
|     """Verify that closing a stream explicitly and killing the actor's | ||||
|     "root nursery" **before** the containing nursery tears down also | ||||
|  | @ -361,7 +382,7 @@ def test_close_channel_explicit_remote_arbiter( | |||
|         trio.run( | ||||
|             partial( | ||||
|                 close_chans_before_nursery, | ||||
|                 arb_addr, | ||||
|                 reg_addr, | ||||
|                 use_signal, | ||||
|                 remote_arbiter=True, | ||||
|             ), | ||||
|  |  | |||
|  | @ -11,8 +11,7 @@ import platform | |||
| import shutil | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| from conftest import ( | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
| ) | ||||
| 
 | ||||
|  | @ -20,8 +19,8 @@ from conftest import ( | |||
| @pytest.fixture | ||||
| def run_example_in_subproc( | ||||
|     loglevel: str, | ||||
|     testdir, | ||||
|     arb_addr: tuple[str, int], | ||||
|     testdir: pytest.Testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| 
 | ||||
|     @contextmanager | ||||
|  |  | |||
|  | @ -2,22 +2,34 @@ | |||
| The hipster way to force SC onto the stdlib's "async": 'infection mode'. | ||||
| 
 | ||||
| ''' | ||||
| from typing import Optional, Iterable, Union | ||||
| import asyncio | ||||
| import builtins | ||||
| from contextlib import ExitStack | ||||
| # from functools import partial | ||||
| import itertools | ||||
| import importlib | ||||
| import os | ||||
| from pathlib import Path | ||||
| import signal | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     Iterable, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     current_actor, | ||||
|     Actor, | ||||
|     to_asyncio, | ||||
|     RemoteActorError, | ||||
|     ContextCancelled, | ||||
|     _state, | ||||
| ) | ||||
| from tractor.trionics import BroadcastReceiver | ||||
| from tractor._testing import expect_ctxc | ||||
| 
 | ||||
| 
 | ||||
| async def sleep_and_err( | ||||
|  | @ -25,8 +37,8 @@ async def sleep_and_err( | |||
| 
 | ||||
|     # just signature placeholders for compat with | ||||
|     # ``to_asyncio.open_channel_from()`` | ||||
|     to_trio: Optional[trio.MemorySendChannel] = None, | ||||
|     from_trio: Optional[asyncio.Queue] = None, | ||||
|     to_trio: trio.MemorySendChannel|None = None, | ||||
|     from_trio: asyncio.Queue|None = None, | ||||
| 
 | ||||
| ): | ||||
|     if to_trio: | ||||
|  | @ -36,7 +48,7 @@ async def sleep_and_err( | |||
|     assert 0 | ||||
| 
 | ||||
| 
 | ||||
| async def sleep_forever(): | ||||
| async def aio_sleep_forever(): | ||||
|     await asyncio.sleep(float('inf')) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -44,10 +56,10 @@ async def trio_cancels_single_aio_task(): | |||
| 
 | ||||
|     # spawn an ``asyncio`` task to run a func and return result | ||||
|     with trio.move_on_after(.2): | ||||
|         await tractor.to_asyncio.run_task(sleep_forever) | ||||
|         await tractor.to_asyncio.run_task(aio_sleep_forever) | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_cancels_aio_on_actor_side(arb_addr): | ||||
| def test_trio_cancels_aio_on_actor_side(reg_addr): | ||||
|     ''' | ||||
|     Spawn an infected actor that is cancelled by the ``trio`` side | ||||
|     task using std cancel scope apis. | ||||
|  | @ -55,7 +67,7 @@ def test_trio_cancels_aio_on_actor_side(arb_addr): | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr | ||||
|             registry_addrs=[reg_addr] | ||||
|         ) as n: | ||||
|             await n.run_in_actor( | ||||
|                 trio_cancels_single_aio_task, | ||||
|  | @ -66,14 +78,22 @@ def test_trio_cancels_aio_on_actor_side(arb_addr): | |||
| 
 | ||||
| 
 | ||||
| async def asyncio_actor( | ||||
| 
 | ||||
|     target: str, | ||||
|     expect_err: Optional[Exception] = None | ||||
|     expect_err: Exception|None = None | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     assert tractor.current_actor().is_infected_aio() | ||||
|     target = globals()[target] | ||||
|     # ensure internal runtime state is consistent | ||||
|     actor: Actor = tractor.current_actor() | ||||
|     assert ( | ||||
|         actor.is_infected_aio() | ||||
|         and | ||||
|         actor._infected_aio | ||||
|         and | ||||
|         _state._runtime_vars['_is_infected_aio'] | ||||
|     ) | ||||
| 
 | ||||
|     target: Callable = globals()[target] | ||||
| 
 | ||||
|     if '.' in expect_err: | ||||
|         modpath, _, name = expect_err.rpartition('.') | ||||
|  | @ -89,12 +109,14 @@ async def asyncio_actor( | |||
| 
 | ||||
|     except BaseException as err: | ||||
|         if expect_err: | ||||
|             assert isinstance(err, error_type) | ||||
|             assert isinstance(err, error_type), ( | ||||
|                 f'{type(err)} is not {error_type}?' | ||||
|             ) | ||||
| 
 | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
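| # NOTE: a minimal sketch (not part of the diff) of how a dotted | ||||
| # `expect_err` str like 'trio.Cancelled' resolves to an exception | ||||
| # type via the same `importlib` approach as in `asyncio_actor()`: | ||||
| # | ||||
| #   modpath, _, name = 'trio.Cancelled'.rpartition('.') | ||||
| #   error_type = getattr(importlib.import_module(modpath), name) | ||||
| #   assert error_type is trio.Cancelled | ||||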
| def test_aio_simple_error(arb_addr): | ||||
| def test_aio_simple_error(reg_addr): | ||||
|     ''' | ||||
|     Verify a simple remote asyncio error propagates back through trio | ||||
|     to the parent actor. | ||||
|  | @ -103,7 +125,7 @@ def test_aio_simple_error(arb_addr): | |||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr | ||||
|             registry_addrs=[reg_addr] | ||||
|         ) as n: | ||||
|             await n.run_in_actor( | ||||
|                 asyncio_actor, | ||||
|  | @ -112,15 +134,26 @@ def test_aio_simple_error(arb_addr): | |||
|                 infect_asyncio=True, | ||||
|             ) | ||||
| 
 | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|     with pytest.raises( | ||||
|         expected_exception=(RemoteActorError, ExceptionGroup), | ||||
|     ) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     err = excinfo.value | ||||
| 
 | ||||
|     # might get multiple `trio.Cancelled`s as well inside an inception | ||||
|     if isinstance(err, ExceptionGroup): | ||||
|         err = next(itertools.dropwhile( | ||||
|             lambda exc: not isinstance(exc, tractor.RemoteActorError), | ||||
|             err.exceptions | ||||
|         )) | ||||
|         assert err | ||||
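|     # NOTE: the `dropwhile` scan above is (roughly) equivalent to | ||||
|     # this hypothetical comprehension-based version: | ||||
|     # | ||||
|     #   err = next( | ||||
|     #       exc for exc in excinfo.value.exceptions | ||||
|     #       if isinstance(exc, tractor.RemoteActorError) | ||||
|     #   ) | ||||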
| 
 | ||||
|     assert isinstance(err, RemoteActorError) | ||||
|     assert err.type == AssertionError | ||||
|     assert err.boxed_type is AssertionError | ||||
| 
 | ||||
| 
 | ||||
| def test_tractor_cancels_aio(arb_addr): | ||||
| def test_tractor_cancels_aio(reg_addr): | ||||
|     ''' | ||||
|     Verify we can cancel a spawned asyncio task gracefully. | ||||
| 
 | ||||
|  | @ -129,7 +162,7 @@ def test_tractor_cancels_aio(arb_addr): | |||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 asyncio_actor, | ||||
|                 target='sleep_forever', | ||||
|                 target='aio_sleep_forever', | ||||
|                 expect_err='trio.Cancelled', | ||||
|                 infect_asyncio=True, | ||||
|             ) | ||||
|  | @ -139,7 +172,7 @@ def test_tractor_cancels_aio(arb_addr): | |||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_cancels_aio(arb_addr): | ||||
| def test_trio_cancels_aio(reg_addr): | ||||
|     ''' | ||||
|     Much like the above test with ``tractor.Portal.cancel_actor()`` | ||||
|     except we just use a standard ``trio`` cancellation api. | ||||
|  | @ -150,10 +183,10 @@ def test_trio_cancels_aio(arb_addr): | |||
|         with trio.move_on_after(1): | ||||
|             # cancel the nursery shortly after boot | ||||
| 
 | ||||
|             async with tractor.open_nursery() as n: | ||||
|                 await n.run_in_actor( | ||||
|             async with tractor.open_nursery() as tn: | ||||
|                 await tn.run_in_actor( | ||||
|                     asyncio_actor, | ||||
|                     target='sleep_forever', | ||||
|                     target='aio_sleep_forever', | ||||
|                     expect_err='trio.Cancelled', | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|  | @ -171,9 +204,12 @@ async def trio_ctx( | |||
|     # this will block until the ``asyncio`` task sends a "first" | ||||
|     # message. | ||||
|     with trio.fail_after(2): | ||||
|         try: | ||||
|             async with ( | ||||
|             trio.open_nursery() as n, | ||||
| 
 | ||||
|                 trio.open_nursery( | ||||
|                     # TODO, for new `trio` / py3.13 | ||||
|                     # strict_exception_groups=False, | ||||
|                 ) as tn, | ||||
|                 tractor.to_asyncio.open_channel_from( | ||||
|                     sleep_and_err, | ||||
|                 ) as (first, chan), | ||||
|  | @ -182,19 +218,28 @@ async def trio_ctx( | |||
|                 assert first == 'start' | ||||
| 
 | ||||
|                 # spawn another asyncio task for the heck of it. | ||||
|             n.start_soon( | ||||
|                 tn.start_soon( | ||||
|                     tractor.to_asyncio.run_task, | ||||
|                 sleep_forever, | ||||
|                     aio_sleep_forever, | ||||
|                 ) | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
|         # TODO, factor this into a `trionics.collapse()`? | ||||
|         except* BaseException as beg: | ||||
|             # await tractor.pause(shield=True) | ||||
|             if len(excs := beg.exceptions) == 1: | ||||
|                 raise excs[0] | ||||
|             else: | ||||
|                 raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'parent_cancels', [False, True], | ||||
|     'parent_cancels', | ||||
|     ['context', 'actor', False], | ||||
|     ids='parent_actor_cancels_child={}'.format | ||||
| ) | ||||
| def test_context_spawns_aio_task_that_errors( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     parent_cancels: str|bool, | ||||
| ): | ||||
|     ''' | ||||
|  | @ -204,7 +249,6 @@ def test_context_spawns_aio_task_that_errors( | |||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
| 
 | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery() as n: | ||||
|                 p = await n.start_actor( | ||||
|  | @ -214,18 +258,36 @@ def test_context_spawns_aio_task_that_errors( | |||
|                     # debug_mode=True, | ||||
|                     loglevel='cancel', | ||||
|                 ) | ||||
|                 async with p.open_context( | ||||
|                 async with ( | ||||
|                     expect_ctxc( | ||||
|                         yay=parent_cancels == 'actor', | ||||
|                     ), | ||||
|                     p.open_context( | ||||
|                         trio_ctx, | ||||
|                 ) as (ctx, first): | ||||
|                     ) as (ctx, first), | ||||
|                 ): | ||||
| 
 | ||||
|                     assert first == 'start' | ||||
| 
 | ||||
|                     if parent_cancels: | ||||
|                     if parent_cancels == 'actor': | ||||
|                         await p.cancel_actor() | ||||
| 
 | ||||
|                     elif parent_cancels == 'context': | ||||
|                         await ctx.cancel() | ||||
| 
 | ||||
|                     else: | ||||
|                         await trio.sleep_forever() | ||||
| 
 | ||||
|         return await ctx.result() | ||||
|                 async with expect_ctxc( | ||||
|                     yay=parent_cancels == 'actor', | ||||
|                 ): | ||||
|                     await ctx.result() | ||||
| 
 | ||||
|                 if parent_cancels == 'context': | ||||
|                     # to tear down sub-actor | ||||
|                     await p.cancel_actor() | ||||
| 
 | ||||
|         return ctx.outcome | ||||
| 
 | ||||
|     if parent_cancels: | ||||
|         # bc the parent made the cancel request, | ||||
|  | @ -242,7 +304,7 @@ def test_context_spawns_aio_task_that_errors( | |||
| 
 | ||||
|         err = excinfo.value | ||||
|         assert isinstance(err, expect) | ||||
|         assert err.type == AssertionError | ||||
|         assert err.boxed_type is AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def aio_cancel(): | ||||
|  | @ -251,29 +313,55 @@ async def aio_cancel(): | |||
| 
 | ||||
|     ''' | ||||
|     await asyncio.sleep(0.5) | ||||
|     task = asyncio.current_task() | ||||
| 
 | ||||
|     # cancel and enter sleep | ||||
|     task = asyncio.current_task() | ||||
|     task.cancel() | ||||
|     await sleep_forever() | ||||
|     await aio_sleep_forever() | ||||
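| # NOTE: an aside (not from the diff): `Task.cancel()` only | ||||
| # *requests* cancellation; `asyncio.CancelledError` is delivered | ||||
| # at the task's next await-checkpoint, e.g.: | ||||
| # | ||||
| #   task = asyncio.current_task() | ||||
| #   task.cancel() | ||||
| #   await asyncio.sleep(0)  # <- CancelledError raised here | ||||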
| 
 | ||||
| 
 | ||||
| def test_aio_cancelled_from_aio_causes_trio_cancelled(arb_addr): | ||||
| def test_aio_cancelled_from_aio_causes_trio_cancelled( | ||||
|     reg_addr: tuple, | ||||
| ): | ||||
|     ''' | ||||
|     When the `asyncio.Task` cancels itself the `trio` side should | ||||
|     also cancel and teardown and relay the cancellation cross-process | ||||
|     to the caller (parent). | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             await n.run_in_actor( | ||||
| 
 | ||||
|         an: tractor.ActorNursery | ||||
|         async with tractor.open_nursery() as an: | ||||
|             p: tractor.Portal = await an.run_in_actor( | ||||
|                 asyncio_actor, | ||||
|                 target='aio_cancel', | ||||
|                 expect_err='tractor.to_asyncio.AsyncioCancelled', | ||||
|                 infect_asyncio=True, | ||||
|             ) | ||||
|             # NOTE: normally the `an.__aexit__()` waits on the | ||||
|             # portal's result but we do it explicitly here | ||||
|             # to avoid indent levels. | ||||
|             with trio.fail_after(1): | ||||
|                 await p.wait_for_result() | ||||
| 
 | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|     with pytest.raises( | ||||
|         expected_exception=(RemoteActorError, ExceptionGroup), | ||||
|     ) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     # ensure boxed error is correct | ||||
|     assert excinfo.value.type == to_asyncio.AsyncioCancelled | ||||
|     # might get multiple `trio.Cancelled`s as well inside an inception | ||||
|     err: RemoteActorError|ExceptionGroup = excinfo.value | ||||
|     if isinstance(err, ExceptionGroup): | ||||
|         err = next(itertools.dropwhile( | ||||
|             lambda exc: not isinstance(exc, tractor.RemoteActorError), | ||||
|             err.exceptions | ||||
|         )) | ||||
|         assert err | ||||
| 
 | ||||
|     # relayed boxed error should be our `trio`-task's | ||||
|     # cancel-signal-proxy-equivalent of `asyncio.CancelledError`. | ||||
|     assert err.boxed_type == to_asyncio.AsyncioCancelled | ||||
| 
 | ||||
| 
 | ||||
| # TODO: verify open_channel_from will fail on this.. | ||||
|  | @ -314,7 +402,6 @@ async def push_from_aio_task( | |||
| 
 | ||||
| 
 | ||||
| async def stream_from_aio( | ||||
| 
 | ||||
|     exit_early: bool = False, | ||||
|     raise_err: bool = False, | ||||
|     aio_raise_err: bool = False, | ||||
|  | @ -332,6 +419,7 @@ async def stream_from_aio( | |||
|             sequence=seq, | ||||
|             expect_cancel=raise_err or exit_early, | ||||
|             fail_early=aio_raise_err, | ||||
| 
 | ||||
|         ) as (first, chan): | ||||
| 
 | ||||
|             assert first is True | ||||
|  | @ -350,10 +438,15 @@ async def stream_from_aio( | |||
|                         if raise_err: | ||||
|                             raise Exception | ||||
|                         elif exit_early: | ||||
|                             print('`consume()` breaking early!\n') | ||||
|                             break | ||||
| 
 | ||||
|                 print('returning from `consume()`..\n') | ||||
| 
 | ||||
|             # run 2 tasks each pulling from | ||||
|             # the inter-task-channel with the 2nd | ||||
|             # using a fan-out `BroadcastReceiver`. | ||||
|             if fan_out: | ||||
|                 # start second task that get's the same stream value set. | ||||
|                 async with ( | ||||
| 
 | ||||
|                     # NOTE: this has to come first to avoid | ||||
|  | @ -363,11 +456,19 @@ async def stream_from_aio( | |||
| 
 | ||||
|                     trio.open_nursery() as n, | ||||
|                 ): | ||||
|                     # start 2nd task that get's broadcast the same | ||||
|                     # value set. | ||||
|                     n.start_soon(consume, br) | ||||
|                     await consume(chan) | ||||
| 
 | ||||
|             else: | ||||
|                 await consume(chan) | ||||
|     except BaseException as err: | ||||
|         import logging | ||||
|         log = logging.getLogger() | ||||
|         log.exception('aio-subactor errored!\n') | ||||
|         raise err | ||||
| 
 | ||||
|     finally: | ||||
| 
 | ||||
|         if ( | ||||
|  | @ -388,14 +489,15 @@ async def stream_from_aio( | |||
|             assert not fan_out | ||||
|             assert pulled == expect[:51] | ||||
| 
 | ||||
|         print('trio guest mode task completed!') | ||||
|         print('trio guest-mode task completed!') | ||||
|         assert chan._aio_task.done() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'fan_out', [False, True], | ||||
|     ids='fan_out_w_chan_subscribe={}'.format | ||||
| ) | ||||
| def test_basic_interloop_channel_stream(arb_addr, fan_out): | ||||
| def test_basic_interloop_channel_stream(reg_addr, fan_out): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|  | @ -403,13 +505,14 @@ def test_basic_interloop_channel_stream(arb_addr, fan_out): | |||
|                 infect_asyncio=True, | ||||
|                 fan_out=fan_out, | ||||
|             ) | ||||
|             # should raise RAE directly | ||||
|             await portal.result() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: parametrize the above test and avoid the duplication here? | ||||
| def test_trio_error_cancels_intertask_chan(arb_addr): | ||||
| def test_trio_error_cancels_intertask_chan(reg_addr): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|  | @ -420,30 +523,47 @@ def test_trio_error_cancels_intertask_chan(arb_addr): | |||
|             # should trigger remote actor error | ||||
|             await portal.result() | ||||
| 
 | ||||
|     with pytest.raises(BaseExceptionGroup) as excinfo: | ||||
|     with pytest.raises(RemoteActorError) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     # ensure boxed errors | ||||
|     for exc in excinfo.value.exceptions: | ||||
|         assert exc.type == Exception | ||||
|     # ensure boxed error type | ||||
|     assert excinfo.value.boxed_type is Exception | ||||
| 
 | ||||
| 
 | ||||
| def test_trio_closes_early_and_channel_exits(arb_addr): | ||||
| def test_trio_closes_early_and_channel_exits( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     ''' | ||||
|     Check that if the `trio`-task "exits early" on `async for`ing the | ||||
|     inter-task-channel (via a `break`) we exit silently from the | ||||
|     `open_channel_from()` block and get a final `Return[None]` msg. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|         with trio.fail_after(2): | ||||
|             async with tractor.open_nursery( | ||||
|                 # debug_mode=True, | ||||
|                 # enable_stack_on_sig=True, | ||||
|             ) as n: | ||||
|                 portal = await n.run_in_actor( | ||||
|                     stream_from_aio, | ||||
|                     exit_early=True, | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|             # should trigger remote actor error | ||||
|             await portal.result() | ||||
|                 # should raise RAE directly | ||||
|                 print('waiting on final infected subactor result..') | ||||
|                 res: None = await portal.wait_for_result() | ||||
|                 assert res is None | ||||
|                 print(f'infected subactor returned result: {res!r}\n') | ||||
| 
 | ||||
|     # should be a quiet exit on a simple channel exit | ||||
|     trio.run(main) | ||||
|     trio.run( | ||||
|         main, | ||||
|         # strict_exception_groups=False, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def test_aio_errors_and_channel_propagates_and_closes(arb_addr): | ||||
| def test_aio_errors_and_channel_propagates_and_closes(reg_addr): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             portal = await n.run_in_actor( | ||||
|  | @ -451,26 +571,23 @@ def test_aio_errors_and_channel_propagates_and_closes(arb_addr): | |||
|                 aio_raise_err=True, | ||||
|                 infect_asyncio=True, | ||||
|             ) | ||||
|             # should trigger remote actor error | ||||
|             # should trigger RAE directly, not an eg. | ||||
|             await portal.result() | ||||
| 
 | ||||
|     with pytest.raises(BaseExceptionGroup) as excinfo: | ||||
|     with pytest.raises( | ||||
|         # NOTE: bc we directly wait on `Portal.result()` instead | ||||
|         # of capturing it inside the `ActorNursery` machinery. | ||||
|         expected_exception=RemoteActorError, | ||||
|     ) as excinfo: | ||||
|         trio.run(main) | ||||
| 
 | ||||
|     # ensure boxed errors | ||||
|     for exc in excinfo.value.exceptions: | ||||
|         assert exc.type == Exception | ||||
|     assert excinfo.value.boxed_type is Exception | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def trio_to_aio_echo_server( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
| 
 | ||||
|     async def aio_echo_server( | ||||
| async def aio_echo_server( | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     ) -> None: | ||||
| ) -> None: | ||||
| 
 | ||||
|     to_trio.send_nowait('start') | ||||
| 
 | ||||
|  | @ -488,15 +605,19 @@ async def trio_to_aio_echo_server( | |||
| 
 | ||||
|     print('exiting asyncio task') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def trio_to_aio_echo_server( | ||||
|     ctx: tractor.Context|None, | ||||
| ): | ||||
|     async with to_asyncio.open_channel_from( | ||||
|         aio_echo_server, | ||||
|     ) as (first, chan): | ||||
| 
 | ||||
|         assert first == 'start' | ||||
| 
 | ||||
|         await ctx.started(first) | ||||
| 
 | ||||
|         async with ctx.open_stream() as stream: | ||||
| 
 | ||||
|             async for msg in stream: | ||||
|                 print(f'asyncio echoing {msg}') | ||||
|                 await chan.send(msg) | ||||
|  | @ -520,7 +641,7 @@ async def trio_to_aio_echo_server( | |||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_echoserver_detailed_mechanics( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     raise_error_mid_stream, | ||||
| ): | ||||
| 
 | ||||
|  | @ -560,7 +681,8 @@ def test_echoserver_detailed_mechanics( | |||
|                             pass | ||||
|                         else: | ||||
|                             pytest.fail( | ||||
|                                 "stream wasn't stopped after sentinel?!") | ||||
|                                 'stream not stopped after sentinel?!' | ||||
|                             ) | ||||
| 
 | ||||
|             # TODO: the case where this blocks and | ||||
|             # is cancelled by kbi or out of task cancellation | ||||
|  | @ -572,3 +694,272 @@ def test_echoserver_detailed_mechanics( | |||
| 
 | ||||
|     else: | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def manage_file( | ||||
|     ctx: tractor.Context, | ||||
|     tmp_path_str: str, | ||||
|     send_sigint_to: str, | ||||
|     trio_side_is_shielded: bool = True, | ||||
|     bg_aio_task: bool = False, | ||||
| ): | ||||
|     ''' | ||||
|     Start an `asyncio` task that just sleeps after registering a teardown | ||||
|     callback with `Actor.lifetime_stack`. Trigger a SIGINT to kill the actor | ||||
|     tree and ensure the stack is closed in the infected mode child. | ||||
| 
 | ||||
|     To verify the teardown state just write a tmpfile to the `testdir` | ||||
|     and delete it on actor close. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     tmp_path: Path = Path(tmp_path_str) | ||||
|     tmp_file: Path = tmp_path / f'{" ".join(ctx._actor.uid)}.file' | ||||
| 
 | ||||
|     # create the tmp file and tell the parent where it's at | ||||
|     assert not tmp_file.is_file() | ||||
|     tmp_file.touch() | ||||
| 
 | ||||
|     stack: ExitStack = current_actor().lifetime_stack | ||||
|     stack.callback(tmp_file.unlink) | ||||
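|     # NOTE: just an illustrative aside (not part of the diff): | ||||
|     # `ExitStack.callback()` registers a cleanup fn which runs | ||||
|     # LIFO when the stack closes on actor teardown, e.g.: | ||||
|     # | ||||
|     #   from contextlib import ExitStack | ||||
|     #   with ExitStack() as stack: | ||||
|     #       stack.callback(print, 'runs on close!') | ||||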
| 
 | ||||
|     await ctx.started(( | ||||
|         str(tmp_file), | ||||
|         os.getpid(), | ||||
|     )) | ||||
| 
 | ||||
|     # expect to be cancelled from here! | ||||
|     try: | ||||
| 
 | ||||
|         # NOTE: turns out you don't even need to sched an aio task | ||||
|         # since the original issue, even though seemingly was due to | ||||
|         # the guest-run being abandoned + a `._debug.pause()` inside | ||||
|         # `._runtime._async_main()` (which was originally trying to | ||||
|         # debug the `.lifetime_stack` not closing), IS NOT actually | ||||
|         # the core issue? | ||||
|         # | ||||
|         # further notes: | ||||
|         # | ||||
|         # - `trio` only issues the " RuntimeWarning: Trio guest run | ||||
|         #   got abandoned without properly finishing... weird stuff | ||||
|         #   might happen" IFF you DO run a asyncio task here, BUT | ||||
|         # - the original issue of the `.lifetime_stack` not closing | ||||
|         #   will still happen even if you don't run an `asyncio` task | ||||
|         #   here even though the "abandon" messgage won't be shown.. | ||||
|         # | ||||
|         # => ????? honestly i'm lost but it seems to be some issue | ||||
|         #   with `asyncio` and SIGINT.. | ||||
|         # | ||||
|         # honestly, this REALLY reminds me why i haven't used | ||||
|         # `asyncio` by choice in years.. XD | ||||
|         # | ||||
|         async with trio.open_nursery() as tn: | ||||
|             if bg_aio_task: | ||||
|                 tn.start_soon( | ||||
|                     tractor.to_asyncio.run_task, | ||||
|                     aio_sleep_forever, | ||||
|                 ) | ||||
| 
 | ||||
|             # XXX don't-need/doesn't-make-a-diff right | ||||
|             # since we're already doing it from parent? | ||||
|             # if send_sigint_to == 'child': | ||||
|             #     os.kill( | ||||
|             #         os.getpid(), | ||||
|             #         signal.SIGINT, | ||||
|             #     ) | ||||
| 
 | ||||
|             # XXX spend a half sec doing shielded checkpointing to | ||||
|             # ensure that despite the `trio`-side task ignoring the | ||||
|             # SIGINT, the `asyncio` side won't abandon the guest-run! | ||||
|             if trio_side_is_shielded: | ||||
|                 with trio.CancelScope(shield=True): | ||||
|                     for i in range(5): | ||||
|                         await trio.sleep(0.1) | ||||
| 
 | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|     # signalled manually at the OS level (aka KBI) by the parent actor. | ||||
|     except KeyboardInterrupt: | ||||
|         print('child raised KBI..') | ||||
|         assert tmp_file.exists() | ||||
|         raise | ||||
| 
 | ||||
|     raise RuntimeError('shoulda received a KBI?') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'trio_side_is_shielded', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'trio_side_no_shielding', | ||||
|         'trio_side_does_shielded_work', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'send_sigint_to', | ||||
|     [ | ||||
|         'child', | ||||
|         'parent', | ||||
|     ], | ||||
|     ids='send_SIGINT_to={}'.format, | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'bg_aio_task', | ||||
|     [ | ||||
|         False, | ||||
| 
 | ||||
|         # NOTE: (and see notes in `manage_file()` above as well) if | ||||
|         # we FOR SURE SPAWN AN AIO TASK in the child it seems the | ||||
|         # "silent-abandon" case (as is described in detail in | ||||
|         # `to_asyncio.run_as_asyncio_guest()`) does not happen and | ||||
|         # `asyncio`'s loop will at least abandon the `trio` side | ||||
|         # loudly? .. prolly the state-spot to start looking for | ||||
|         # a soln that results in NO ABANDONMENT.. XD | ||||
|         True, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'just_trio_sleep', | ||||
|         'bg_aio_task', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'wait_for_ctx', | ||||
|     [ | ||||
|         False, | ||||
|         True, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'raise_KBI_in_rent', | ||||
|         'wait_for_ctx', | ||||
|     ], | ||||
| ) | ||||
| def test_sigint_closes_lifetime_stack( | ||||
|     tmp_path: Path, | ||||
|     wait_for_ctx: bool, | ||||
|     bg_aio_task: bool, | ||||
|     trio_side_is_shielded: bool, | ||||
|     debug_mode: bool, | ||||
|     send_sigint_to: str, | ||||
| ): | ||||
|     ''' | ||||
|     Ensure that an infected child can use the `Actor.lifetime_stack` | ||||
|     to make a file on boot and it's automatically cleaned up by the | ||||
|     actor-lifetime-linked exit stack closure. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         try: | ||||
|             an: tractor.ActorNursery | ||||
|             async with tractor.open_nursery( | ||||
|                 debug_mode=debug_mode, | ||||
|             ) as an: | ||||
|                 p: tractor.Portal = await an.start_actor( | ||||
|                     'file_mngr', | ||||
|                     enable_modules=[__name__], | ||||
|                     infect_asyncio=True, | ||||
|                 ) | ||||
|                 async with p.open_context( | ||||
|                     manage_file, | ||||
|                     tmp_path_str=str(tmp_path), | ||||
|                     send_sigint_to=send_sigint_to, | ||||
|                     bg_aio_task=bg_aio_task, | ||||
|                     trio_side_is_shielded=trio_side_is_shielded, | ||||
|                 ) as (ctx, first): | ||||
| 
 | ||||
|                     path_str, cpid = first | ||||
|                     tmp_file: Path = Path(path_str) | ||||
|                     assert tmp_file.exists() | ||||
| 
 | ||||
|                     # XXX originally to simulate what (hopefully) | ||||
|                     # the below now triggers.. had to manually | ||||
|                     # trigger a SIGINT from a ctl-c in the root. | ||||
|                     # await trio.sleep_forever() | ||||
| 
 | ||||
|                     # XXX NOTE XXX signal infected-`asyncio` child to | ||||
|                     # OS-cancel with SIGINT; this should trigger the | ||||
|                     # bad `asyncio` cancel behaviour that can cause | ||||
|                     # a guest-run abandon as was seen causing | ||||
|                     # shm-buffer leaks in `piker`'s live quote stream | ||||
|                     # subsystems! | ||||
|                     # | ||||
|                     await trio.sleep(.2) | ||||
|                     pid: int = ( | ||||
|                         cpid if send_sigint_to == 'child' | ||||
|                         else os.getpid() | ||||
|                     ) | ||||
|                     os.kill( | ||||
|                         pid, | ||||
|                         signal.SIGINT, | ||||
|                     ) | ||||
| 
 | ||||
|                     # XXX CASE 1: without the bug fixed, in | ||||
|                     # the non-KBI-raised-in-parent case, this | ||||
|                     # timeout should trigger! | ||||
|                     if wait_for_ctx: | ||||
|                         print('waiting for ctx outcome in parent..') | ||||
|                         try: | ||||
|                             with trio.fail_after(1): | ||||
|                                 await ctx.wait_for_result() | ||||
|                         except tractor.ContextCancelled as ctxc: | ||||
|                             assert ctxc.canceller == ctx.chan.uid | ||||
|                             raise | ||||
| 
 | ||||
|                     # XXX CASE 2: this seems to be the source of the | ||||
|                     # original issue which exhibited BEFORE we put | ||||
|                     # a `Actor.cancel_soon()` inside | ||||
|                     # `run_as_asyncio_guest()`.. | ||||
|                     else: | ||||
|                         raise KeyboardInterrupt | ||||
| 
 | ||||
|                 pytest.fail('should have raised some kinda error?!?') | ||||
| 
 | ||||
|         except ( | ||||
|             KeyboardInterrupt, | ||||
|             ContextCancelled, | ||||
|         ): | ||||
|             # XXX CASE 2: without the bug fixed, in the | ||||
|             # KBI-raised-in-parent case, the actor teardown should | ||||
|             # never get run (silently abandoned by `asyncio`..) and | ||||
|             # thus the file should leak! | ||||
|             assert not tmp_file.exists() | ||||
|             assert ctx.maybe_error | ||||
| 
 | ||||
|     trio.run(main) | ||||
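
For reference, a minimal sketch of the child-side pattern this test exercises, assuming `Actor.lifetime_stack` behaves like a `contextlib.ExitStack` that gets closed on actor-runtime teardown (the helper name is illustrative, not the real `manage_file()` endpoint):

from pathlib import Path
import tractor

def register_boot_file(tmp_path: Path) -> Path:
    # make a file "on boot" inside the child actor..
    tmp_file: Path = tmp_path / 'child.tmp'
    tmp_file.touch()
    # ..and (assumption) register its removal via the
    # `ExitStack.callback()`-style API so teardown wipes it.
    tractor.current_actor().lifetime_stack.callback(tmp_file.unlink)
    return tmp_file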
| 
 | ||||
| 
 | ||||
| # TODO: debug_mode tests once we get support for `asyncio`! | ||||
| # | ||||
| # -[ ] need tests to wrap both scripts: | ||||
| #   - [ ] infected_asyncio_echo_server.py | ||||
| #   - [ ] debugging/asyncio_bp.py | ||||
| #  -[ ] consider moving ^ (some of) these ^ to `test_debugger`? | ||||
| # | ||||
| # -[ ] missing impl outstanding includes: | ||||
| #  - [x] for sync pauses we need to ensure we open yet another | ||||
| #    `greenback` portal in the asyncio task | ||||
| #    => completed using `.bestow_portal(task)` inside | ||||
| #     `.to_asyncio._run_asyncio_task()` right? | ||||
| #   -[ ] translation func to get from `asyncio` task calling to  | ||||
| #     `._debug.wait_for_parent_stdin_hijack()` which does root | ||||
| #     call to do TTY locking. | ||||
| # | ||||
| def test_sync_breakpoint(): | ||||
|     ''' | ||||
|     Verify we can do sync-func/code breakpointing using the | ||||
|     `breakpoint()` builtin inside infected mode actors. | ||||
| 
 | ||||
|     ''' | ||||
|     pytest.xfail('This support is not implemented yet!') | ||||
| 
 | ||||
| 
 | ||||
| def test_debug_mode_crash_handling(): | ||||
|     ''' | ||||
|     Verify multi-actor crash handling works with a combo of infected-`asyncio`-mode | ||||
|     and normal `trio` actors despite nested process trees. | ||||
| 
 | ||||
|     ''' | ||||
|     pytest.xfail('This support is not implemented yet!') | ||||
|  |  | |||
(File diff suppressed because it is too large.)
							|  | @ -9,7 +9,7 @@ import trio | |||
| import tractor | ||||
| import pytest | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| def test_must_define_ctx(): | ||||
|  | @ -38,10 +38,13 @@ async def async_gen_stream(sequence): | |||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
| 
 | ||||
| # TODO: deprecated; either remove entirely | ||||
| # or re-impl in terms of a `MsgStream` one-sided | ||||
| # wrapper, but at least remove `Portal.open_stream_from()` | ||||
| @tractor.stream | ||||
| async def context_stream( | ||||
|     ctx: tractor.Context, | ||||
|     sequence | ||||
|     sequence: list[int], | ||||
| ): | ||||
|     for i in sequence: | ||||
|         await ctx.send_yield(i) | ||||
|  | @ -55,7 +58,7 @@ async def context_stream( | |||
| 
 | ||||
| 
 | ||||
| async def stream_from_single_subactor( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     start_method, | ||||
|     stream_func, | ||||
| ): | ||||
|  | @ -64,7 +67,7 @@ async def stream_from_single_subactor( | |||
|     # only one per host address, spawns an actor if None | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=arb_addr, | ||||
|         registry_addrs=[reg_addr], | ||||
|         start_method=start_method, | ||||
|     ) as nursery: | ||||
| 
 | ||||
|  | @ -115,13 +118,13 @@ async def stream_from_single_subactor( | |||
| @pytest.mark.parametrize( | ||||
|     'stream_func', [async_gen_stream, context_stream] | ||||
| ) | ||||
| def test_stream_from_single_subactor(arb_addr, start_method, stream_func): | ||||
| def test_stream_from_single_subactor(reg_addr, start_method, stream_func): | ||||
|     """Verify streaming from a spawned async generator. | ||||
|     """ | ||||
|     trio.run( | ||||
|         partial( | ||||
|             stream_from_single_subactor, | ||||
|             arb_addr, | ||||
|             reg_addr, | ||||
|             start_method, | ||||
|             stream_func=stream_func, | ||||
|         ), | ||||
|  | @ -225,14 +228,14 @@ async def a_quadruple_example(): | |||
|         return result_stream | ||||
| 
 | ||||
| 
 | ||||
| async def cancel_after(wait, arb_addr): | ||||
|     async with tractor.open_root_actor(arbiter_addr=arb_addr): | ||||
| async def cancel_after(wait, reg_addr): | ||||
|     async with tractor.open_root_actor(registry_addrs=[reg_addr]): | ||||
|         with trio.move_on_after(wait): | ||||
|             return await a_quadruple_example() | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture(scope='module') | ||||
| def time_quad_ex(arb_addr, ci_env, spawn_backend): | ||||
| def time_quad_ex(reg_addr, ci_env, spawn_backend): | ||||
|     if spawn_backend == 'mp': | ||||
|         """no idea but the  mp *nix runs are flaking out here often... | ||||
|         """ | ||||
|  | @ -240,7 +243,7 @@ def time_quad_ex(arb_addr, ci_env, spawn_backend): | |||
| 
 | ||||
|     timeout = 7 if platform.system() in ('Windows', 'Darwin') else 4 | ||||
|     start = time.time() | ||||
|     results = trio.run(cancel_after, timeout, arb_addr) | ||||
|     results = trio.run(cancel_after, timeout, reg_addr) | ||||
|     diff = time.time() - start | ||||
|     assert results | ||||
|     return results, diff | ||||
|  | @ -260,14 +263,14 @@ def test_a_quadruple_example(time_quad_ex, ci_env, spawn_backend): | |||
|     list(map(lambda i: i/10, range(3, 9))) | ||||
| ) | ||||
| def test_not_fast_enough_quad( | ||||
|     arb_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend | ||||
|     reg_addr, time_quad_ex, cancel_delay, ci_env, spawn_backend | ||||
| ): | ||||
|     """Verify we can cancel midway through the quad example and all actors | ||||
|     cancel gracefully. | ||||
|     """ | ||||
|     results, diff = time_quad_ex | ||||
|     delay = max(diff - cancel_delay, 0) | ||||
|     results = trio.run(cancel_after, delay, arb_addr) | ||||
|     results = trio.run(cancel_after, delay, reg_addr) | ||||
|     system = platform.system() | ||||
|     if system in ('Windows', 'Darwin') and results is not None: | ||||
|         # In CI environments it seems later runs are quicker than the first | ||||
|  | @ -280,7 +283,7 @@ def test_not_fast_enough_quad( | |||
| 
 | ||||
| @tractor_test | ||||
| async def test_respawn_consumer_task( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     spawn_backend, | ||||
|     loglevel, | ||||
| ): | ||||
|  |  | |||
|  | @ -7,7 +7,7 @@ import pytest | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.trio | ||||
|  | @ -24,7 +24,7 @@ async def test_no_runtime(): | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_self_is_registered(arb_addr): | ||||
| async def test_self_is_registered(reg_addr): | ||||
|     "Verify waiting on the arbiter to register itself using the standard api." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|  | @ -34,20 +34,20 @@ async def test_self_is_registered(arb_addr): | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_self_is_registered_localportal(arb_addr): | ||||
| async def test_self_is_registered_localportal(reg_addr): | ||||
|     "Verify waiting on the arbiter to register itself using a local portal." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         assert isinstance(portal, tractor._portal.LocalPortal) | ||||
| 
 | ||||
|         with trio.fail_after(0.2): | ||||
|             sockaddr = await portal.run_from_ns( | ||||
|                     'self', 'wait_for_actor', name='root') | ||||
|             assert sockaddr[0] == arb_addr | ||||
|             assert sockaddr[0] == reg_addr | ||||
| 
 | ||||
| 
 | ||||
| def test_local_actor_async_func(arb_addr): | ||||
| def test_local_actor_async_func(reg_addr): | ||||
|     """Verify a simple async function in-process. | ||||
|     """ | ||||
|     nums = [] | ||||
|  | @ -55,7 +55,7 @@ def test_local_actor_async_func(arb_addr): | |||
|     async def print_loop(): | ||||
| 
 | ||||
|         async with tractor.open_root_actor( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ): | ||||
|             # arbiter is started in-proc if dne | ||||
|             assert tractor.current_actor().is_arbiter | ||||
|  |  | |||
|  | @ -7,8 +7,10 @@ import time | |||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from conftest import ( | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
| ) | ||||
| from conftest import ( | ||||
|     sig_prog, | ||||
|     _INT_SIGNAL, | ||||
|     _INT_RETURN_CODE, | ||||
|  | @ -28,9 +30,9 @@ def test_abort_on_sigint(daemon): | |||
| 
 | ||||
| 
 | ||||
| @tractor_test | ||||
| async def test_cancel_remote_arbiter(daemon, arb_addr): | ||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||
|     assert not tractor.current_actor().is_arbiter | ||||
|     async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|     time.sleep(0.1) | ||||
|  | @ -39,16 +41,16 @@ async def test_cancel_remote_arbiter(daemon, arb_addr): | |||
| 
 | ||||
|     # no arbiter socket should exist | ||||
|     with pytest.raises(OSError): | ||||
|         async with tractor.get_arbiter(*arb_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
| def test_register_duplicate_name(daemon, arb_addr): | ||||
| def test_register_duplicate_name(daemon, reg_addr): | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as n: | ||||
| 
 | ||||
|             assert not tractor.current_actor().is_arbiter | ||||
|  |  | |||
|  | @ -0,0 +1,364 @@ | |||
| ''' | ||||
| Audit sub-sys APIs from `.msg._ops` | ||||
| mostly for ensuring correct `contextvars` | ||||
| related settings around IPC contexts. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Context, | ||||
|     MsgTypeError, | ||||
|     current_ipc_ctx, | ||||
|     Portal, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ops as msgops, | ||||
|     Return, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class PldMsg( | ||||
|     Struct, | ||||
| 
 | ||||
|     # TODO: with multiple structs in-spec we need to tag them! | ||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes | ||||
|     #      care of these details? | ||||
|     # | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag=True, | ||||
|     # tag_field='msg_type', | ||||
| ): | ||||
|     field: str | ||||
| 
 | ||||
| 
 | ||||
| maybe_msg_spec = PldMsg|None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_expect_raises( | ||||
|     raises: BaseException|None = None, | ||||
|     ensure_in_message: list[str]|None = None, | ||||
|     post_mortem: bool = False, | ||||
|     timeout: int = 3, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Async wrapper for ensuring errors propagate from the inner scope. | ||||
| 
 | ||||
|     ''' | ||||
|     if tractor._state.debug_mode(): | ||||
|         timeout += 999 | ||||
| 
 | ||||
|     with trio.fail_after(timeout): | ||||
|         try: | ||||
|             yield | ||||
|         except BaseException as _inner_err: | ||||
|             inner_err = _inner_err | ||||
|             # wasn't expected to error.. | ||||
|             if raises is None: | ||||
|                 raise | ||||
| 
 | ||||
|             else: | ||||
|                 assert type(inner_err) is raises | ||||
| 
 | ||||
|                 # maybe check for error txt content | ||||
|                 if ensure_in_message: | ||||
|                     part: str | ||||
|                     err_repr: str = repr(inner_err) | ||||
|                     for part in ensure_in_message: | ||||
|                         # if any part is missing from the | ||||
|                         # error's repr we're missing a match. | ||||
|                         if part not in err_repr: | ||||
|                             raise ValueError( | ||||
|                                 'Failed to find error message content?\n\n' | ||||
|                                 f'expected: {ensure_in_message!r}\n' | ||||
|                                 f'part: {part!r}\n\n' | ||||
|                                 f'{inner_err.args}' | ||||
|                             ) | ||||
| 
 | ||||
|                 if post_mortem: | ||||
|                     await tractor.post_mortem() | ||||
| 
 | ||||
|         else: | ||||
|             if raises: | ||||
|                 raise RuntimeError( | ||||
|                     f'Expected a {raises.__name__!r} to be raised?' | ||||
|                 ) | ||||
| 
 | ||||
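
Example usage of the `maybe_expect_raises()` helper above (illustrative only): the wrapped scope must raise exactly the expected type and its repr must contain every listed fragment, otherwise the helper itself errors.

async def _example():
    async with maybe_expect_raises(
        raises=ValueError,
        ensure_in_message=['boom'],
    ):
        raise ValueError('boom')  # matches both checks above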
| 
 | ||||
| @tractor.context( | ||||
|     pld_spec=maybe_msg_spec, | ||||
| ) | ||||
| async def child( | ||||
|     ctx: Context, | ||||
|     started_value: int|PldMsg|None, | ||||
|     return_value: str|None, | ||||
|     validate_pld_spec: bool, | ||||
|     raise_on_started_mte: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Call ``Context.started()`` more than once (an error). | ||||
| 
 | ||||
|     ''' | ||||
|     expect_started_mte: bool = started_value == 10 | ||||
| 
 | ||||
|     # sanity check that child RPC context is the current one | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
| 
 | ||||
|     rx: msgops.PldRx = ctx._pld_rx | ||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec | ||||
| 
 | ||||
|     ctx_meta: dict = getattr( | ||||
|         child, | ||||
|         '_tractor_context_meta', | ||||
|         None, | ||||
|     ) | ||||
|     if ctx_meta: | ||||
|         assert ( | ||||
|             ctx_meta['pld_spec'] | ||||
|             is curr_pldec.spec | ||||
|             is curr_pldec.pld_spec | ||||
|         ) | ||||
| 
 | ||||
|     # 2 cases: handle send-side and recv-only validation | ||||
|     # - when `raise_on_started_mte == True`, send validate | ||||
|     # - else, parent-recv-side only validation | ||||
|     mte: MsgTypeError|None = None | ||||
|     try: | ||||
|         await ctx.started( | ||||
|             value=started_value, | ||||
|             validate_pld_spec=validate_pld_spec, | ||||
|         ) | ||||
| 
 | ||||
|     except MsgTypeError as _mte: | ||||
|         mte = _mte | ||||
|         log.exception('`started()` raised an MTE!\n') | ||||
|         if not expect_started_mte: | ||||
|             raise RuntimeError( | ||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' | ||||
|                 f'{started_value!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         boxed_div: str = '------ - ------' | ||||
|         assert boxed_div not in mte._message | ||||
|         assert boxed_div not in mte.tb_str | ||||
|         assert boxed_div not in repr(mte) | ||||
|         assert boxed_div not in str(mte) | ||||
|         mte_repr: str = repr(mte) | ||||
|         for line in mte.message.splitlines(): | ||||
|             assert line in mte_repr | ||||
| 
 | ||||
|         # since this is a *local error* there should be no | ||||
|         # boxed traceback content! | ||||
|         assert not mte.tb_str | ||||
| 
 | ||||
|         # propagate to parent? | ||||
|         if raise_on_started_mte: | ||||
|             raise | ||||
| 
 | ||||
|     # no-send-side-error fallthrough | ||||
|     if ( | ||||
|         validate_pld_spec | ||||
|         and | ||||
|         expect_started_mte | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' | ||||
|             f'{started_value!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|     assert ( | ||||
|         not expect_started_mte | ||||
|         or | ||||
|         not validate_pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # if wait_for_parent_to_cancel: | ||||
|     #     ... | ||||
|     # | ||||
|     # ^-TODO-^ logic for diff validation policies on each side: | ||||
|     # | ||||
|     # -[ ] ensure that if we don't validate on the send | ||||
|     #   side, that we are eventually error-cancelled by our | ||||
|     #   parent due to the bad `Started` payload! | ||||
|     # -[ ] the boxed error should be srced from the parent's | ||||
|     #   runtime NOT ours! | ||||
|     # -[ ] we should still error on bad `return_value`s | ||||
|     #   despite the parent not yet error-cancelling us? | ||||
|     #   |_ how do we want the parent side to look in that | ||||
|     #     case? | ||||
|     #     -[ ] maybe the equiv of "during handling of the | ||||
|     #       above error another occurred" for the case where | ||||
|     #       the parent sends a MTE to this child and while | ||||
|     #       waiting for the child to terminate it gets back | ||||
|     #       the MTE for this case? | ||||
|     # | ||||
| 
 | ||||
|     # XXX should always fail on recv side since we can't | ||||
|     # really do much else beside terminate and relay the | ||||
|     # msg-type-error from this RPC task ;) | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'return_value', | ||||
|     [ | ||||
|         'yo', | ||||
|         None, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'return[invalid-"yo"]', | ||||
|         'return[valid-None]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'started_value', | ||||
|     [ | ||||
|         10, | ||||
|         PldMsg(field='yo'), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'Started[invalid-10]', | ||||
|         'Started[valid-PldMsg]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pld_check_started_value', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'check-started-pld', | ||||
|         'no-started-pld-validate', | ||||
|     ], | ||||
| ) | ||||
| def test_basic_payload_spec( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     return_value: str|None, | ||||
|     started_value: int|PldMsg, | ||||
|     pld_check_started_value: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Validate the most basic `PldRx` msg-type-spec semantics around | ||||
|     a IPC `Context` endpoint start, started-sync, and final return | ||||
|     value depending on set payload types and the currently applied | ||||
|     pld-spec. | ||||
| 
 | ||||
|     ''' | ||||
|     invalid_return: bool = return_value == 'yo' | ||||
|     invalid_started: bool = started_value == 10 | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|         ) as an: | ||||
|             p: Portal = await an.start_actor( | ||||
|                 'child', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # since not opened yet. | ||||
|             assert current_ipc_ctx() is None | ||||
| 
 | ||||
|             if invalid_started: | ||||
|                 msg_type_str: str = 'Started' | ||||
|                 bad_value: int = 10 | ||||
|             elif invalid_return: | ||||
|                 msg_type_str: str = 'Return' | ||||
|                 bad_value: str = 'yo' | ||||
|             else: | ||||
|                 # XXX but should never be used below then.. | ||||
|                 msg_type_str: str = '' | ||||
|                 bad_value: str = '' | ||||
| 
 | ||||
|             maybe_mte: MsgTypeError|None = None | ||||
|             should_raise: Exception|None = ( | ||||
|                 MsgTypeError if ( | ||||
|                     invalid_return | ||||
|                     or | ||||
|                     invalid_started | ||||
|                 ) else None | ||||
|             ) | ||||
|             async with ( | ||||
|                 maybe_expect_raises( | ||||
|                     raises=should_raise, | ||||
|                     ensure_in_message=[ | ||||
|                         f"invalid `{msg_type_str}` msg payload", | ||||
|                         f'{bad_value}', | ||||
|                         f'has type {type(bad_value)!r}', | ||||
|                         'not match type-spec', | ||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', | ||||
|                     ], | ||||
|                     # only for debug | ||||
|                     # post_mortem=True, | ||||
|                 ), | ||||
|                 p.open_context( | ||||
|                     child, | ||||
|                     return_value=return_value, | ||||
|                     started_value=started_value, | ||||
|                     validate_pld_spec=pld_check_started_value, | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 # now opened with 'child' sub | ||||
|                 assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|                 assert type(first) is PldMsg | ||||
|                 assert first.field == 'yo' | ||||
| 
 | ||||
|                 try: | ||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) | ||||
|                     assert res is None | ||||
|                 except MsgTypeError as mte: | ||||
|                     maybe_mte = mte | ||||
|                     if not invalid_return: | ||||
|                         raise | ||||
| 
 | ||||
|                     # expected this invalid `Return.pld` so audit | ||||
|                     # the error state + meta-data | ||||
|                     assert mte.expected_msg_type is Return | ||||
|                     assert mte.cid == ctx.cid | ||||
|                     mte_repr: str = repr(mte) | ||||
|                     for line in mte.message.splitlines(): | ||||
|                         assert line in mte_repr | ||||
| 
 | ||||
|                     assert mte.tb_str | ||||
|                     # await tractor.pause(shield=True) | ||||
| 
 | ||||
|                     # verify expected remote mte deats | ||||
|                     assert ctx._local_error is None | ||||
|                     assert ( | ||||
|                         mte is | ||||
|                         ctx._remote_error is | ||||
|                         ctx.maybe_error is | ||||
|                         ctx.outcome | ||||
|                     ) | ||||
| 
 | ||||
|             if should_raise is None: | ||||
|                 assert maybe_mte is None | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
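
Under the hood the pld-spec checks exercised above reduce to `msgspec` union validation; a rough standalone sketch of the mechanism (an assumption for illustration, not `tractor`'s actual codec wiring):

import msgspec

class _Pld(msgspec.Struct):
    field: str

_enc = msgspec.msgpack.Encoder()
_dec = msgspec.msgpack.Decoder(_Pld | None)

# in-spec payloads round-trip..
assert _dec.decode(_enc.encode(_Pld(field='yo'))).field == 'yo'
assert _dec.decode(_enc.encode(None)) is None

# ..but an out-of-spec payload (like the `10` case above) raises.
try:
    _dec.decode(_enc.encode(10))
except msgspec.ValidationError:
    pass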
|  | @ -5,8 +5,7 @@ import pytest | |||
| import trio | ||||
| import tractor | ||||
| from tractor.experimental import msgpub | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| def test_type_checks(): | ||||
|  | @ -160,7 +159,7 @@ async def test_required_args(callwith_expecterror): | |||
| ) | ||||
| def test_multi_actor_subs_arbiter_pub( | ||||
|     loglevel, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     pub_actor, | ||||
| ): | ||||
|     """Try out the neato @pub decorator system. | ||||
|  | @ -170,7 +169,7 @@ def test_multi_actor_subs_arbiter_pub( | |||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|             enable_modules=[__name__], | ||||
|         ) as n: | ||||
| 
 | ||||
|  | @ -255,12 +254,12 @@ def test_multi_actor_subs_arbiter_pub( | |||
| 
 | ||||
| def test_single_subactor_pub_multitask_subs( | ||||
|     loglevel, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
| ): | ||||
|     async def main(): | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|             enable_modules=[__name__], | ||||
|         ) as n: | ||||
| 
 | ||||
|  |  | |||
|  | @ -34,7 +34,6 @@ def test_resource_only_entered_once(key_on): | |||
|     global _resource | ||||
|     _resource = 0 | ||||
| 
 | ||||
|     kwargs = {} | ||||
|     key = None | ||||
|     if key_on == 'key_value': | ||||
|         key = 'some_common_key' | ||||
|  | @ -139,7 +138,7 @@ def test_open_local_sub_to_stream(): | |||
|     N local tasks using ``trionics.maybe_open_context():``. | ||||
| 
 | ||||
|     ''' | ||||
|     timeout = 3 if platform.system() != "Windows" else 10 | ||||
|     timeout: float = 3.6 if platform.system() != "Windows" else 10 | ||||
| 
 | ||||
|     async def main(): | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,244 @@ | |||
| ''' | ||||
| Special attention cases for using "infect `asyncio`" mode from a root | ||||
| actor; i.e. not using a std `trio.run()` bootstrap. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
| ) | ||||
| from tests.test_infected_asyncio import ( | ||||
|     aio_echo_server, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_error_mid_stream', | ||||
|     [ | ||||
|         False, | ||||
|         Exception, | ||||
|         KeyboardInterrupt, | ||||
|     ], | ||||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_infected_root_actor( | ||||
|     raise_error_mid_stream: bool|Exception, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True` | ||||
|     in the root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 to_asyncio.open_channel_from( | ||||
|                     aio_echo_server, | ||||
|                 ) as (first, chan), | ||||
|             ): | ||||
|                 assert first == 'start' | ||||
| 
 | ||||
|                 for i in range(1000): | ||||
|                     await chan.send(i) | ||||
|                     out = await chan.receive() | ||||
|                     assert out == i | ||||
|                     print(f'asyncio echoing {i}') | ||||
| 
 | ||||
|                     if raise_error_mid_stream and i == 500: | ||||
|                         raise raise_error_mid_stream | ||||
| 
 | ||||
|                     if out is None: | ||||
|                         try: | ||||
|                             out = await chan.receive() | ||||
|                         except trio.EndOfChannel: | ||||
|                             break | ||||
|                         else: | ||||
|                             raise RuntimeError( | ||||
|                                 'aio channel never stopped?' | ||||
|                             ) | ||||
| 
 | ||||
|     if raise_error_mid_stream: | ||||
|         with pytest.raises(raise_error_mid_stream): | ||||
|             tractor.to_asyncio.run_as_asyncio_guest( | ||||
|                 trio_main=_trio_main, | ||||
|             ) | ||||
|     else: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
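
For context, `run_as_asyncio_guest()` is (roughly speaking) built on `trio`'s guest-mode API; a simplified, hedged sketch of the bootstrap it substitutes for a plain `trio.run()`:

import asyncio
import outcome  # the `Outcome` lib `trio` itself depends on
import trio

def sketch_guest_run(trio_main):
    async def aio_main():
        loop = asyncio.get_running_loop()
        done: asyncio.Future = loop.create_future()

        # start `trio` as a "guest" on top of the running asyncio loop
        trio.lowlevel.start_guest_run(
            trio_main,
            run_sync_soon_threadsafe=loop.call_soon_threadsafe,
            done_callback=lambda run_outcome: done.set_result(run_outcome),
        )
        # re-raises any error from the `trio` side
        return (await done).unwrap()

    return asyncio.run(aio_main())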
| 
 | ||||
| 
 | ||||
| async def sync_and_err( | ||||
|     # just signature placeholders for compat with | ||||
|     # ``to_asyncio.open_channel_from()`` | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     ev: asyncio.Event, | ||||
| 
 | ||||
| ): | ||||
|     if to_trio: | ||||
|         to_trio.send_nowait('start') | ||||
| 
 | ||||
|     await ev.wait() | ||||
|     raise RuntimeError('asyncio-side') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'aio_err_trigger', | ||||
|     [ | ||||
|         'before_start_point', | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ], | ||||
|     ids='aio_err_triggered={}'.format | ||||
| ) | ||||
| def test_trio_prestarted_task_bubbles( | ||||
|     aio_err_trigger: str, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def pre_started_err( | ||||
|         raise_err: bool = False, | ||||
|         pre_sleep: float|None = None, | ||||
|         aio_trigger: asyncio.Event|None = None, | ||||
|         task_status=trio.TASK_STATUS_IGNORED, | ||||
|     ): | ||||
|         ''' | ||||
|         Maybe sleep, maybe signal the aio side, then maybe raise before `.started()`. | ||||
| 
 | ||||
|         ''' | ||||
|         if pre_sleep is not None: | ||||
|             print(f'Sleeping from trio for {pre_sleep!r}s !') | ||||
|             await trio.sleep(pre_sleep) | ||||
| 
 | ||||
|         # signal aio-task to raise JUST AFTER this task | ||||
|         # starts but has not yet `.started()` | ||||
|         if aio_trigger: | ||||
|             print('Signalling aio-task to raise from `trio`!!') | ||||
|             aio_trigger.set() | ||||
| 
 | ||||
|         if raise_err: | ||||
|             print('Raising from trio!') | ||||
|             raise TypeError('trio-side') | ||||
| 
 | ||||
|         task_status.started() | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     async def _trio_main(): | ||||
|         # with trio.fail_after(2): | ||||
|         with trio.fail_after(999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             aio_ev = asyncio.Event() | ||||
| 
 | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=False, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|             ): | ||||
|                 # TODO, tests for this with 3.13 egs? | ||||
|                 # from tractor.devx import open_crash_handler | ||||
|                 # with open_crash_handler(): | ||||
|                 async with ( | ||||
|                     # where we'll start a sub-task that errors BEFORE | ||||
|                     # calling `.started()` such that the error should | ||||
|                     # bubble before the guest run terminates! | ||||
|                     trio.open_nursery() as tn, | ||||
| 
 | ||||
|                     # THEN start an infect task which should error just | ||||
|                     # after the trio-side's task does. | ||||
|                     to_asyncio.open_channel_from( | ||||
|                         partial( | ||||
|                             sync_and_err, | ||||
|                             ev=aio_ev, | ||||
|                         ) | ||||
|                     ) as (first, chan), | ||||
|                 ): | ||||
| 
 | ||||
|                     for i in range(5): | ||||
|                         pre_sleep: float|None = None | ||||
|                         last_iter: bool = (i == 4) | ||||
| 
 | ||||
|                         # TODO, missing cases? | ||||
|                         # -[ ] also raise an error on the | ||||
|                         #    'after_start_point' case as | ||||
|                         #    another parametrization? | ||||
|                         raise_err: bool = False | ||||
| 
 | ||||
|                         if last_iter: | ||||
|                             raise_err: bool = True | ||||
| 
 | ||||
|                             # trigger aio task to error on next loop | ||||
|                             # tick/checkpoint | ||||
|                             if aio_err_trigger == 'before_start_point': | ||||
|                                 aio_ev.set() | ||||
| 
 | ||||
|                             pre_sleep: float = 0 | ||||
| 
 | ||||
|                         await tn.start( | ||||
|                             pre_started_err, | ||||
|                             raise_err, | ||||
|                             pre_sleep, | ||||
|                             (aio_ev if ( | ||||
|                                     aio_err_trigger == 'after_trio_task_starts' | ||||
|                                     and | ||||
|                                     last_iter | ||||
|                                 ) else None | ||||
|                             ), | ||||
|                         ) | ||||
| 
 | ||||
|                         if ( | ||||
|                             aio_err_trigger == 'after_start_point' | ||||
|                             and | ||||
|                             last_iter | ||||
|                         ): | ||||
|                             aio_ev.set() | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=ExceptionGroup, | ||||
|     ) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     eg = excinfo.value | ||||
|     rte_eg, rest_eg = eg.split(RuntimeError) | ||||
| 
 | ||||
|     # ensure the trio-task's error bubbled despite the aio-side | ||||
|     # having (maybe) errored first. | ||||
|     if aio_err_trigger in ( | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ): | ||||
|         assert len(errs := rest_eg.exceptions) == 1 | ||||
|         typerr = errs[0] | ||||
|         assert ( | ||||
|             type(typerr) is TypeError | ||||
|             and | ||||
|             'trio-side' in typerr.args | ||||
|         ) | ||||
| 
 | ||||
| # when aio errors BEFORE (last) trio task is scheduled, we should | ||||
| # never see anything but the aio-side. | ||||
|     else: | ||||
|         assert len(rtes := rte_eg.exceptions) == 1 | ||||
|         assert 'asyncio-side' in rtes[0].args[0] | ||||
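
(Aside on the `.split()` calls above: per PEP 654 a `BaseExceptionGroup.split(T)` returns a `(matched, rest)` pair, each side being a group holding only the leaf exceptions that do/don't match `T`, or `None` when empty. For example:)

eg = ExceptionGroup('demo', [RuntimeError('a'), TypeError('b')])
rte_eg, rest_eg = eg.split(RuntimeError)
assert [type(e) for e in rte_eg.exceptions] == [RuntimeError]
assert [type(e) for e in rest_eg.exceptions] == [TypeError]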
|  | @ -1,6 +1,8 @@ | |||
| """ | ||||
| RPC related | ||||
| """ | ||||
| ''' | ||||
| RPC (or maybe better labelled as "RTS: remote task scheduling"?) | ||||
| related API and error checks. | ||||
| 
 | ||||
| ''' | ||||
| import itertools | ||||
| 
 | ||||
| import pytest | ||||
|  | @ -13,9 +15,19 @@ async def sleep_back_actor( | |||
|     func_name, | ||||
|     func_defined, | ||||
|     exposed_mods, | ||||
|     *, | ||||
|     reg_addr: tuple, | ||||
| ): | ||||
|     if actor_name: | ||||
|         async with tractor.find_actor(actor_name) as portal: | ||||
|         async with tractor.find_actor( | ||||
|             actor_name, | ||||
|             # NOTE: must be set manually since | ||||
|             # the subactor doesn't have the reg_addr | ||||
|             # fixture code run in it! | ||||
|             # TODO: maybe we should just set this once in the | ||||
|             # _state mod and derive to all children? | ||||
|             registry_addrs=[reg_addr], | ||||
|         ) as portal: | ||||
|             try: | ||||
|                 await portal.run(__name__, func_name) | ||||
|             except tractor.RemoteActorError as err: | ||||
|  | @ -24,7 +36,7 @@ async def sleep_back_actor( | |||
|                 if not exposed_mods: | ||||
|                     expect = tractor.ModuleNotExposed | ||||
| 
 | ||||
|                 assert err.type is expect | ||||
|                 assert err.boxed_type is expect | ||||
|                 raise | ||||
|     else: | ||||
|         await trio.sleep(float('inf')) | ||||
|  | @ -42,14 +54,25 @@ async def short_sleep(): | |||
|         (['tmp_mod'], 'import doggy', ModuleNotFoundError), | ||||
|         (['tmp_mod'], '4doggy', SyntaxError), | ||||
|     ], | ||||
|     ids=['no_mods', 'this_mod', 'this_mod_bad_func', 'fail_to_import', | ||||
|          'fail_on_syntax'], | ||||
|     ids=[ | ||||
|         'no_mods', | ||||
|         'this_mod', | ||||
|         'this_mod_bad_func', | ||||
|         'fail_to_import', | ||||
|         'fail_on_syntax', | ||||
|     ], | ||||
| ) | ||||
| def test_rpc_errors(arb_addr, to_call, testdir): | ||||
|     """Test errors when making various RPC requests to an actor | ||||
| def test_rpc_errors( | ||||
|     reg_addr, | ||||
|     to_call, | ||||
|     testdir, | ||||
| ): | ||||
|     ''' | ||||
|     Test errors when making various RPC requests to an actor | ||||
|     that either doesn't have the requested module exposed or doesn't define | ||||
|     the named function. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     exposed_mods, funcname, inside_err = to_call | ||||
|     subactor_exposed_mods = [] | ||||
|     func_defined = globals().get(funcname, False) | ||||
|  | @ -77,8 +100,13 @@ def test_rpc_errors(arb_addr, to_call, testdir): | |||
| 
 | ||||
|         # spawn a subactor which calls us back | ||||
|         async with tractor.open_nursery( | ||||
|             arbiter_addr=arb_addr, | ||||
|             registry_addrs=[reg_addr], | ||||
|             enable_modules=exposed_mods.copy(), | ||||
| 
 | ||||
|             # NOTE: will halt test in REPL if uncommented, so only | ||||
|             # do that if actually debugging subactor but keep it | ||||
|             # disabled for the test. | ||||
|             # debug_mode=True, | ||||
|         ) as n: | ||||
| 
 | ||||
|             actor = tractor.current_actor() | ||||
|  | @ -95,6 +123,7 @@ def test_rpc_errors(arb_addr, to_call, testdir): | |||
|                 exposed_mods=exposed_mods, | ||||
|                 func_defined=True if func_defined else False, | ||||
|                 enable_modules=subactor_exposed_mods, | ||||
|                 reg_addr=reg_addr, | ||||
|             ) | ||||
| 
 | ||||
|     def run(): | ||||
|  | @ -105,18 +134,20 @@ def test_rpc_errors(arb_addr, to_call, testdir): | |||
|         run() | ||||
|     else: | ||||
|         # underlying errors aren't propagated upwards (yet) | ||||
|         with pytest.raises(remote_err) as err: | ||||
|         with pytest.raises( | ||||
|             expected_exception=(remote_err, ExceptionGroup), | ||||
|         ) as err: | ||||
|             run() | ||||
| 
 | ||||
|         # get raw instance from pytest wrapper | ||||
|         value = err.value | ||||
| 
 | ||||
|         # might get multiple `trio.Cancelled`s as well inside an inception | ||||
|         if isinstance(value, trio.MultiError): | ||||
|         if isinstance(value, ExceptionGroup): | ||||
|             value = next(itertools.dropwhile( | ||||
|                 lambda exc: not isinstance(exc, tractor.RemoteActorError), | ||||
|                 value.exceptions | ||||
|             )) | ||||
| 
 | ||||
|         if getattr(value, 'type', None): | ||||
|             assert value.type is inside_err | ||||
|             assert value.boxed_type is inside_err | ||||
|  |  | |||
|  | @ -8,7 +8,7 @@ import pytest | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| 
 | ||||
| _file_path: str = '' | ||||
|  | @ -64,7 +64,8 @@ async def test_lifetime_stack_wipes_tmpfile( | |||
| 
 | ||||
|     except ( | ||||
|         tractor.RemoteActorError, | ||||
|         tractor.BaseExceptionGroup, | ||||
|         # tractor.BaseExceptionGroup, | ||||
|         BaseExceptionGroup, | ||||
|     ): | ||||
|         pass | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,167 @@ | |||
| """ | ||||
| Shared mem primitives and APIs. | ||||
| 
 | ||||
| """ | ||||
| import uuid | ||||
| 
 | ||||
| # import numpy | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor._shm import ( | ||||
|     open_shm_list, | ||||
|     attach_shm_list, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_attach_shml_alot( | ||||
|     ctx: tractor.Context, | ||||
|     shm_key: str, | ||||
| ) -> None: | ||||
| 
 | ||||
|     await ctx.started(shm_key) | ||||
| 
 | ||||
|     # now try to attach a boatload of times in a loop.. | ||||
|     for _ in range(1000): | ||||
|         shml = attach_shm_list( | ||||
|             key=shm_key, | ||||
|             readonly=False, | ||||
|         ) | ||||
|         assert shml.shm.name == shm_key | ||||
|         await trio.sleep(0.001) | ||||
| 
 | ||||
| 
 | ||||
| def test_child_attaches_alot(): | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery() as an: | ||||
| 
 | ||||
|             # allocate writeable list in parent | ||||
|             key = f'shml_{uuid.uuid4()}' | ||||
|             shml = open_shm_list( | ||||
|                 key=key, | ||||
|             ) | ||||
| 
 | ||||
|             portal = await an.start_actor( | ||||
|                 'shm_attacher', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             async with ( | ||||
|                 portal.open_context( | ||||
|                     child_attach_shml_alot, | ||||
|                     shm_key=shml.key, | ||||
|                 ) as (ctx, start_val), | ||||
|             ): | ||||
|                 assert start_val == key | ||||
|                 await ctx.result() | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
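
A condensed single-process sketch of the `_shm` list API these tests drive across actors (kwargs mirror the usage elsewhere in this file; treat it as illustrative, not canonical docs):

# writer side: allocate a named, writeable shared list..
shml = open_shm_list(key='demo_shml', size=8, readonly=False)
shml[0] = 0.5

# reader side: attach to the same segment by key (the tests do
# this from a subactor).
reader = attach_shm_list(key='demo_shml', readonly=False)
assert reader.shm.name == 'demo_shml'
assert reader[0] == 0.5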
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def child_read_shm_list( | ||||
|     ctx: tractor.Context, | ||||
|     shm_key: str, | ||||
|     use_str: bool, | ||||
|     frame_size: int, | ||||
| ) -> None: | ||||
| 
 | ||||
|     # attach in child | ||||
|     shml = attach_shm_list( | ||||
|         key=shm_key, | ||||
|         # dtype=str if use_str else float, | ||||
|     ) | ||||
|     await ctx.started(shml.key) | ||||
| 
 | ||||
|     async with ctx.open_stream() as stream: | ||||
|         async for i in stream: | ||||
|             print(f'(child): reading shm list index: {i}') | ||||
| 
 | ||||
|             if use_str: | ||||
|                 expect = str(float(i)) | ||||
|             else: | ||||
|                 expect = float(i) | ||||
| 
 | ||||
|             if frame_size == 1: | ||||
|                 val = shml[i] | ||||
|                 assert expect == val | ||||
|                 print(f'(child): reading value: {val}') | ||||
|             else: | ||||
|                 frame = shml[i - frame_size:i] | ||||
|                 print(f'(child): reading frame: {frame}') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'use_str', | ||||
|     [False, True], | ||||
|     ids=lambda i: f'use_str_values={i}', | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'frame_size', | ||||
|     [1, 2**6, 2**10], | ||||
|     ids=lambda i: f'frame_size={i}', | ||||
| ) | ||||
| def test_parent_writer_child_reader( | ||||
|     use_str: bool, | ||||
|     frame_size: int, | ||||
| ): | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             # debug_mode=True, | ||||
|         ) as an: | ||||
| 
 | ||||
|             portal = await an.start_actor( | ||||
|                 'shm_reader', | ||||
|                 enable_modules=[__name__], | ||||
|                 debug_mode=True, | ||||
|             ) | ||||
| 
 | ||||
|             # allocate writeable list in parent | ||||
|             key = 'shm_list' | ||||
|             seq_size = int(2 * 2 ** 10) | ||||
|             shml = open_shm_list( | ||||
|                 key=key, | ||||
|                 size=seq_size, | ||||
|                 dtype=str if use_str else float, | ||||
|                 readonly=False, | ||||
|             ) | ||||
| 
 | ||||
|             async with ( | ||||
|                 portal.open_context( | ||||
|                     child_read_shm_list, | ||||
|                     shm_key=key, | ||||
|                     use_str=use_str, | ||||
|                     frame_size=frame_size, | ||||
|                 ) as (ctx, sent), | ||||
| 
 | ||||
|                 ctx.open_stream() as stream, | ||||
|             ): | ||||
| 
 | ||||
|                 assert sent == key | ||||
| 
 | ||||
|                 for i in range(seq_size): | ||||
| 
 | ||||
|                     val = float(i) | ||||
|                     if use_str: | ||||
|                         val = str(val) | ||||
| 
 | ||||
|                     # print(f'(parent): writing {val}') | ||||
|                     shml[i] = val | ||||
| 
 | ||||
|                     # only on frame fills do we | ||||
|                     # signal to the child that a frame's | ||||
|                     # worth is ready. | ||||
|                     if (i % frame_size) == 0: | ||||
|                         print(f'(parent): signalling frame full on {val}') | ||||
|                         await stream.send(i) | ||||
|                 else: | ||||
|                     print(f'(parent): signalling final frame on {val}') | ||||
|                     await stream.send(i) | ||||
| 
 | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -2,13 +2,15 @@ | |||
| Spawning basics | ||||
| 
 | ||||
| """ | ||||
| from typing import Optional | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| from conftest import tractor_test | ||||
| from tractor._testing import tractor_test | ||||
| 
 | ||||
| data_to_pass_down = {'doggy': 10, 'kitty': 4} | ||||
| 
 | ||||
|  | @ -16,24 +18,21 @@ data_to_pass_down = {'doggy': 10, 'kitty': 4} | |||
| async def spawn( | ||||
|     is_arbiter: bool, | ||||
|     data: dict, | ||||
|     arb_addr: tuple[str, int], | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     namespaces = [__name__] | ||||
| 
 | ||||
|     await trio.sleep(0.1) | ||||
| 
 | ||||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=arb_addr, | ||||
|         arbiter_addr=reg_addr, | ||||
|     ): | ||||
| 
 | ||||
|         actor = tractor.current_actor() | ||||
|         assert actor.is_arbiter == is_arbiter | ||||
|         data = data_to_pass_down | ||||
| 
 | ||||
|         if actor.is_arbiter: | ||||
| 
 | ||||
|             async with tractor.open_nursery( | ||||
|             ) as nursery: | ||||
|             async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|                 # forks here | ||||
|                 portal = await nursery.run_in_actor( | ||||
|  | @ -41,7 +40,7 @@ async def spawn( | |||
|                     is_arbiter=False, | ||||
|                     name='sub-actor', | ||||
|                     data=data, | ||||
|                     arb_addr=arb_addr, | ||||
|                     reg_addr=reg_addr, | ||||
|                     enable_modules=namespaces, | ||||
|                 ) | ||||
| 
 | ||||
|  | @ -55,12 +54,14 @@ async def spawn( | |||
|             return 10 | ||||
| 
 | ||||
| 
 | ||||
| def test_local_arbiter_subactor_global_state(arb_addr): | ||||
| def test_local_arbiter_subactor_global_state( | ||||
|     reg_addr, | ||||
| ): | ||||
|     result = trio.run( | ||||
|         spawn, | ||||
|         True, | ||||
|         data_to_pass_down, | ||||
|         arb_addr, | ||||
|         reg_addr, | ||||
|     ) | ||||
|     assert result == 10 | ||||
| 
 | ||||
|  | @ -94,7 +95,9 @@ async def test_movie_theatre_convo(start_method): | |||
|         await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def cellar_door(return_value: Optional[str]): | ||||
| async def cellar_door( | ||||
|     return_value: str|None, | ||||
| ): | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
|  | @ -104,16 +107,18 @@ async def cellar_door(return_value: Optional[str]): | |||
| ) | ||||
| @tractor_test | ||||
| async def test_most_beautiful_word( | ||||
|     start_method, | ||||
|     return_value | ||||
|     start_method: str, | ||||
|     return_value: Any, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
| 
 | ||||
|     ''' | ||||
|     with trio.fail_after(1): | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 cellar_door, | ||||
|                 return_value=return_value, | ||||
|  | @ -140,7 +145,7 @@ async def check_loglevel(level): | |||
| def test_loglevel_propagated_to_subactor( | ||||
|     start_method, | ||||
|     capfd, | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
| ): | ||||
|     if start_method == 'mp_forkserver': | ||||
|         pytest.skip( | ||||
|  | @ -152,7 +157,7 @@ def test_loglevel_propagated_to_subactor( | |||
|         async with tractor.open_nursery( | ||||
|             name='arbiter', | ||||
|             start_method=start_method, | ||||
|             arbiter_addr=arb_addr, | ||||
|             arbiter_addr=reg_addr, | ||||
| 
 | ||||
|         ) as tn: | ||||
|             await tn.run_in_actor( | ||||
|  |  | |||
|  | @ -66,13 +66,13 @@ async def ensure_sequence( | |||
| async def open_sequence_streamer( | ||||
| 
 | ||||
|     sequence: list[int], | ||||
|     arb_addr: tuple[str, int], | ||||
|     reg_addr: tuple[str, int], | ||||
|     start_method: str, | ||||
| 
 | ||||
| ) -> tractor.MsgStream: | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         arbiter_addr=arb_addr, | ||||
|         arbiter_addr=reg_addr, | ||||
|         start_method=start_method, | ||||
|     ) as tn: | ||||
| 
 | ||||
|  | @ -93,7 +93,7 @@ async def open_sequence_streamer( | |||
| 
 | ||||
| 
 | ||||
| def test_stream_fan_out_to_local_subscriptions( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     start_method, | ||||
| ): | ||||
| 
 | ||||
|  | @ -103,7 +103,7 @@ def test_stream_fan_out_to_local_subscriptions( | |||
| 
 | ||||
|         async with open_sequence_streamer( | ||||
|             sequence, | ||||
|             arb_addr, | ||||
|             reg_addr, | ||||
|             start_method, | ||||
|         ) as stream: | ||||
| 
 | ||||
|  | @ -138,7 +138,7 @@ def test_stream_fan_out_to_local_subscriptions( | |||
|     ] | ||||
| ) | ||||
| def test_consumer_and_parent_maybe_lag( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     start_method, | ||||
|     task_delays, | ||||
| ): | ||||
|  | @ -150,7 +150,7 @@ def test_consumer_and_parent_maybe_lag( | |||
| 
 | ||||
|         async with open_sequence_streamer( | ||||
|             sequence, | ||||
|             arb_addr, | ||||
|             reg_addr, | ||||
|             start_method, | ||||
|         ) as stream: | ||||
| 
 | ||||
|  | @ -211,7 +211,7 @@ def test_consumer_and_parent_maybe_lag( | |||
| 
 | ||||
| 
 | ||||
| def test_faster_task_to_recv_is_cancelled_by_slower( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     start_method, | ||||
| ): | ||||
|     ''' | ||||
|  | @ -225,7 +225,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
| 
 | ||||
|         async with open_sequence_streamer( | ||||
|             sequence, | ||||
|             arb_addr, | ||||
|             reg_addr, | ||||
|             start_method, | ||||
| 
 | ||||
|         ) as stream: | ||||
|  | @ -271,7 +271,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
|                         # the faster subtask was cancelled | ||||
|                         break | ||||
| 
 | ||||
|                 # await tractor.breakpoint() | ||||
|                 # await tractor.pause() | ||||
|                 # await stream.receive() | ||||
|                 print(f'final value: {value}') | ||||
| 
 | ||||
|  | @ -302,7 +302,7 @@ def test_subscribe_errors_after_close(): | |||
| 
 | ||||
| 
 | ||||
| def test_ensure_slow_consumers_lag_out( | ||||
|     arb_addr, | ||||
|     reg_addr, | ||||
|     start_method, | ||||
| ): | ||||
|     '''This is a pure local task test; no tractor | ||||
|  |  | |||
|  | @ -3,9 +3,13 @@ Reminders for oddities in `trio` that we need to stay aware of and/or | |||
| want to see changed. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from trio import TaskStatus | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -80,3 +84,115 @@ def test_stashed_child_nursery(use_start_soon): | |||
| 
 | ||||
|     with pytest.raises(NameError): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     ('unmask_from_canc', 'canc_from_finally'), | ||||
|     [ | ||||
|         (True, False), | ||||
|         (True, True), | ||||
|         pytest.param(False, True, | ||||
|                      marks=pytest.mark.xfail(reason="never raises!") | ||||
|         ), | ||||
|     ], | ||||
|     # TODO, ask ronny how to impl this .. XD | ||||
|     # ids='unmask_from_canc={0}, canc_from_finally={1}',#.format, | ||||
| ) | ||||
| def test_acm_embedded_nursery_propagates_enter_err( | ||||
|     canc_from_finally: bool, | ||||
|     unmask_from_canc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking from the | ||||
|     `.__context__` field when a user (by accident) re-raises from a `finally:`. | ||||
| 
 | ||||
|     ''' | ||||
|     import tractor | ||||
| 
 | ||||
|     @acm | ||||
|     async def maybe_raise_from_masking_exc( | ||||
|         tn: trio.Nursery, | ||||
|         unmask_from: BaseException|None = trio.Cancelled | ||||
| 
 | ||||
|         # TODO, maybe offer a collection? | ||||
|         # unmask_from: set[BaseException] = { | ||||
|         #     trio.Cancelled, | ||||
|         # }, | ||||
|     ): | ||||
|         if not unmask_from: | ||||
|             yield | ||||
|             return | ||||
| 
 | ||||
|         try: | ||||
|             yield | ||||
|         except* unmask_from as be_eg: | ||||
| 
 | ||||
|             # TODO, if we offer `unmask_from: set` | ||||
|             # for masker_exc_type in unmask_from: | ||||
| 
 | ||||
|             matches, rest = be_eg.split(unmask_from) | ||||
|             if not matches: | ||||
|                 raise | ||||
| 
 | ||||
|             for exc_match in be_eg.exceptions: | ||||
|                 if ( | ||||
|                     (exc_ctx := exc_match.__context__) | ||||
|                     and | ||||
|                     type(exc_ctx) not in { | ||||
|                         # trio.Cancelled,  # always by default? | ||||
|                         unmask_from, | ||||
|                     } | ||||
|                 ): | ||||
|                     exc_ctx.add_note( | ||||
|                         f'\n' | ||||
|                         f'WARNING: the above error was masked by a {unmask_from!r} !?!\n' | ||||
|                         f'Are you always cancelling? Say from a `finally:` ?\n\n' | ||||
| 
 | ||||
|                         f'{tn!r}' | ||||
|                     ) | ||||
|                     raise exc_ctx from exc_match | ||||
| 
 | ||||
| 
 | ||||
|     @acm | ||||
|     async def wraps_tn_that_always_cancels(): | ||||
|         async with ( | ||||
|             trio.open_nursery() as tn, | ||||
|             maybe_raise_from_masking_exc( | ||||
|                 tn=tn, | ||||
|                 unmask_from=( | ||||
|                     trio.Cancelled | ||||
|                     if unmask_from_canc | ||||
|                     else None | ||||
|                 ), | ||||
|             ) | ||||
|         ): | ||||
|             try: | ||||
|                 yield tn | ||||
|             finally: | ||||
|                 if canc_from_finally: | ||||
|                     tn.cancel_scope.cancel() | ||||
|                     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|     async def _main(): | ||||
|         with tractor.devx.open_crash_handler() as bxerr: | ||||
|             assert not bxerr.value | ||||
| 
 | ||||
|             async with ( | ||||
|                 wraps_tn_that_always_cancels() as tn, | ||||
|             ): | ||||
|                 assert not tn.cancel_scope.cancel_called | ||||
|                 assert 0 | ||||
| 
 | ||||
|         assert ( | ||||
|             (err := bxerr.value) | ||||
|             and | ||||
|             type(err) is AssertionError | ||||
|         ) | ||||
| 
 | ||||
|     with pytest.raises(ExceptionGroup) as excinfo: | ||||
|         trio.run(_main) | ||||
| 
 | ||||
|     eg: ExceptionGroup = excinfo.value | ||||
|     assert_eg, rest_eg = eg.split(AssertionError) | ||||
| 
 | ||||
|     assert len(assert_eg.exceptions) == 1 | ||||
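For reference, the masking mechanics this new test exercises can be shown with plain exceptions (a minimal, runnable sketch, no `trio` needed): an error raised from a `finally:` while another exception is propagating implicitly chains the original onto `.__context__`, which is exactly the field the unmasker above inspects.

    def demo_masking() -> None:
        try:
            try:
                raise ValueError('the "real" error')
            finally:
                # accidentally raising here replaces the in-flight error..
                raise RuntimeError('raised from finally:')
        except RuntimeError as masker:
            # ..but python stashes the masked original on `.__context__`
            assert type(masker.__context__) is ValueError

    demo_masking()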
|  |  | |||
|  | @ -18,71 +18,50 @@ | |||
| tractor: structured concurrent ``trio``-"actors". | ||||
| 
 | ||||
| """ | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| 
 | ||||
| from ._clustering import open_actor_cluster | ||||
| from ._ipc import Channel | ||||
| from ._clustering import ( | ||||
|     open_actor_cluster as open_actor_cluster, | ||||
| ) | ||||
| from ._context import ( | ||||
|     Context, | ||||
|     context, | ||||
|     Context as Context,  # the type | ||||
|     context as context,  # a func-decorator | ||||
| ) | ||||
| from ._streaming import ( | ||||
|     MsgStream, | ||||
|     stream, | ||||
|     MsgStream as MsgStream, | ||||
|     stream as stream, | ||||
| ) | ||||
| from ._discovery import ( | ||||
|     get_arbiter, | ||||
|     find_actor, | ||||
|     wait_for_actor, | ||||
|     query_actor, | ||||
|     get_registry as get_registry, | ||||
|     find_actor as find_actor, | ||||
|     wait_for_actor as wait_for_actor, | ||||
|     query_actor as query_actor, | ||||
| ) | ||||
| from ._supervise import ( | ||||
|     open_nursery as open_nursery, | ||||
|     ActorNursery as ActorNursery, | ||||
| ) | ||||
| from ._supervise import open_nursery | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
|     is_root_process, | ||||
|     current_actor as current_actor, | ||||
|     is_root_process as is_root_process, | ||||
|     current_ipc_ctx as current_ipc_ctx, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     RemoteActorError, | ||||
|     ModuleNotExposed, | ||||
|     ContextCancelled, | ||||
|     ContextCancelled as ContextCancelled, | ||||
|     ModuleNotExposed as ModuleNotExposed, | ||||
|     MsgTypeError as MsgTypeError, | ||||
|     RemoteActorError as RemoteActorError, | ||||
|     TransportClosed as TransportClosed, | ||||
| ) | ||||
| from ._debug import ( | ||||
|     breakpoint, | ||||
|     post_mortem, | ||||
| from .devx import ( | ||||
|     breakpoint as breakpoint, | ||||
|     pause as pause, | ||||
|     pause_from_sync as pause_from_sync, | ||||
|     post_mortem as post_mortem, | ||||
| ) | ||||
| from . import msg | ||||
| from . import msg as msg | ||||
| from ._root import ( | ||||
|     run_daemon, | ||||
|     open_root_actor, | ||||
|     run_daemon as run_daemon, | ||||
|     open_root_actor as open_root_actor, | ||||
| ) | ||||
| from ._portal import Portal | ||||
| from ._runtime import Actor | ||||
| 
 | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'Actor', | ||||
|     'Channel', | ||||
|     'Context', | ||||
|     'ContextCancelled', | ||||
|     'ModuleNotExposed', | ||||
|     'MsgStream', | ||||
|     'BaseExceptionGroup', | ||||
|     'Portal', | ||||
|     'RemoteActorError', | ||||
|     'breakpoint', | ||||
|     'context', | ||||
|     'current_actor', | ||||
|     'find_actor', | ||||
|     'get_arbiter', | ||||
|     'is_root_process', | ||||
|     'msg', | ||||
|     'open_actor_cluster', | ||||
|     'open_nursery', | ||||
|     'open_root_actor', | ||||
|     'post_mortem', | ||||
|     'query_actor', | ||||
|     'run_daemon', | ||||
|     'stream', | ||||
|     'to_asyncio', | ||||
|     'wait_for_actor', | ||||
| ] | ||||
| from ._ipc import Channel as Channel | ||||
| from ._portal import Portal as Portal | ||||
| from ._runtime import Actor as Actor | ||||
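The `import name as name` spelling above is the standard explicit re-export idiom from PEP 484: under strict re-export checking (eg. mypy's `no_implicit_reexport`, or pyright's default for `py.typed` packages) only names bound with a redundant alias count as part of the public API, which is why the old `__all__` list can be dropped. A hypothetical two-module sketch:

    # mypkg/__init__.py  (hypothetical package, not part of this repo)
    from ._impl import (
        Widget as Widget,  # re-exported: `from mypkg import Widget` type-checks
        _helper,           # NOT re-exported: type checkers flag external use
    )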
|  |  | |||
|  | @ -18,8 +18,6 @@ | |||
| This is the "bootloader" for actors started using the native trio backend. | ||||
| 
 | ||||
| """ | ||||
| import sys | ||||
| import trio | ||||
| import argparse | ||||
| 
 | ||||
| from ast import literal_eval | ||||
|  | @ -37,9 +35,8 @@ def parse_ipaddr(arg): | |||
|     return (str(host), int(port)) | ||||
| 
 | ||||
| 
 | ||||
| from ._entry import _trio_main | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     __tracebackhide__: bool = True | ||||
| 
 | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument("--uid", type=parse_uid) | ||||
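Presumably `parse_ipaddr` (shown earlier in this hunk) just round-trips a repr'd 2-tuple via `ast.literal_eval`; a quick runnable sketch of that parsing step:

    from ast import literal_eval

    # safely evaluate a repr'd (host, port) tuple without using `eval()`
    host, port = literal_eval("('127.0.0.1', 1616)")
    assert (str(host), int(port)) == ('127.0.0.1', 1616)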
|  |  | |||
 2602  tractor/_context.py  (file diff suppressed because it is too large; Load Diff)
							|  | @ -1,922 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Multi-core debugging for da peeps! | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import bdb | ||||
| import os | ||||
| import sys | ||||
| import signal | ||||
| from functools import ( | ||||
|     partial, | ||||
|     cached_property, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
|     AsyncIterator, | ||||
|     AsyncGenerator, | ||||
| ) | ||||
| from types import FrameType | ||||
| 
 | ||||
| import pdbp | ||||
| import tractor | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
| from .log import get_logger | ||||
| from ._discovery import get_root | ||||
| from ._state import ( | ||||
|     is_root_process, | ||||
|     debug_mode, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     is_multi_cancelled, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from ._ipc import Channel | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| __all__ = ['breakpoint', 'post_mortem'] | ||||
| 
 | ||||
| 
 | ||||
| class Lock: | ||||
|     ''' | ||||
|     Actor global debug lock state. | ||||
| 
 | ||||
|     Mostly to avoid a lot of ``global`` declarations for now XD. | ||||
| 
 | ||||
|     ''' | ||||
|     repl: MultiActorPdb | None = None | ||||
|     # placeholder for function to set a ``trio.Event`` on debugger exit | ||||
|     # pdb_release_hook: Optional[Callable] = None | ||||
| 
 | ||||
|     _trio_handler: Callable[ | ||||
|         [int, Optional[FrameType]], Any | ||||
|     ] | int | None = None | ||||
| 
 | ||||
|     # actor-wide variable pointing to current task name using debugger | ||||
|     local_task_in_debug: str | None = None | ||||
| 
 | ||||
|     # NOTE: set by the current task waiting on the root tty lock from | ||||
|     # the CALLER side of the `lock_tty_for_child()` context entry-call | ||||
|     # and must be cancelled if this actor is cancelled via IPC | ||||
|     # request-message otherwise deadlocks with the parent actor may | ||||
|     # ensue | ||||
|     _debugger_request_cs: Optional[trio.CancelScope] = None | ||||
| 
 | ||||
|     # NOTE: set only in the root actor for the **local** root spawned task | ||||
|     # which has acquired the lock (i.e. this is on the callee side of | ||||
|     # the `lock_tty_for_child()` context entry). | ||||
|     _root_local_task_cs_in_debug: Optional[trio.CancelScope] = None | ||||
| 
 | ||||
|     # actor tree-wide actor uid that supposedly has the tty lock | ||||
|     global_actor_in_debug: Optional[tuple[str, str]] = None | ||||
| 
 | ||||
|     local_pdb_complete: Optional[trio.Event] = None | ||||
|     no_remote_has_tty: Optional[trio.Event] = None | ||||
| 
 | ||||
|     # lock in root actor preventing multi-access to local tty | ||||
|     _debug_lock: trio.StrictFIFOLock = trio.StrictFIFOLock() | ||||
| 
 | ||||
|     _orig_sigint_handler: Optional[Callable] = None | ||||
|     _blocked: set[tuple[str, str]] = set() | ||||
| 
 | ||||
|     @classmethod | ||||
|     def shield_sigint(cls): | ||||
|         cls._orig_sigint_handler = signal.signal( | ||||
|             signal.SIGINT, | ||||
|             shield_sigint_handler, | ||||
|         ) | ||||
| 
 | ||||
|     @classmethod | ||||
|     def unshield_sigint(cls): | ||||
|         # always restore ``trio``'s sigint handler. see notes below in | ||||
|         # the pdb factory about the nightmare that is that code swapping | ||||
|         # out the handler when the repl activates... | ||||
|         signal.signal(signal.SIGINT, cls._trio_handler) | ||||
|         cls._orig_sigint_handler = None | ||||
| 
 | ||||
|     @classmethod | ||||
|     def release(cls): | ||||
|         try: | ||||
|             cls._debug_lock.release() | ||||
|         except RuntimeError: | ||||
|             # uhhh makes no sense but been seeing the non-owner | ||||
|             # release error even though this is definitely the task | ||||
|             # that locked? | ||||
|             owner = cls._debug_lock.statistics().owner | ||||
|             if owner: | ||||
|                 raise | ||||
| 
 | ||||
|         # actor-local state, irrelevant for non-root. | ||||
|         cls.global_actor_in_debug = None | ||||
|         cls.local_task_in_debug = None | ||||
| 
 | ||||
|         try: | ||||
|             # sometimes the ``trio`` runtime might already be terminated in | ||||
|             # which case this call will raise. | ||||
|             if cls.local_pdb_complete is not None: | ||||
|                 cls.local_pdb_complete.set() | ||||
|         finally: | ||||
|             # restore original sigint handler | ||||
|             cls.unshield_sigint() | ||||
|             cls.repl = None | ||||
| 
 | ||||
| 
 | ||||
| class TractorConfig(pdbp.DefaultConfig): | ||||
|     ''' | ||||
|     Custom ``pdbp`` goodness :surfer: | ||||
| 
 | ||||
|     ''' | ||||
|     use_pygments: bool = True | ||||
|     sticky_by_default: bool = False | ||||
|     enable_hidden_frames: bool = False | ||||
| 
 | ||||
|     # much thanks @mdmintz for the hot tip! | ||||
|     # fixes line spacing issue when resizing terminal B) | ||||
|     truncate_long_lines: bool = False | ||||
| 
 | ||||
| 
 | ||||
| class MultiActorPdb(pdbp.Pdb): | ||||
|     ''' | ||||
|     Add teardown hooks to the regular ``pdbp.Pdb``. | ||||
| 
 | ||||
|     ''' | ||||
|     # override the pdbp config with our coolio one | ||||
|     DefaultConfig = TractorConfig | ||||
| 
 | ||||
|     # def preloop(self): | ||||
|     #     print('IN PRELOOP') | ||||
|     #     super().preloop() | ||||
| 
 | ||||
|     # TODO: figure out how to disallow recursive .set_trace() entry | ||||
|     # since that'll cause deadlock for us. | ||||
|     def set_continue(self): | ||||
|         try: | ||||
|             super().set_continue() | ||||
|         finally: | ||||
|             Lock.release() | ||||
| 
 | ||||
|     def set_quit(self): | ||||
|         try: | ||||
|             super().set_quit() | ||||
|         finally: | ||||
|             Lock.release() | ||||
| 
 | ||||
|     # XXX NOTE: we only override this because apparently the stdlib pdb | ||||
|     # bois likes to touch the SIGINT handler as much as i like to touch | ||||
|     # my d$%&. | ||||
|     def _cmdloop(self): | ||||
|         self.cmdloop() | ||||
| 
 | ||||
|     @cached_property | ||||
|     def shname(self) -> str | None: | ||||
|         ''' | ||||
|         Attempt to return the login shell name with a special check for | ||||
|         the infamous `xonsh` since it seems to have some issues much | ||||
|         different from std shells when it comes to flushing the prompt? | ||||
| 
 | ||||
|         ''' | ||||
|         # SUPER HACKY and only really works if `xonsh` is not used | ||||
|         # before spawning further sub-shells.. | ||||
|         shpath = os.getenv('SHELL', None) | ||||
| 
 | ||||
|         if shpath: | ||||
|             if ( | ||||
|                 os.getenv('XONSH_LOGIN', default=False) | ||||
|                 or 'xonsh' in shpath | ||||
|             ): | ||||
|                 return 'xonsh' | ||||
| 
 | ||||
|             return os.path.basename(shpath) | ||||
| 
 | ||||
|         return None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def _acquire_debug_lock_from_root_task( | ||||
|     uid: tuple[str, str] | ||||
| 
 | ||||
| ) -> AsyncIterator[trio.StrictFIFOLock]: | ||||
|     ''' | ||||
|     Acquire a root-actor local FIFO lock which tracks mutex access of | ||||
|     the process tree's global debugger breakpoint. | ||||
| 
 | ||||
|     This lock avoids tty clobbering (by preventing multiple processes | ||||
|     reading from stdstreams) and ensures multi-actor, sequential access | ||||
|     to the ``pdb`` repl. | ||||
| 
 | ||||
|     ''' | ||||
|     task_name = trio.lowlevel.current_task().name | ||||
| 
 | ||||
|     log.runtime( | ||||
|         f"Attempting to acquire TTY lock, remote task: {task_name}:{uid}" | ||||
|     ) | ||||
| 
 | ||||
|     we_acquired = False | ||||
| 
 | ||||
|     try: | ||||
|         log.runtime( | ||||
|             f"entering lock checkpoint, remote task: {task_name}:{uid}" | ||||
|         ) | ||||
|         we_acquired = True | ||||
| 
 | ||||
|         # NOTE: if the surrounding cancel scope from the | ||||
|         # `lock_tty_for_child()` caller is cancelled, this line should | ||||
|         # unblock and NOT leave us in some kind of | ||||
|         # a "child-locked-TTY-but-child-is-uncontactable-over-IPC" | ||||
|         # condition. | ||||
|         await Lock._debug_lock.acquire() | ||||
| 
 | ||||
|         if Lock.no_remote_has_tty is None: | ||||
|             # mark the tty lock as being in use so that the runtime | ||||
|             # can try to avoid clobbering any connection from a child | ||||
|             # that's currently relying on it. | ||||
|             Lock.no_remote_has_tty = trio.Event() | ||||
| 
 | ||||
|         Lock.global_actor_in_debug = uid | ||||
|         log.runtime(f"TTY lock acquired, remote task: {task_name}:{uid}") | ||||
| 
 | ||||
|         # NOTE: critical section: this yield is unshielded! | ||||
| 
 | ||||
|         # IF we received a cancel during the shielded lock entry of some | ||||
|         # next-in-queue requesting task, then the resumption here will | ||||
|         # result in that ``trio.Cancelled`` being raised to our caller | ||||
|         # (likely from ``lock_tty_for_child()`` below)!  In | ||||
|         # this case the ``finally:`` below should trigger and the | ||||
|         # surrounding caller side context should cancel normally | ||||
|         # relaying back to the caller. | ||||
| 
 | ||||
|         yield Lock._debug_lock | ||||
| 
 | ||||
|     finally: | ||||
|         if ( | ||||
|             we_acquired | ||||
|             and Lock._debug_lock.locked() | ||||
|         ): | ||||
|             Lock._debug_lock.release() | ||||
| 
 | ||||
|         # IFF there are no more requesting tasks queued up, fire the | ||||
|         # "tty-unlocked" event thereby alerting any monitors of the lock that | ||||
|         # we are now back in the "tty unlocked" state. This is basically | ||||
|         # an edge triggered signal around an empty queue of sub-actor | ||||
|         # tasks that may have tried to acquire the lock. | ||||
|         stats = Lock._debug_lock.statistics() | ||||
|         if ( | ||||
|             not stats.owner | ||||
|         ): | ||||
|             log.runtime(f"No more tasks waiting on tty lock! says {uid}") | ||||
|             if Lock.no_remote_has_tty is not None: | ||||
|                 Lock.no_remote_has_tty.set() | ||||
|                 Lock.no_remote_has_tty = None | ||||
| 
 | ||||
|         Lock.global_actor_in_debug = None | ||||
| 
 | ||||
|         log.runtime( | ||||
|             f"TTY lock released, remote task: {task_name}:{uid}" | ||||
|         ) | ||||
| 
 | ||||
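The pattern here, a FIFO mutex plus an edge-triggered "all clear" event, can be sketched standalone; the names below are hypothetical and this is a simplification of the runtime logic, not the exact implementation:

    from contextlib import asynccontextmanager
    import trio

    _lock = trio.StrictFIFOLock()
    _all_clear: trio.Event|None = None

    @asynccontextmanager
    async def acquire_mutex():
        global _all_clear
        await _lock.acquire()
        if _all_clear is None:
            _all_clear = trio.Event()  # mark the resource as in-use
        try:
            yield _lock
        finally:
            _lock.release()
            # nobody was queued behind us -> signal "unlocked" to monitors
            if not _lock.statistics().owner:
                if _all_clear is not None:
                    _all_clear.set()
                    _all_clear = None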
| 
 | ||||
| @tractor.context | ||||
| async def lock_tty_for_child( | ||||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     subactor_uid: tuple[str, str] | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Lock the TTY in the root process of an actor tree in a new | ||||
|     inter-actor-context-task such that the ``pdbp`` debugger console | ||||
|     can be mutex-allocated to the calling sub-actor for REPL control | ||||
|     without interference by other processes / threads. | ||||
| 
 | ||||
|     NOTE: this task must be invoked in the root process of the actor | ||||
|     tree. It is meant to be invoked as an rpc-task and should be | ||||
|     highly reliable at releasing the mutex completely! | ||||
| 
 | ||||
|     ''' | ||||
|     task_name = trio.lowlevel.current_task().name | ||||
| 
 | ||||
|     if tuple(subactor_uid) in Lock._blocked: | ||||
|         log.warning( | ||||
|             f'Actor {subactor_uid} is blocked from acquiring debug lock\n' | ||||
|             f"remote task: {task_name}:{subactor_uid}" | ||||
|         ) | ||||
|         ctx._enter_debugger_on_cancel = False | ||||
|         await ctx.cancel(f'Debug lock blocked for {subactor_uid}') | ||||
|         return 'pdb_lock_blocked' | ||||
| 
 | ||||
|     # TODO: when we get to true remote debugging | ||||
|     # this will deliver stdin data? | ||||
| 
 | ||||
|     log.debug( | ||||
|         "Attempting to acquire TTY lock\n" | ||||
|         f"remote task: {task_name}:{subactor_uid}" | ||||
|     ) | ||||
| 
 | ||||
|     log.debug(f"Actor {subactor_uid} is WAITING on stdin hijack lock") | ||||
|     Lock.shield_sigint() | ||||
| 
 | ||||
|     try: | ||||
|         with ( | ||||
|             trio.CancelScope(shield=True) as debug_lock_cs, | ||||
|         ): | ||||
|             Lock._root_local_task_cs_in_debug = debug_lock_cs | ||||
|             async with _acquire_debug_lock_from_root_task(subactor_uid): | ||||
| 
 | ||||
|                 # indicate to child that we've locked stdio | ||||
|                 await ctx.started('Locked') | ||||
|                 log.debug( | ||||
|                     f"Actor {subactor_uid} acquired stdin hijack lock" | ||||
|                 ) | ||||
| 
 | ||||
|                 # wait for unlock pdb by child | ||||
|                 async with ctx.open_stream() as stream: | ||||
|                     assert await stream.receive() == 'pdb_unlock' | ||||
| 
 | ||||
|         return "pdb_unlock_complete" | ||||
| 
 | ||||
|     finally: | ||||
|         Lock._root_local_task_cs_in_debug = None | ||||
|         Lock.unshield_sigint() | ||||
| 
 | ||||
| 
 | ||||
| async def wait_for_parent_stdin_hijack( | ||||
|     actor_uid: tuple[str, str], | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     ''' | ||||
|     Connect to the root actor via a ``Context`` and invoke a task which | ||||
|     locks a root-local TTY lock: ``lock_tty_for_child()``; this func | ||||
|     should be called in a new task from a child actor **and never the | ||||
|     root**. | ||||
| 
 | ||||
|     This function is used by any sub-actor to acquire mutex access to | ||||
|     the ``pdb`` REPL and thus the root's TTY for interactive debugging | ||||
|     (see below inside ``_breakpoint()``). It can be used to ensure that | ||||
|     an intermediate nursery-owning actor does not clobber its children | ||||
|     if they are in debug (see below inside | ||||
|     ``maybe_wait_for_debugger()``). | ||||
| 
 | ||||
|     ''' | ||||
|     with trio.CancelScope(shield=True) as cs: | ||||
|         Lock._debugger_request_cs = cs | ||||
| 
 | ||||
|         try: | ||||
|             async with get_root() as portal: | ||||
| 
 | ||||
|                 # this syncs to child's ``Context.started()`` call. | ||||
|                 async with portal.open_context( | ||||
| 
 | ||||
|                     tractor._debug.lock_tty_for_child, | ||||
|                     subactor_uid=actor_uid, | ||||
| 
 | ||||
|                 ) as (ctx, val): | ||||
| 
 | ||||
|                     log.debug('locked context') | ||||
|                     assert val == 'Locked' | ||||
| 
 | ||||
|                     async with ctx.open_stream() as stream: | ||||
|                         # unblock local caller | ||||
| 
 | ||||
|                         try: | ||||
|                             assert Lock.local_pdb_complete | ||||
|                             task_status.started(cs) | ||||
|                             await Lock.local_pdb_complete.wait() | ||||
| 
 | ||||
|                         finally: | ||||
|                             # TODO: shielding currently can cause hangs... | ||||
|                             # with trio.CancelScope(shield=True): | ||||
|                             await stream.send('pdb_unlock') | ||||
| 
 | ||||
|                         # sync with callee termination | ||||
|                         assert await ctx.result() == "pdb_unlock_complete" | ||||
| 
 | ||||
|                 log.debug('exiting child side locking task context') | ||||
| 
 | ||||
|         except ContextCancelled: | ||||
|             log.warning('Root actor cancelled debug lock') | ||||
|             raise | ||||
| 
 | ||||
|         finally: | ||||
|             Lock.local_task_in_debug = None | ||||
|             log.debug('Exiting debugger from child') | ||||
| 
 | ||||
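The inter-actor handshake shape used above, condensed (a hedged sketch, not runnable standalone; `some_ctx_ep` stands in for any `@tractor.context` endpoint such as `lock_tty_for_child`):

    import tractor

    async def handshake(portal: tractor.Portal) -> None:
        async with portal.open_context(
            some_ctx_ep,             # remote task spawned in the peer
        ) as (ctx, first):           # `first` == the callee's `.started()` value
            async with ctx.open_stream() as stream:
                await stream.send('pdb_unlock')   # bidirectional msging
            assert await ctx.result() == 'pdb_unlock_complete'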
| 
 | ||||
| def mk_mpdb() -> tuple[MultiActorPdb, Callable]: | ||||
| 
 | ||||
|     pdb = MultiActorPdb() | ||||
|     # signal.signal = pdbp.hideframe(signal.signal) | ||||
| 
 | ||||
|     Lock.shield_sigint() | ||||
| 
 | ||||
|     # XXX: These are the important flags mentioned in | ||||
|     # https://github.com/python-trio/trio/issues/1155 | ||||
|     # which resolve the traceback spews to console. | ||||
|     pdb.allow_kbdint = True | ||||
|     pdb.nosigint = True | ||||
| 
 | ||||
|     return pdb, Lock.unshield_sigint | ||||
| 
 | ||||
| 
 | ||||
| async def _breakpoint( | ||||
| 
 | ||||
|     debug_func, | ||||
| 
 | ||||
|     # TODO: | ||||
|     # shield: bool = False | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Breakpoint entry for engaging debugger instance sync-interaction, | ||||
|     from async code, executing in actor runtime (task). | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__ = True | ||||
|     actor = tractor.current_actor() | ||||
|     pdb, undo_sigint = mk_mpdb() | ||||
|     task_name = trio.lowlevel.current_task().name | ||||
| 
 | ||||
|     # TODO: is it possible to debug a trio.Cancelled except block? | ||||
|     # right now it seems like we can kinda do it by shielding | ||||
|     # around ``tractor.breakpoint()`` but not if we move the shielded | ||||
|     # scope here??? | ||||
|     # with trio.CancelScope(shield=shield): | ||||
|     #     await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|     if ( | ||||
|         not Lock.local_pdb_complete | ||||
|         or Lock.local_pdb_complete.is_set() | ||||
|     ): | ||||
|         Lock.local_pdb_complete = trio.Event() | ||||
| 
 | ||||
|     # TODO: need a more robust check for the "root" actor | ||||
|     if ( | ||||
|         not is_root_process() | ||||
|         and actor._parent_chan  # a connected child | ||||
|     ): | ||||
| 
 | ||||
|         if Lock.local_task_in_debug: | ||||
| 
 | ||||
|             # Recurrence entry case: this task already has the lock and | ||||
|             # is likely recurrently entering a breakpoint | ||||
|             if Lock.local_task_in_debug == task_name: | ||||
|                 # noop on recurrent entry case but we want to trigger | ||||
|                 # a checkpoint to allow other actors to error-propagate and | ||||
|                 # potentially avoid infinite re-entries in some subactor. | ||||
|                 await trio.lowlevel.checkpoint() | ||||
|                 return | ||||
| 
 | ||||
|             # if **this** actor is already in debug mode block here | ||||
|             # waiting for the control to be released - this allows | ||||
|             # support for recursive entries to `tractor.breakpoint()` | ||||
|             log.warning(f"{actor.uid} already has a debug lock, waiting...") | ||||
| 
 | ||||
|             await Lock.local_pdb_complete.wait() | ||||
|             await trio.sleep(0.1) | ||||
| 
 | ||||
|         # mark local actor as "in debug mode" to avoid recurrent | ||||
|         # entries/requests to the root process | ||||
|         Lock.local_task_in_debug = task_name | ||||
| 
 | ||||
|         # this **must** be awaited by the caller and is done using the | ||||
|         # root nursery so that the debugger can continue to run without | ||||
|         # being restricted by the scope of a new task nursery. | ||||
| 
 | ||||
|         # TODO: if we want to debug a trio.Cancelled triggered exception | ||||
|         # we have to figure out how to avoid having the service nursery | ||||
|         # cancel on this task start? I *think* this works below: | ||||
|         # ```python | ||||
|         #   actor._service_n.cancel_scope.shield = shield | ||||
|         # ``` | ||||
|         # but not entirely sure if that's a sane way to implement it? | ||||
|         try: | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 await actor._service_n.start( | ||||
|                     wait_for_parent_stdin_hijack, | ||||
|                     actor.uid, | ||||
|                 ) | ||||
|                 Lock.repl = pdb | ||||
|         except RuntimeError: | ||||
|             Lock.release() | ||||
| 
 | ||||
|             if actor._cancel_called: | ||||
|                 # service nursery won't be usable and we | ||||
|                 # don't want to lock up the root either way since | ||||
|                 # we're in (the midst of) cancellation. | ||||
|                 return | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     elif is_root_process(): | ||||
| 
 | ||||
|         # we also wait in the root-parent for any child that | ||||
|         # may have the tty locked prior | ||||
|         # TODO: wait, what about multiple root tasks acquiring it though? | ||||
|         if Lock.global_actor_in_debug == actor.uid: | ||||
|             # re-entrant root process already has it: noop. | ||||
|             return | ||||
| 
 | ||||
|         # XXX: since we need to enter pdb synchronously below, | ||||
|         # we have to release the lock manually from pdb completion | ||||
|         # callbacks. Can't think of a nicer way then this atm. | ||||
|         if Lock._debug_lock.locked(): | ||||
|             log.warning( | ||||
|                 'Root actor attempting to shield-acquire active tty lock' | ||||
|                 f' owned by {Lock.global_actor_in_debug}') | ||||
| 
 | ||||
|             # must shield here to avoid hitting a ``Cancelled`` and | ||||
|             # a child getting stuck bc we clobbered the tty | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 await Lock._debug_lock.acquire() | ||||
|         else: | ||||
|             # may be cancelled | ||||
|             await Lock._debug_lock.acquire() | ||||
| 
 | ||||
|         Lock.global_actor_in_debug = actor.uid | ||||
|         Lock.local_task_in_debug = task_name | ||||
|         Lock.repl = pdb | ||||
| 
 | ||||
|     try: | ||||
|         # block here once (at the appropriate frame *up*) where | ||||
|         # ``breakpoint()`` was awaited and begin handling stdio. | ||||
|         log.debug("Entering the synchronous world of pdb") | ||||
|         debug_func(actor, pdb) | ||||
| 
 | ||||
|     except bdb.BdbQuit: | ||||
|         Lock.release() | ||||
|         raise | ||||
| 
 | ||||
|     # XXX: apparently we can't do this without showing this frame | ||||
|     # in the backtrace on first entry to the REPL? Seems like an odd | ||||
|     # behaviour that should have been fixed by now. This is also why | ||||
|     # we scrapped all the @cm approaches that were tried previously. | ||||
|     # finally: | ||||
|     #     __tracebackhide__ = True | ||||
|     #     # frame = sys._getframe() | ||||
|     #     # last_f = frame.f_back | ||||
|     #     # last_f.f_globals['__tracebackhide__'] = True | ||||
|     #     # signal.signal = pdbp.hideframe(signal.signal) | ||||
| 
 | ||||
| 
 | ||||
| def shield_sigint_handler( | ||||
|     signum: int, | ||||
|     frame: 'frame',  # type: ignore # noqa | ||||
|     # pdb_obj: Optional[MultiActorPdb] = None, | ||||
|     *args, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Specialized, debugger-aware SIGINT handler. | ||||
| 
 | ||||
|     In children we always ignore it to avoid deadlocks since cancellation | ||||
|     should always be managed by the parent supervising actor. The root | ||||
|     is always cancelled on ctrl-c. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__ = True | ||||
| 
 | ||||
|     uid_in_debug = Lock.global_actor_in_debug | ||||
| 
 | ||||
|     actor = tractor.current_actor() | ||||
|     # print(f'{actor.uid} in HANDLER with ') | ||||
| 
 | ||||
|     def do_cancel(): | ||||
|         # If we haven't tried to cancel the runtime then do that instead | ||||
|         # of raising a KBI (which may non-gracefully destroy | ||||
|         # a ``trio.run()``). | ||||
|         if not actor._cancel_called: | ||||
|             actor.cancel_soon() | ||||
| 
 | ||||
|         # If the runtime is already cancelled it likely means the user | ||||
|         # hit ctrl-c again because teardown didn't fully take place in | ||||
|         # which case we do the "hard" raising of a local KBI. | ||||
|         else: | ||||
|             raise KeyboardInterrupt | ||||
| 
 | ||||
|     any_connected = False | ||||
| 
 | ||||
|     if uid_in_debug is not None: | ||||
|         # try to see if the supposed (sub)actor in debug still | ||||
|         # has an active connection to *this* actor, and if not | ||||
|         # it's likely they aren't using the TTY lock / debugger | ||||
|         # and we should propagate SIGINT normally. | ||||
|         chans = actor._peers.get(tuple(uid_in_debug)) | ||||
|         if chans: | ||||
|             any_connected = any(chan.connected() for chan in chans) | ||||
|             if not any_connected: | ||||
|                 log.warning( | ||||
|                     'A global actor reported to be in debug ' | ||||
|                     'but no connection exists for this child:\n' | ||||
|                     f'{uid_in_debug}\n' | ||||
|                     'Allowing SIGINT propagation..' | ||||
|                 ) | ||||
|                 return do_cancel() | ||||
| 
 | ||||
|     # only set in the actor actually running the REPL | ||||
|     pdb_obj = Lock.repl | ||||
| 
 | ||||
|     # root actor branch that reports whether or not a child | ||||
|     # has locked debugger. | ||||
|     if ( | ||||
|         is_root_process() | ||||
|         and uid_in_debug is not None | ||||
| 
 | ||||
|         # XXX: only if there is an existing connection to the | ||||
|         # (sub-)actor in debug do we ignore SIGINT in this | ||||
|         # parent! Otherwise we may hang waiting for an actor | ||||
|         # which has already terminated to unlock. | ||||
|         and any_connected | ||||
|     ): | ||||
|         # we are root and some actor is in debug mode | ||||
|         # if uid_in_debug is not None: | ||||
| 
 | ||||
|         if pdb_obj: | ||||
|             name = uid_in_debug[0] | ||||
|             if name != 'root': | ||||
|                 log.pdb( | ||||
|                     f"Ignoring SIGINT, child in debug mode: `{uid_in_debug}`" | ||||
|                 ) | ||||
| 
 | ||||
|             else: | ||||
|                 log.pdb( | ||||
|                     "Ignoring SIGINT while in debug mode" | ||||
|                 ) | ||||
|     elif ( | ||||
|         is_root_process() | ||||
|     ): | ||||
|         if pdb_obj: | ||||
|             log.pdb( | ||||
|                 "Ignoring SIGINT since debug mode is enabled" | ||||
|             ) | ||||
| 
 | ||||
|         if ( | ||||
|             Lock._root_local_task_cs_in_debug | ||||
|             and not Lock._root_local_task_cs_in_debug.cancel_called | ||||
|         ): | ||||
|             Lock._root_local_task_cs_in_debug.cancel() | ||||
| 
 | ||||
|             # revert back to ``trio`` handler asap! | ||||
|             Lock.unshield_sigint() | ||||
| 
 | ||||
|     # child actor that has locked the debugger | ||||
|     elif not is_root_process(): | ||||
| 
 | ||||
|         chan: Channel = actor._parent_chan | ||||
|         if not chan or not chan.connected(): | ||||
|             log.warning( | ||||
|                 'A global actor reported to be in debug ' | ||||
|                 'but no connection exists for its parent:\n' | ||||
|                 f'{uid_in_debug}\n' | ||||
|                 'Allowing SIGINT propagation..' | ||||
|             ) | ||||
|             return do_cancel() | ||||
| 
 | ||||
|         task = Lock.local_task_in_debug | ||||
|         if ( | ||||
|             task | ||||
|             and pdb_obj | ||||
|         ): | ||||
|             log.pdb( | ||||
|                 f"Ignoring SIGINT while task in debug mode: `{task}`" | ||||
|             ) | ||||
| 
 | ||||
|         # TODO: how to handle the case of an intermediary-child actor | ||||
|         # that **is not** marked in debug mode? See outstanding issue: | ||||
|         # https://github.com/goodboy/tractor/issues/320 | ||||
|         # elif debug_mode(): | ||||
| 
 | ||||
|     else:  # XXX: shouldn't ever get here? | ||||
|         print("WTFWTFWTF") | ||||
|         raise KeyboardInterrupt | ||||
| 
 | ||||
|     # NOTE: currently (at least on ``fancycompleter`` 0.9.2) | ||||
|     # it looks to be that the last command that was run (eg. ll) | ||||
|     # will be repeated by default. | ||||
| 
 | ||||
|     # maybe redraw/print last REPL output to console since | ||||
|     # we want to alert the user that more input is expected since | ||||
|     # nothing has been done due to ignoring sigint. | ||||
|     if ( | ||||
|         pdb_obj  # only when this actor has a REPL engaged | ||||
|     ): | ||||
|         # XXX: yah, mega hack, but how else do we catch this madness XD | ||||
|         if pdb_obj.shname == 'xonsh': | ||||
|             pdb_obj.stdout.write(pdb_obj.prompt) | ||||
| 
 | ||||
|         pdb_obj.stdout.flush() | ||||
| 
 | ||||
|         # TODO: make this work like sticky mode where if there is output | ||||
|         # detected as written to the tty we redraw this part underneath | ||||
|         # and erase the past draw of this same bit above? | ||||
|         # pdb_obj.sticky = True | ||||
|         # pdb_obj._print_if_sticky() | ||||
| 
 | ||||
|         # also see these links for an approach from ``ptk``: | ||||
|         # https://github.com/goodboy/tractor/issues/130#issuecomment-663752040 | ||||
|         # https://github.com/prompt-toolkit/python-prompt-toolkit/blob/c2c6af8a0308f9e5d7c0e28cb8a02963fe0ce07a/prompt_toolkit/patch_stdout.py | ||||
| 
 | ||||
|         # XXX LEGACY: lol, see ``pdbpp`` issue: | ||||
|         # https://github.com/pdbpp/pdbpp/issues/496 | ||||
| 
 | ||||
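Underneath, `Lock.shield_sigint()` / `Lock.unshield_sigint()` are just the stdlib handler-swap idiom: `signal.signal()` returns the previous handler so it can be stashed and later restored verbatim. A minimal standalone sketch:

    import signal

    def ignore_sigint(signum, frame):
        print('ignoring SIGINT while the REPL is engaged..')

    prev = signal.signal(signal.SIGINT, ignore_sigint)  # install + save old
    try:
        ...  # debugger/REPL session would run here
    finally:
        signal.signal(signal.SIGINT, prev)  # always restore the original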
| 
 | ||||
| def _set_trace( | ||||
|     actor: tractor.Actor | None = None, | ||||
|     pdb: MultiActorPdb | None = None, | ||||
| ): | ||||
|     __tracebackhide__ = True | ||||
|     actor = actor or tractor.current_actor() | ||||
| 
 | ||||
|     # start 2 levels up in user code | ||||
|     frame: Optional[FrameType] = sys._getframe() | ||||
|     if frame: | ||||
|         frame = frame.f_back  # type: ignore | ||||
| 
 | ||||
|     if ( | ||||
|         frame | ||||
|         and pdb | ||||
|         and actor is not None | ||||
|     ): | ||||
|         log.pdb(f"\nAttaching pdb to actor: {actor.uid}\n") | ||||
|         # no f!#$&* idea, but when we're in async land | ||||
|         # we need 2x frames up? | ||||
|         frame = frame.f_back | ||||
| 
 | ||||
|     else: | ||||
|         pdb, undo_sigint = mk_mpdb() | ||||
| 
 | ||||
|         # we entered the global ``breakpoint()`` built-in from sync | ||||
|         # code? | ||||
|         Lock.local_task_in_debug = 'sync' | ||||
| 
 | ||||
|     pdb.set_trace(frame=frame) | ||||
| 
 | ||||
| 
 | ||||
| breakpoint = partial( | ||||
|     _breakpoint, | ||||
|     _set_trace, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def _post_mortem( | ||||
|     actor: tractor.Actor, | ||||
|     pdb: MultiActorPdb, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Enter the ``pdbpp`` post mortem entrypoint using our custom | ||||
|     debugger instance. | ||||
| 
 | ||||
|     ''' | ||||
|     log.pdb(f"\nAttaching to pdb in crashed actor: {actor.uid}\n") | ||||
| 
 | ||||
|     # TODO: you need ``pdbpp`` master (at least this commit | ||||
|     # https://github.com/pdbpp/pdbpp/commit/b757794857f98d53e3ebbe70879663d7d843a6c2) | ||||
|     # to fix this and avoid the hang it causes. See issue: | ||||
|     # https://github.com/pdbpp/pdbpp/issues/480 | ||||
|     # TODO: help with a 3.10+ major release if/when it arrives. | ||||
| 
 | ||||
|     pdbp.xpm(Pdb=lambda: pdb) | ||||
| 
 | ||||
| 
 | ||||
| post_mortem = partial( | ||||
|     _breakpoint, | ||||
|     _post_mortem, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| async def _maybe_enter_pm(err): | ||||
|     if ( | ||||
|         debug_mode() | ||||
| 
 | ||||
|         # NOTE: don't enter debug mode recursively after quitting pdb | ||||
|         # Iow, don't re-enter the repl if the `quit` command was issued | ||||
|         # by the user. | ||||
|         and not isinstance(err, bdb.BdbQuit) | ||||
| 
 | ||||
|         # XXX: if the error is the likely result of runtime-wide | ||||
|         # cancellation, we don't want to enter the debugger since | ||||
|         # there's races between when the parent actor has killed all | ||||
|         # comms and when the child tries to contact said parent to | ||||
|         # acquire the tty lock. | ||||
| 
 | ||||
|         # Really we just want to mostly avoid catching KBIs here so there | ||||
|         # might be a simpler check we can do? | ||||
|         and not is_multi_cancelled(err) | ||||
|     ): | ||||
|         log.debug("Actor crashed, entering debug mode") | ||||
|         try: | ||||
|             await post_mortem() | ||||
|         finally: | ||||
|             Lock.release() | ||||
|             return True | ||||
| 
 | ||||
|     else: | ||||
|         return False | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def acquire_debug_lock( | ||||
|     subactor_uid: tuple[str, str], | ||||
| ) -> AsyncGenerator[None, tuple]: | ||||
|     ''' | ||||
|     Grab root's debug lock on entry, release on exit. | ||||
| 
 | ||||
|     This helper is for actors who don't actually need | ||||
|     to acquire the debugger but want to wait until the | ||||
|     lock is free in the process-tree root. | ||||
| 
 | ||||
|     ''' | ||||
|     if not debug_mode(): | ||||
|         yield None | ||||
|         return | ||||
| 
 | ||||
|     async with trio.open_nursery() as n: | ||||
|         cs = await n.start( | ||||
|             wait_for_parent_stdin_hijack, | ||||
|             subactor_uid, | ||||
|         ) | ||||
|         yield None | ||||
|         cs.cancel() | ||||
| 
 | ||||
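The `n.start()` / `task_status.started(cs)` handoff used here is a general `trio` pattern: the spawned task parks, passes its cancel scope back to the starter, and the starter cancels it when done. A self-contained sketch:

    import trio

    async def background(
        task_status=trio.TASK_STATUS_IGNORED,
    ):
        with trio.CancelScope() as cs:
            task_status.started(cs)  # unblocks `nursery.start()`, returning `cs`
            await trio.sleep_forever()

    async def main():
        async with trio.open_nursery() as n:
            cs = await n.start(background)
            # ..do work while the bg task idles..
            cs.cancel()  # explicit teardown

    trio.run(main)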
| 
 | ||||
| async def maybe_wait_for_debugger( | ||||
|     poll_steps: int = 2, | ||||
|     poll_delay: float = 0.1, | ||||
|     child_in_debug: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     if ( | ||||
|         not debug_mode() | ||||
|         and not child_in_debug | ||||
|     ): | ||||
|         return | ||||
| 
 | ||||
|     if ( | ||||
|         is_root_process() | ||||
|     ): | ||||
|         # If we error in the root but the debugger is | ||||
|         # engaged we don't want to prematurely kill (and | ||||
|         # thus clobber access to) the local tty since it | ||||
|         # will make the pdb repl unusable. | ||||
|         # Instead try to wait for pdb to be released before | ||||
|         # tearing down. | ||||
| 
 | ||||
|         sub_in_debug = None | ||||
| 
 | ||||
|         for _ in range(poll_steps): | ||||
| 
 | ||||
|             if Lock.global_actor_in_debug: | ||||
|                 sub_in_debug = tuple(Lock.global_actor_in_debug) | ||||
| 
 | ||||
|             log.debug('Root polling for debug') | ||||
| 
 | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 await trio.sleep(poll_delay) | ||||
| 
 | ||||
|                 # TODO: could this make things more deterministic?  wait | ||||
|                 # to see if a sub-actor task will be scheduled and grab | ||||
|                 # the tty lock on the next tick? | ||||
|                 # XXX: doesn't seem to work | ||||
|                 # await trio.testing.wait_all_tasks_blocked(cushion=0) | ||||
| 
 | ||||
|                 debug_complete = Lock.no_remote_has_tty | ||||
|                 if ( | ||||
|                     (debug_complete and | ||||
|                      not debug_complete.is_set()) | ||||
|                 ): | ||||
|                     log.debug( | ||||
|                         'Root has errored but pdb is in use by ' | ||||
|                         f'child {sub_in_debug}\n' | ||||
|                         'Waiting on tty lock to release..') | ||||
| 
 | ||||
|                     await debug_complete.wait() | ||||
| 
 | ||||
|                 await trio.sleep(poll_delay) | ||||
|                 continue | ||||
|         else: | ||||
|             log.debug( | ||||
|                 'Root acquired TTY LOCK' | ||||
|             ) | ||||
|  | @ -15,52 +15,71 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Actor discovery API. | ||||
| Discovery (protocols) API for automatic addressing and location | ||||
| management of (service) actors. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     Union, | ||||
|     AsyncGenerator, | ||||
|     AsyncContextManager, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from .trionics import gather_contexts | ||||
| from ._ipc import _connect_chan, Channel | ||||
| from ._portal import ( | ||||
|     Portal, | ||||
|     open_portal, | ||||
|     LocalPortal, | ||||
| ) | ||||
| from ._state import current_actor, _runtime_vars | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
|     _runtime_vars, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_arbiter( | ||||
| 
 | ||||
| async def get_registry( | ||||
|     host: str, | ||||
|     port: int, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Union[Portal, LocalPortal], None]: | ||||
| ) -> AsyncGenerator[ | ||||
|     Portal | LocalPortal | None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Return a portal instance connected to a local or remote | ||||
|     arbiter. | ||||
|     registry-service actor; if a connection already exists re-use it | ||||
|     (presumably to call a `.register_actor()` registry runtime RPC | ||||
|     ep). | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
| 
 | ||||
|     if not actor: | ||||
|         raise RuntimeError("No actor instance has been defined yet?") | ||||
| 
 | ||||
|     if actor.is_arbiter: | ||||
|     actor: Actor = current_actor() | ||||
|     if actor.is_registrar: | ||||
|         # we're already the registrar | ||||
|         # (likely a re-entrant call from the registrar actor) | ||||
|         yield LocalPortal(actor, Channel((host, port))) | ||||
|         yield LocalPortal( | ||||
|             actor, | ||||
|             Channel((host, port)) | ||||
|         ) | ||||
|     else: | ||||
|         async with _connect_chan(host, port) as chan: | ||||
|         # TODO: try to look pre-existing connection from | ||||
|         # `Actor._peers` and use it instead? | ||||
|         async with ( | ||||
|             _connect_chan(host, port) as chan, | ||||
|             open_portal(chan) as regstr_ptl, | ||||
|         ): | ||||
|             yield regstr_ptl | ||||
| 
 | ||||
|             async with open_portal(chan) as arb_portal: | ||||
| 
 | ||||
|                 yield arb_portal | ||||
| 
 | ||||
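A hedged usage sketch of the renamed API, mirroring the lookup call that `query_actor()` below makes (`host`/`port` are an assumed-known registrar address; `'some_service'` is a hypothetical registered name):

    async def lookup(host: str, port: int):
        async with get_registry(host, port) as reg_portal:
            return await reg_portal.run_from_ns(
                'self',
                'find_actor',
                name='some_service',
            )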
| 
 | ||||
| @acm | ||||
|  | @ -68,62 +87,104 @@ async def get_root( | |||
|     **kwargs, | ||||
| ) -> AsyncGenerator[Portal, None]: | ||||
| 
 | ||||
|     # TODO: rename mailbox to `_root_maddr` when we finally | ||||
|     # add and impl libp2p multi-addrs? | ||||
|     host, port = _runtime_vars['_root_mailbox'] | ||||
|     assert host is not None | ||||
| 
 | ||||
|     async with _connect_chan(host, port) as chan: | ||||
|         async with open_portal(chan, **kwargs) as portal: | ||||
|     async with ( | ||||
|         _connect_chan(host, port) as chan, | ||||
|         open_portal(chan, **kwargs) as portal, | ||||
|     ): | ||||
|         yield portal | ||||
| 
 | ||||
| 
 | ||||
| def get_peer_by_name( | ||||
|     name: str, | ||||
|     # uuid: str|None = None, | ||||
| 
 | ||||
| ) -> list[Channel]|None:  # at least 1 | ||||
|     ''' | ||||
|     Scan for an existing connection (set) to a named actor | ||||
|     and return any channels from `Actor._peers`. | ||||
| 
 | ||||
|     This is an optimization method over querying the registrar for | ||||
|     the same info. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|     to_scan: dict[tuple, list[Channel]] = actor._peers.copy() | ||||
|     pchan: Channel|None = actor._parent_chan | ||||
|     if pchan: | ||||
|         to_scan[pchan.uid].append(pchan) | ||||
| 
 | ||||
|     for aid, chans in to_scan.items(): | ||||
|         _, peer_name = aid | ||||
|         if name == peer_name: | ||||
|             if not chans: | ||||
|                 log.warning( | ||||
|                     f'No IPC chans for matching peer {peer_name}\n' | ||||
|                 ) | ||||
|                 continue | ||||
|             return chans | ||||
| 
 | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def query_actor( | ||||
|     name: str, | ||||
|     arbiter_sockaddr: Optional[tuple[str, int]] = None, | ||||
|     regaddr: tuple[str, int]|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[tuple[str, int], None]: | ||||
| ) -> AsyncGenerator[ | ||||
|     tuple[str, int]|None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Simple address lookup for a given actor name. | ||||
|     Lookup a transport address (by actor name) via querying a registrar | ||||
|     listening @ `regaddr`. | ||||
| 
 | ||||
|     Returns the (socket) address or ``None``. | ||||
|     Returns the transport protocol (socket) address or `None` if no | ||||
|     entry under that name exists. | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
|     async with get_arbiter( | ||||
|         *arbiter_sockaddr or actor._arb_addr | ||||
|     ) as arb_portal: | ||||
|     actor: Actor = current_actor() | ||||
|     if ( | ||||
|         name == 'registrar' | ||||
|         and actor.is_registrar | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'The current actor IS the registry!?' | ||||
|         ) | ||||
| 
 | ||||
|         sockaddr = await arb_portal.run_from_ns( | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         yield maybe_peers[0].raddr | ||||
|         return | ||||
| 
 | ||||
|     reg_portal: Portal | ||||
|     regaddr: tuple[str, int] = regaddr or actor.reg_addrs[0] | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         # TODO: return portals to all available actors - for now | ||||
|         # just the last one that registered | ||||
|         sockaddr: tuple[str, int] = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'find_actor', | ||||
|             name=name, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: return portals to all available actors - for now just | ||||
|         # the last one that registered | ||||
|         if name == 'arbiter' and actor.is_arbiter: | ||||
|             raise RuntimeError("The current actor is the arbiter") | ||||
| 
 | ||||
|         yield sockaddr if sockaddr else None | ||||
|         yield sockaddr | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def find_actor( | ||||
| async def maybe_open_portal( | ||||
|     addr: tuple[str, int], | ||||
|     name: str, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None | ||||
| 
 | ||||
| ) -> AsyncGenerator[Optional[Portal], None]: | ||||
|     ''' | ||||
|     Ask the arbiter to find actor(s) by name. | ||||
| 
 | ||||
|     Returns a connected portal to the last registered matching actor | ||||
|     known to the arbiter. | ||||
| 
 | ||||
|     ''' | ||||
| ): | ||||
|     async with query_actor( | ||||
|         name=name, | ||||
|         arbiter_sockaddr=arbiter_sockaddr, | ||||
|         regaddr=addr, | ||||
|     ) as sockaddr: | ||||
|         pass | ||||
| 
 | ||||
|     if sockaddr: | ||||
|         async with _connect_chan(*sockaddr) as chan: | ||||
|  | @ -133,30 +194,121 @@ async def find_actor( | |||
|         yield None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def find_actor( | ||||
|     name: str, | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     only_first: bool = True, | ||||
|     raise_on_none: bool = False, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|     Portal | list[Portal] | None, | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Ask the arbiter to find actor(s) by name. | ||||
| 
 | ||||
|     Returns a connected portal to the last registered matching actor | ||||
|     known to the arbiter. | ||||
| 
 | ||||
|     ''' | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers and only_first: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
| 
 | ||||
|     if not registry_addrs: | ||||
|         # XXX NOTE: make sure to dynamically read the value on | ||||
|         # every call since something may change it globally (eg. | ||||
|         # like in our discovery test suite)! | ||||
|         from . import _root | ||||
|         registry_addrs = ( | ||||
|             _runtime_vars['_registry_addrs'] | ||||
|             or | ||||
|             _root._default_lo_addrs | ||||
|         ) | ||||
| 
 | ||||
|     maybe_portals: list[ | ||||
|         AsyncContextManager[tuple[str, int]] | ||||
|     ] = list( | ||||
|         maybe_open_portal( | ||||
|             addr=addr, | ||||
|             name=name, | ||||
|         ) | ||||
|         for addr in registry_addrs | ||||
|     ) | ||||
|     portals: list[Portal] | ||||
|     async with gather_contexts( | ||||
|         mngrs=maybe_portals, | ||||
|     ) as portals: | ||||
|         # log.runtime( | ||||
|         #     'Gathered portals:\n' | ||||
|         #     f'{portals}' | ||||
|         # ) | ||||
|         # NOTE: `gather_contexts()` will return a | ||||
|         # `tuple[None, None, ..., None]` if no contact | ||||
|         # can be made with any regstrar at any of the | ||||
|         # N provided addrs! | ||||
|         if not any(portals): | ||||
|             if raise_on_none: | ||||
|                 raise RuntimeError( | ||||
|                     f'No actor "{name}" found registered @ {registry_addrs}' | ||||
|                 ) | ||||
|             yield None | ||||
|             return | ||||
| 
 | ||||
|         portals: list[Portal] = list(portals) | ||||
|         if only_first: | ||||
|             yield portals[0] | ||||
| 
 | ||||
|         else: | ||||
|             # TODO: currently this may return multiple portals | ||||
|             # given there are multi-homed or multiple registrars.. | ||||
|             # SO, we probably need de-duplication logic? | ||||
|             yield portals | ||||
| 
 | ||||
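A hedged usage sketch of the new multi-registrar lookup (`'data_feed'` is a hypothetical service name; by default the first matching portal, or `None`, is yielded):

    import tractor

    async def main():
        async with tractor.find_actor('data_feed') as portal:
            if portal is None:
                print('no such actor registered')
            else:
                ...  # use the connected `Portal` for RPC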
| 
 | ||||
| @acm | ||||
| async def wait_for_actor( | ||||
|     name: str, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None, | ||||
|     # registry_addr: tuple[str, int] | None = None, | ||||
|     registry_addr: tuple[str, int] | None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Portal, None]: | ||||
|     ''' | ||||
|     Wait on an actor to register with the arbiter. | ||||
| 
 | ||||
|     A portal to the first registered actor is returned. | ||||
|     Wait on at least one peer actor to register `name` with the | ||||
|     registrar, yield a `Portal` to the first registree. | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
|     actor: Actor = current_actor() | ||||
| 
 | ||||
|     async with get_arbiter( | ||||
|         *arbiter_sockaddr or actor._arb_addr, | ||||
|     ) as arb_portal: | ||||
|         sockaddrs = await arb_portal.run_from_ns( | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
| 
 | ||||
|     regaddr: tuple[str, int] = ( | ||||
|         registry_addr | ||||
|         or | ||||
|         actor.reg_addrs[0] | ||||
|     ) | ||||
|     # TODO: use `.trionics.gather_contexts()` like | ||||
|     # above in `find_actor()` as well? | ||||
|     reg_portal: Portal | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         sockaddrs = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|             'wait_for_actor', | ||||
|             name=name, | ||||
|         ) | ||||
|         sockaddr = sockaddrs[-1] | ||||
| 
 | ||||
|         # get latest registered addr by default? | ||||
|         # TODO: offer multi-portal yields in multi-homed case? | ||||
|         sockaddr: tuple[str, int] = sockaddrs[-1] | ||||
| 
 | ||||
|         async with _connect_chan(*sockaddr) as chan: | ||||
|             async with open_portal(chan) as portal: | ||||
|  |  | |||
|  | @ -20,6 +20,9 @@ Sub-process entry points. | |||
| """ | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import multiprocessing as mp | ||||
| import os | ||||
| import textwrap | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -32,6 +35,7 @@ from .log import ( | |||
|     get_logger, | ||||
| ) | ||||
| from . import _state | ||||
| from .devx import _debug | ||||
| from .to_asyncio import run_as_asyncio_guest | ||||
| from ._runtime import ( | ||||
|     async_main, | ||||
|  | @ -47,8 +51,8 @@ log = get_logger(__name__) | |||
| 
 | ||||
| def _mp_main( | ||||
| 
 | ||||
|     actor: Actor,  # type: ignore | ||||
|     accept_addr: tuple[str, int], | ||||
|     actor: Actor, | ||||
|     accept_addrs: list[tuple[str, int]], | ||||
|     forkserver_info: tuple[Any, Any, Any, Any, Any], | ||||
|     start_method: SpawnMethodKey, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|  | @ -56,29 +60,31 @@ def _mp_main( | |||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     The routine called *after fork* which invokes a fresh ``trio.run`` | ||||
|     The routine called *after fork* which invokes a fresh `trio.run()` | ||||
| 
 | ||||
|     ''' | ||||
|     actor._forkserver_info = forkserver_info | ||||
|     from ._spawn import try_set_start_method | ||||
|     spawn_ctx = try_set_start_method(start_method) | ||||
|     spawn_ctx: mp.context.BaseContext = try_set_start_method(start_method) | ||||
|     assert spawn_ctx | ||||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         log.info( | ||||
|             f"Setting loglevel for {actor.uid} to {actor.loglevel}") | ||||
|             f'Setting loglevel for {actor.uid} to {actor.loglevel}' | ||||
|         ) | ||||
|         get_console_log(actor.loglevel) | ||||
| 
 | ||||
|     assert spawn_ctx | ||||
|     # TODO: use scope headers like for `trio` below! | ||||
|     # (well after we libify it maybe..) | ||||
|     log.info( | ||||
|         f"Started new {spawn_ctx.current_process()} for {actor.uid}") | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
| 
 | ||||
|     log.debug(f"parent_addr is {parent_addr}") | ||||
|         f'Started new {spawn_ctx.current_process()} for {actor.uid}' | ||||
|     #     f"parent_addr is {parent_addr}" | ||||
|     ) | ||||
|     _state._current_actor: Actor = actor | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|         actor, | ||||
|         accept_addr, | ||||
|         actor=actor, | ||||
|         accept_addrs=accept_addrs, | ||||
|         parent_addr=parent_addr | ||||
|     ) | ||||
|     try: | ||||
|  | @ -91,12 +97,114 @@ def _mp_main( | |||
|         pass  # handle it the same way trio does? | ||||
| 
 | ||||
|     finally: | ||||
|         log.info(f"Actor {actor.uid} terminated") | ||||
|         log.info( | ||||
|             f'`mp`-subactor {actor.uid} exited' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: move this func to some kinda `.devx._conc_lang.py` eventually | ||||
| # as we work out our multi-domain state-flow-syntax! | ||||
| def nest_from_op( | ||||
|     input_op: str, | ||||
|     # | ||||
|     # ?TODO? an idea for a syntax to describe the state of concurrent systems | ||||
|     # as a "3-domain" (execution, scope, storage) model and using | ||||
|     # a minimal ascii/utf-8 operator-set. | ||||
|     # | ||||
|     # try not to take any of this seriously yet XD | ||||
|     # | ||||
|     # > is a "play operator" indicating (CPU bound) | ||||
|     #   exec/work/ops required at the "lowest level computing" | ||||
|     # | ||||
|     # execution primitives (tasks, threads, actors..) denote their | ||||
|     # lifetime with '(' and ')' since parentheses normally are used | ||||
|     # in many langs to denote function calls. | ||||
|     # | ||||
|     # starting = ( | ||||
|     # >(  opening/starting; beginning of the thread-of-exec (toe?) | ||||
|     # (>  opened/started,  (finished spawning toe) | ||||
|     # |_<Task: blah blah..>  repr of toe, in py these look like <objs> | ||||
|     # | ||||
|     # >) closing/exiting/stopping, | ||||
|     # )> closed/exited/stopped, | ||||
|     # |_<Task: blah blah..> | ||||
|     #   [OR <), )< ?? ] | ||||
|     # | ||||
|     # ending = ) | ||||
|     # >c) cancelling to close/exit | ||||
|     # c)> cancelled (caused close), OR? | ||||
|     #  |_<Actor: ..> | ||||
|     #   OR maybe "<c)" which better indicates the cancel being | ||||
|     #   "delivered/returned" / returned" to LHS? | ||||
|     # | ||||
|     # >x)  erroring to eventually exit | ||||
|     # x)>  errored and terminated | ||||
|     #  |_<Actor: ...> | ||||
|     # | ||||
|     # scopes: supers/nurseries, IPC-ctxs, sessions, perms, etc. | ||||
|     # >{  opening | ||||
|     # {>  opened | ||||
|     # }>  closed | ||||
|     # >}  closing | ||||
|     # | ||||
|     # storage: like queues, shm-buffers, files, etc.. | ||||
|     # >[  opening | ||||
|     # [>  opened | ||||
|     #  |_<FileObj: ..> | ||||
|     # | ||||
|     # >]  closing | ||||
|     # ]>  closed | ||||
| 
 | ||||
|     # IPC ops: channels, transports, msging | ||||
|     # =>  req msg | ||||
|     # <=  resp msg | ||||
|     # <=> 2-way streaming (of msgs) | ||||
|     # <-  recv 1 msg | ||||
|     # ->  send 1 msg | ||||
|     # | ||||
|     # TODO: still not sure on R/L-HS approach..? | ||||
|     # =>(  send-req to exec start (task, actor, thread..) | ||||
|     # (<=  recv-req to ^ | ||||
|     # | ||||
|     # (<=  recv-req ^ | ||||
|     # <=(  recv-resp opened remote exec primitive | ||||
|     # <=)  recv-resp closed | ||||
|     # | ||||
|     # )<=c req to stop due to cancel | ||||
|     # c=>) req to stop due to cancel | ||||
|     # | ||||
|     # =>{  recv-req to open | ||||
|     # <={  send-status that it closed | ||||
| 
 | ||||
|     tree_str: str, | ||||
| 
 | ||||
|     # NOTE: shift the nested indent back (to the left) from the | ||||
|     # end of `input_op` by this amount. | ||||
|     back_from_op: int = 0, | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Depth-increment the (presumably hierarchy/supervision) input | ||||
|     "tree string" below the provided `input_op` execution | ||||
|     operator, injecting a `"\n|_{input_op}\n"` and indenting the | ||||
|     `tree_str` so its content nests aligned with the op's last char. | ||||
| 
 | ||||
|     ''' | ||||
|     return ( | ||||
|         f'{input_op}\n' | ||||
|         + | ||||
|         textwrap.indent( | ||||
|             tree_str, | ||||
|             prefix=( | ||||
|                 len(input_op) | ||||
|                 - | ||||
|                 (back_from_op + 1) | ||||
|             ) * ' ', | ||||
|         ) | ||||
|     ) | ||||
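Since `nest_from_op()` is pure stdlib string munging, a standalone sketch of the indentation math should run as-is: with `back_from_op=1` the subtree is indented `len(input_op) - 2` columns, aligning it under the op's last char.

    import textwrap

    def nest_from_op(input_op: str, tree_str: str, back_from_op: int = 0) -> str:
        # same body as the diff above, inlined for a quick demo
        return (
            f'{input_op}\n'
            + textwrap.indent(
                tree_str,
                prefix=(len(input_op) - (back_from_op + 1)) * ' ',
            )
        )

    print(nest_from_op('>(', '|_<Actor foo>\n', back_from_op=1))
    # >(
    # |_<Actor foo>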
| 
 | ||||
| 
 | ||||
| def _trio_main( | ||||
| 
 | ||||
|     actor: Actor,  # type: ignore | ||||
|     actor: Actor, | ||||
|     *, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|     infect_asyncio: bool = False, | ||||
|  | @ -106,33 +214,73 @@ def _trio_main( | |||
|     Entry point for a `trio_run_in_process` subactor. | ||||
| 
 | ||||
|     ''' | ||||
|     log.info(f"Started new trio process for {actor.uid}") | ||||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         log.info( | ||||
|             f"Setting loglevel for {actor.uid} to {actor.loglevel}") | ||||
|         get_console_log(actor.loglevel) | ||||
| 
 | ||||
|     log.info( | ||||
|         f"Started {actor.uid}") | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
| 
 | ||||
|     log.debug(f"parent_addr is {parent_addr}") | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|         actor, | ||||
|         parent_addr=parent_addr | ||||
|     ) | ||||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         get_console_log(actor.loglevel) | ||||
|         actor_info: str = ( | ||||
|             f'|_{actor}\n' | ||||
|             f'  uid: {actor.uid}\n' | ||||
|             f'  pid: {os.getpid()}\n' | ||||
|             f'  parent_addr: {parent_addr}\n' | ||||
|             f'  loglevel: {actor.loglevel}\n' | ||||
|         ) | ||||
|         log.info( | ||||
|             'Starting new `trio` subactor:\n' | ||||
|             + | ||||
|             nest_from_op( | ||||
|                 input_op='>(',  # see syntax ideas above | ||||
|                 tree_str=actor_info, | ||||
|                 back_from_op=1, | ||||
|             ) | ||||
|         ) | ||||
|     logmeth = log.info | ||||
|     exit_status: str = ( | ||||
|         'Subactor exited\n' | ||||
|         + | ||||
|         nest_from_op( | ||||
|             input_op=')>',  # like a "closed-to-play"-icon from super perspective | ||||
|             tree_str=actor_info, | ||||
|             back_from_op=1, | ||||
|         ) | ||||
|     ) | ||||
|     try: | ||||
|         if infect_asyncio: | ||||
|             actor._infected_aio = True | ||||
|             run_as_asyncio_guest(trio_main) | ||||
|         else: | ||||
|             trio.run(trio_main) | ||||
| 
 | ||||
|     except KeyboardInterrupt: | ||||
|         log.cancel(f"Actor {actor.uid} received KBI") | ||||
|         logmeth = log.cancel | ||||
|         exit_status: str = ( | ||||
|             'Actor received KBI (aka an OS-cancel)\n' | ||||
|             + | ||||
|             nest_from_op( | ||||
|                 input_op='c)>',  # closed due to cancel (see above) | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|     except BaseException as err: | ||||
|         logmeth = log.error | ||||
|         exit_status: str = ( | ||||
|             'Main actor task exited due to crash?\n' | ||||
|             + | ||||
|             nest_from_op( | ||||
|                 input_op='x)>',  # closed by error | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|         # NOTE since we raise a tb will already be shown on the | ||||
|         # console, thus we do NOT use `.exception()` above. | ||||
|         raise err | ||||
| 
 | ||||
|     finally: | ||||
|         log.info(f"Actor {actor.uid} terminated") | ||||
|         logmeth(exit_status) | ||||
|  |  | |||
File diff suppressed because it is too large: Load Diff

536  tractor/_ipc.py
							|  | @ -19,38 +19,64 @@ Inter-process comms abstractions | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import platform | ||||
| import struct | ||||
| import typing | ||||
| from collections.abc import ( | ||||
|     AsyncGenerator, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import platform | ||||
| from pprint import pformat | ||||
| import struct | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     runtime_checkable, | ||||
|     Optional, | ||||
|     Protocol, | ||||
|     Type, | ||||
|     TypeVar, | ||||
| ) | ||||
| 
 | ||||
| from tricycle import BufferedReceiveStream | ||||
| import msgspec | ||||
| from tricycle import BufferedReceiveStream | ||||
| import trio | ||||
| from async_generator import asynccontextmanager | ||||
| 
 | ||||
| from .log import get_logger | ||||
| from ._exceptions import TransportClosed | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import ( | ||||
|     MsgTypeError, | ||||
|     pack_from_raise, | ||||
|     TransportClosed, | ||||
|     _mk_send_mte, | ||||
|     _mk_recv_mte, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ctxvar_MsgCodec, | ||||
|     # _codec,  XXX see `self._codec` sanity/debug checks | ||||
|     MsgCodec, | ||||
|     types as msgtypes, | ||||
|     pretty_struct, | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _is_windows = platform.system() == 'Windows' | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| def get_stream_addrs(stream: trio.SocketStream) -> tuple: | ||||
|     # should both be IP sockets | ||||
| def get_stream_addrs( | ||||
|     stream: trio.SocketStream | ||||
| ) -> tuple[ | ||||
|     tuple[str, int],  # local | ||||
|     tuple[str, int],  # remote | ||||
| ]: | ||||
|     ''' | ||||
|     Return the `trio` streaming transport prot's socket-addrs for | ||||
|     both the local and remote sides as a pair. | ||||
| 
 | ||||
|     ''' | ||||
|     # rn, should both be IP sockets | ||||
|     lsockname = stream.socket.getsockname() | ||||
|     rsockname = stream.socket.getpeername() | ||||
|     return ( | ||||
|  | @ -59,16 +85,22 @@ def get_stream_addrs(stream: trio.SocketStream) -> tuple: | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| MsgType = TypeVar("MsgType") | ||||
| 
 | ||||
| # TODO: consider using a generic def and indexing with our eventual | ||||
| # msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| # - https://jcristharif.com/msgspec/usage.html#structs | ||||
| # from tractor.msg.types import MsgType | ||||
| # ?TODO? this should be our `Union[*msgtypes.__spec__]` alias now right..? | ||||
| # => BLEH, except can't bc prots must inherit typevar or param-spec | ||||
| #   vars.. | ||||
| MsgType = TypeVar('MsgType') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: break up this mod into a subpkg so we can start adding new | ||||
| # backends and move this type stuff into a dedicated file.. Bo | ||||
| # | ||||
| @runtime_checkable | ||||
| class MsgTransport(Protocol[MsgType]): | ||||
| # | ||||
| # ^-TODO-^ consider using a generic def and indexing with our | ||||
| # eventual msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| 
 | ||||
|     stream: trio.SocketStream | ||||
|     drained: list[MsgType] | ||||
|  | @ -103,20 +135,37 @@ class MsgTransport(Protocol[MsgType]): | |||
|         ... | ||||
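As background for the typing note just below: `@runtime_checkable` protocols support *structural* `isinstance()` checks (method presence only), which is what lets concrete transports satisfy `MsgTransport` without nominal inheritance; a stdlib-only sketch:

    from typing import Protocol, TypeVar, runtime_checkable

    T = TypeVar('T')

    @runtime_checkable
    class Transport(Protocol[T]):
        async def send(self, msg: T) -> None: ...
        async def recv(self) -> T: ...

    class EchoTransport:
        # no inheritance; matches the protocol structurally
        async def send(self, msg: str) -> None:
            print(msg)
        async def recv(self) -> str:
            return 'pong'

    assert isinstance(EchoTransport(), Transport)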
| 
 | ||||
| 
 | ||||
| # TODO: not sure why we have to inherit here, but it seems to be an | ||||
| # issue with ``get_msg_transport()`` returning a ``Type[Protocol]``; | ||||
| # probably should make a `mypy` issue? | ||||
| # TODO: typing oddity.. not sure why we have to inherit here, but it | ||||
| # seems to be an issue with `get_msg_transport()` returning | ||||
| # a `Type[Protocol]`; probably should make a `mypy` issue? | ||||
| class MsgpackTCPStream(MsgTransport): | ||||
|     ''' | ||||
|     A ``trio.SocketStream`` delivering ``msgpack`` formatted data | ||||
|     using the ``msgspec`` codec lib. | ||||
| 
 | ||||
|     ''' | ||||
|     layer_key: int = 4 | ||||
|     name_key: str = 'tcp' | ||||
| 
 | ||||
|     # TODO: better naming for this? | ||||
|     # -[ ] check how libp2p does naming for such things? | ||||
|     codec_key: str = 'msgpack' | ||||
| 
 | ||||
|     def __init__( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         prefix_size: int = 4, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         # | ||||
|         # TODO: define this as a `Codec` struct which can be | ||||
|         # overriden dynamically by the application/runtime? | ||||
|         codec: tuple[ | ||||
|             Callable[[Any], Any]|None,  # coder | ||||
|             Callable[[type, Any], Any]|None,  # decoder | ||||
|         ]|None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self.stream = stream | ||||
|  | @ -126,30 +175,44 @@ class MsgpackTCPStream(MsgTransport): | |||
|         self._laddr, self._raddr = get_stream_addrs(stream) | ||||
| 
 | ||||
|         # create read loop instance | ||||
|         self._agen = self._iter_packets() | ||||
|         self._aiter_pkts = self._iter_packets() | ||||
|         self._send_lock = trio.StrictFIFOLock() | ||||
| 
 | ||||
|         # public i guess? | ||||
|         self.drained: list[dict] = [] | ||||
| 
 | ||||
|         self.recv_stream = BufferedReceiveStream(transport_stream=stream) | ||||
|         self.recv_stream = BufferedReceiveStream( | ||||
|             transport_stream=stream | ||||
|         ) | ||||
|         self.prefix_size = prefix_size | ||||
| 
 | ||||
|         # TODO: struct aware messaging coders | ||||
|         self.encode = msgspec.msgpack.Encoder().encode | ||||
|         self.decode = msgspec.msgpack.Decoder().decode  # dict[str, Any]) | ||||
|         # allow for custom IPC msg interchange format | ||||
|         # dynamic override Bo | ||||
|         self._task = trio.lowlevel.current_task() | ||||
| 
 | ||||
|         # XXX for ctxvar debug only! | ||||
|         # self._codec: MsgCodec = ( | ||||
|         #     codec | ||||
|         #     or | ||||
|         #     _codec._ctxvar_MsgCodec.get() | ||||
|         # ) | ||||
| 
 | ||||
|     async def _iter_packets(self) -> AsyncGenerator[dict, None]: | ||||
|         '''Yield packets from the underlying stream. | ||||
|         ''' | ||||
|         Yield `bytes`-blob decoded packets from the underlying TCP | ||||
|         stream using the current task's `MsgCodec`. | ||||
| 
 | ||||
|         This is a streaming routine implemented as an async generator | ||||
|         func (which was the original design, but could be changed?) | ||||
|         and is allocated by a `.__call__()` inside `.__init__()` where | ||||
|         it is assigned to the `._aiter_pkts` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         import msgspec  # noqa | ||||
|         decodes_failed: int = 0 | ||||
| 
 | ||||
|         while True: | ||||
|             try: | ||||
|                 header = await self.recv_stream.receive_exactly(4) | ||||
| 
 | ||||
|                 header: bytes = await self.recv_stream.receive_exactly(4) | ||||
|             except ( | ||||
|                 ValueError, | ||||
|                 ConnectionResetError, | ||||
|  | @ -158,25 +221,122 @@ class MsgpackTCPStream(MsgTransport): | |||
|                 # seem to be getting racy failures here on | ||||
|                 # arbiter/registry name subs.. | ||||
|                 trio.BrokenResourceError, | ||||
|             ): | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior ro read' | ||||
|                 ) | ||||
| 
 | ||||
|             ) as trans_err: | ||||
| 
 | ||||
|                 loglevel = 'transport' | ||||
|                 match trans_err: | ||||
|                     # case ( | ||||
|                     #     ConnectionResetError() | ||||
|                     # ): | ||||
|                     #     loglevel = 'transport' | ||||
| 
 | ||||
|                     # peer actor (graceful??) TCP EOF but `tricycle` | ||||
|                     # seems to raise a 0-bytes-read? | ||||
|                     case ValueError() if ( | ||||
|                         'unclean EOF' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # peer actor (task) prolly shutdown quickly due | ||||
|                     # to cancellation | ||||
|                     case trio.BrokenResourceError() if ( | ||||
|                         'Connection reset by peer' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # unless the disconnect condition falls under "a | ||||
|                     # normal operation breakage" we usualy console warn | ||||
|                     # about it. | ||||
|                     case _: | ||||
|                         loglevel: str = 'warning' | ||||
| 
 | ||||
| 
 | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already closed by peer\n' | ||||
|                         f'x)> {type(trans_err)}\n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel=loglevel, | ||||
|                 ) from trans_err | ||||
| 
 | ||||
|             # XXX definitely can happen if transport is closed | ||||
|             # manually by another `trio.lowlevel.Task` in the | ||||
|             # same actor; we use this in some simulated fault | ||||
|             # testing for ex, but generally should never happen | ||||
|             # under normal operation! | ||||
|             # | ||||
|             # NOTE: as such we always re-raise this error from the | ||||
|             #       RPC msg loop! | ||||
|             except trio.ClosedResourceError as closure_err: | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already manually closed locally?\n' | ||||
|                         f'x)> {type(closure_err)} \n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='error', | ||||
|                     raise_on_report=( | ||||
|                         closure_err.args[0] == 'another task closed this fd' | ||||
|                     ), | ||||
|                 ) from closure_err | ||||
| 
 | ||||
|             # graceful TCP EOF disconnect | ||||
|             if header == b'': | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior ro read' | ||||
|                     message=( | ||||
|                         f'IPC transport already gracefully closed\n' | ||||
|                         f')>\n' | ||||
|                         f'|_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='transport', | ||||
|                     # cause=???  # handy or no? | ||||
|                 ) | ||||
| 
 | ||||
|             size: int | ||||
|             size, = struct.unpack("<I", header) | ||||
| 
 | ||||
|             log.transport(f'received header {size}')  # type: ignore | ||||
| 
 | ||||
|             msg_bytes = await self.recv_stream.receive_exactly(size) | ||||
|             msg_bytes: bytes = await self.recv_stream.receive_exactly(size) | ||||
| 
 | ||||
|             log.transport(f"received {msg_bytes}")  # type: ignore | ||||
|             try: | ||||
|                 yield self.decode(msg_bytes) | ||||
|                 # NOTE: lookup the `trio.Task.context`'s var for | ||||
|                 # the current `MsgCodec`. | ||||
|                 codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|                 # XXX for ctxvar debug only! | ||||
|                 # if self._codec.pld_spec != codec.pld_spec: | ||||
|                 #     assert ( | ||||
|                 #         task := trio.lowlevel.current_task() | ||||
|                 #     ) is not self._task | ||||
|                 #     self._task = task | ||||
|                 #     self._codec = codec | ||||
|                 #     log.runtime( | ||||
|                 #         f'Using new codec in {self}.recv()\n' | ||||
|                 #         f'codec: {self._codec}\n\n' | ||||
|                 #         f'msg_bytes: {msg_bytes}\n' | ||||
|                 #     ) | ||||
|                 yield codec.decode(msg_bytes) | ||||
| 
 | ||||
|             # XXX NOTE: since the below error derives from | ||||
|             # `DecodeError` we need to catch it specially | ||||
|             # and always raise such that spec violations | ||||
|             # are never allowed to be caught silently! | ||||
|             except msgspec.ValidationError as verr: | ||||
|                 msgtyperr: MsgTypeError = _mk_recv_mte( | ||||
|                     msg=msg_bytes, | ||||
|                     codec=codec, | ||||
|                     src_validation_error=verr, | ||||
|                 ) | ||||
|                 # XXX deliver up to `Channel.recv()` where | ||||
|                 # a re-raise and `Error`-pack can inject the far | ||||
|                 # end actor `.uid`. | ||||
|                 yield msgtyperr | ||||
| 
 | ||||
|             except ( | ||||
|                 msgspec.DecodeError, | ||||
|                 UnicodeDecodeError, | ||||
|  | @ -186,30 +346,96 @@ class MsgpackTCPStream(MsgTransport): | |||
|                     # do with a channel drop - hope that receiving from the | ||||
|                     # channel will raise an expected error and bubble up. | ||||
|                     try: | ||||
|                         msg_str: str | bytes = msg_bytes.decode() | ||||
|                         msg_str: str|bytes = msg_bytes.decode() | ||||
|                     except UnicodeDecodeError: | ||||
|                         msg_str = msg_bytes | ||||
| 
 | ||||
|                     log.error( | ||||
|                         '`msgspec` failed to decode!?\n' | ||||
|                         'dumping bytes:\n' | ||||
|                         f'{msg_str!r}' | ||||
|                     log.exception( | ||||
|                         'Failed to decode msg?\n' | ||||
|                         f'{codec}\n\n' | ||||
|                         'Rxed bytes from wire:\n\n' | ||||
|                         f'{msg_str!r}\n' | ||||
|                     ) | ||||
|                     decodes_failed += 1 | ||||
|                 else: | ||||
|                     raise | ||||
| 
 | ||||
|     async def send(self, msg: Any) -> None: | ||||
|     async def send( | ||||
|         self, | ||||
|         msg: msgtypes.MsgType, | ||||
| 
 | ||||
|         strict_types: bool = True, | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a msgpack encoded py-object-blob-as-msg over TCP. | ||||
| 
 | ||||
|         If `strict_types == True` then a `MsgTypeError` will be raised on any | ||||
|         invalid msg type. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # XXX see `trio._sync.AsyncContextManagerMixin` for details | ||||
|         # on the `.acquire()`/`.release()` sequencing.. | ||||
|         async with self._send_lock: | ||||
| 
 | ||||
|             bytes_data: bytes = self.encode(msg) | ||||
|             # NOTE: lookup the `trio.Task.context`'s var for | ||||
|             # the current `MsgCodec`. | ||||
|             codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|             # XXX for ctxvar debug only! | ||||
|             # if self._codec.pld_spec != codec.pld_spec: | ||||
|             #     self._codec = codec | ||||
|             #     log.runtime( | ||||
|             #         f'Using new codec in {self}.send()\n' | ||||
|             #         f'codec: {self._codec}\n\n' | ||||
|             #         f'msg: {msg}\n' | ||||
|             #     ) | ||||
| 
 | ||||
|             if type(msg) not in msgtypes.__msg_types__: | ||||
|                 if strict_types: | ||||
|                     raise _mk_send_mte( | ||||
|                         msg, | ||||
|                         codec=codec, | ||||
|                     ) | ||||
|                 else: | ||||
|                     log.warning( | ||||
|                         'Sending non-`Msg`-spec msg?\n\n' | ||||
|                         f'{msg}\n' | ||||
|                     ) | ||||
| 
 | ||||
|             try: | ||||
|                 bytes_data: bytes = codec.encode(msg) | ||||
|             except TypeError as _err: | ||||
|                 typerr = _err | ||||
|                 msgtyperr: MsgTypeError = _mk_send_mte( | ||||
|                     msg, | ||||
|                     codec=codec, | ||||
|                     message=( | ||||
|                         f'IPC-msg-spec violation in\n\n' | ||||
|                         f'{pretty_struct.Struct.pformat(msg)}' | ||||
|                     ), | ||||
|                     src_type_error=typerr, | ||||
|                 ) | ||||
|                 raise msgtyperr from typerr | ||||
| 
 | ||||
|             # supposedly the fastest approach, per | ||||
|             # https://stackoverflow.com/a/54027962 | ||||
|             size: bytes = struct.pack("<I", len(bytes_data)) | ||||
| 
 | ||||
|             return await self.stream.send_all(size + bytes_data) | ||||
| 
 | ||||
|         # ?TODO? does it ever help to dynamically show this | ||||
|         # frame? | ||||
|         # try: | ||||
|         #     <the-above_code> | ||||
|         # except BaseException as _err: | ||||
|         #     err = _err | ||||
|         #     if not isinstance(err, MsgTypeError): | ||||
|         #         __tracebackhide__: bool = False | ||||
|         #     raise | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         return self._laddr | ||||
|  | @ -219,7 +445,7 @@ class MsgpackTCPStream(MsgTransport): | |||
|         return self._raddr | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         return await self._agen.asend(None) | ||||
|         return await self._aiter_pkts.asend(None) | ||||
| 
 | ||||
|     async def drain(self) -> AsyncIterator[dict]: | ||||
|         ''' | ||||
|  | @ -236,7 +462,7 @@ class MsgpackTCPStream(MsgTransport): | |||
|                 yield msg | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
|         return self._aiter_pkts | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self.stream.socket.fileno() != -1 | ||||
|  | @ -267,7 +493,7 @@ class Channel: | |||
|     def __init__( | ||||
| 
 | ||||
|         self, | ||||
|         destaddr: Optional[tuple[str, int]], | ||||
|         destaddr: tuple[str, int]|None, | ||||
| 
 | ||||
|         msg_transport_type_key: tuple[str, str] = ('msgpack', 'tcp'), | ||||
| 
 | ||||
|  | @ -285,18 +511,31 @@ class Channel: | |||
| 
 | ||||
|         # Either created in ``.connect()`` or passed in by | ||||
|         # user in ``.from_stream()``. | ||||
|         self._stream: Optional[trio.SocketStream] = None | ||||
|         self.msgstream: Optional[MsgTransport] = None | ||||
|         self._stream: trio.SocketStream|None = None | ||||
|         self._transport: MsgTransport|None = None | ||||
| 
 | ||||
|         # set after handshake - always uid of far end | ||||
|         self.uid: Optional[tuple[str, str]] = None | ||||
|         self.uid: tuple[str, str]|None = None | ||||
| 
 | ||||
|         self._agen = self._aiter_recv() | ||||
|         self._exc: Optional[Exception] = None  # set if far end actor errors | ||||
|         self._aiter_msgs = self._iter_msgs() | ||||
|         self._exc: Exception|None = None  # set if far end actor errors | ||||
|         self._closed: bool = False | ||||
|         # flag set on ``Portal.cancel_actor()`` indicating | ||||
|         # remote (peer) cancellation of the far end actor runtime. | ||||
|         self._cancel_called: bool = False  # set on ``Portal.cancel_actor()`` | ||||
| 
 | ||||
|         # flag set by ``Portal.cancel_actor()`` indicating remote | ||||
|         # (possibly peer) cancellation of the far end actor | ||||
|         # runtime. | ||||
|         self._cancel_called: bool = False | ||||
| 
 | ||||
|     @property | ||||
|     def msgstream(self) -> MsgTransport: | ||||
|         log.info( | ||||
|             '`Channel.msgstream` is an old name, use `._transport`' | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @property | ||||
|     def transport(self) -> MsgTransport: | ||||
|         return self._transport | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_stream( | ||||
|  | @ -307,37 +546,78 @@ class Channel: | |||
|     ) -> Channel: | ||||
| 
 | ||||
|         src, dst = get_stream_addrs(stream) | ||||
|         chan = Channel(destaddr=dst, **kwargs) | ||||
|         chan = Channel( | ||||
|             destaddr=dst, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
|         # set immediately here from provided instance | ||||
|         chan._stream = stream | ||||
|         chan._stream: trio.SocketStream = stream | ||||
|         chan.set_msg_transport(stream) | ||||
|         return chan | ||||
| 
 | ||||
|     def set_msg_transport( | ||||
|         self, | ||||
|         stream: trio.SocketStream, | ||||
|         type_key: Optional[tuple[str, str]] = None, | ||||
|         type_key: tuple[str, str]|None = None, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         codec: MsgCodec|None = None, | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
|         type_key = type_key or self._transport_key | ||||
|         self.msgstream = get_msg_transport(type_key)(stream) | ||||
|         return self.msgstream | ||||
|         type_key = ( | ||||
|             type_key | ||||
|             or | ||||
|             self._transport_key | ||||
|         ) | ||||
|         # get transport type, then | ||||
|         self._transport = get_msg_transport( | ||||
|             type_key | ||||
|         # instantiate an instance of the msg-transport | ||||
|         )( | ||||
|             stream, | ||||
|             codec=codec, | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @cm | ||||
|     def apply_codec( | ||||
|         self, | ||||
|         codec: MsgCodec, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Temporarily override the underlying IPC msg codec for | ||||
|         dynamic enforcement of messaging schema. | ||||
| 
 | ||||
|         ''' | ||||
|         orig: MsgCodec = self._transport.codec | ||||
|         try: | ||||
|             self._transport.codec = codec | ||||
|             yield | ||||
|         finally: | ||||
|             self._transport.codec = orig | ||||
| 
 | ||||
|     # TODO: do a .src/.dst: str for maddrs? | ||||
|     def __repr__(self) -> str: | ||||
|         if self.msgstream: | ||||
|         if not self._transport: | ||||
|             return '<Channel with inactive transport?>' | ||||
| 
 | ||||
|         return repr( | ||||
|                 self.msgstream.stream.socket._sock).replace(  # type: ignore | ||||
|                         "socket.socket", "Channel") | ||||
|         return object.__repr__(self) | ||||
|             self._transport.stream.socket._sock | ||||
|         ).replace(  # type: ignore | ||||
|             "socket.socket", | ||||
|             "Channel", | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> Optional[tuple[str, int]]: | ||||
|         return self.msgstream.laddr if self.msgstream else None | ||||
|     def laddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.laddr if self._transport else None | ||||
| 
 | ||||
|     @property | ||||
|     def raddr(self) -> Optional[tuple[str, int]]: | ||||
|         return self.msgstream.raddr if self.msgstream else None | ||||
|     def raddr(self) -> tuple[str, int]|None: | ||||
|         return self._transport.raddr if self._transport else None | ||||
| 
 | ||||
|     async def connect( | ||||
|         self, | ||||
|  | @ -356,26 +636,62 @@ class Channel: | |||
|             *destaddr, | ||||
|             **kwargs | ||||
|         ) | ||||
|         msgstream = self.set_msg_transport(stream) | ||||
|         transport = self.set_msg_transport(stream) | ||||
| 
 | ||||
|         log.transport( | ||||
|             f'Opened channel[{type(msgstream)}]: {self.laddr} -> {self.raddr}' | ||||
|             f'Opened channel[{type(transport)}]: {self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         return msgstream | ||||
|         return transport | ||||
| 
 | ||||
|     async def send(self, item: Any) -> None: | ||||
|     # TODO: something like, | ||||
|     # `pdbp.hideframe_on(errors=[MsgTypeError])` | ||||
|     # instead of the `try/except` hack we have rn.. | ||||
|     # seems like a pretty useful thing to have in general | ||||
|     # along with being able to filter certain stack frame(s / sets) | ||||
|     # possibly based on the current log-level? | ||||
|     async def send( | ||||
|         self, | ||||
|         payload: Any, | ||||
| 
 | ||||
|         log.transport(f"send `{item}`")  # type: ignore | ||||
|         assert self.msgstream | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|         await self.msgstream.send(item) | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a coded msg-blob over the transport. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         try: | ||||
|             log.transport( | ||||
|                 '=> send IPC msg:\n\n' | ||||
|                 f'{pformat(payload)}\n' | ||||
|             ) | ||||
|             # assert self._transport  # but why typing? | ||||
|             await self._transport.send( | ||||
|                 payload, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|         except BaseException as _err: | ||||
|             err = _err  # bind for introspection | ||||
|             if not isinstance(_err, MsgTypeError): | ||||
|                 # assert err | ||||
|                 __tracebackhide__: bool = False | ||||
|             else: | ||||
|                 assert err.cid | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         assert self.msgstream | ||||
|         return await self.msgstream.recv() | ||||
|         assert self._transport | ||||
|         return await self._transport.recv() | ||||
| 
 | ||||
|         # TODO: auto-reconnect features like 0mq/nanomsg? | ||||
|         # -[ ] implement it manually with nods to SC prot | ||||
|         #      possibly on multiple transport backends? | ||||
|         #  -> seems like that might be re-inventing scalability | ||||
|         #     prots tho no? | ||||
|         # try: | ||||
|         #     return await self.msgstream.recv() | ||||
|         #     return await self._transport.recv() | ||||
|         # except trio.BrokenResourceError: | ||||
|         #     if self._autorecon: | ||||
|         #         await self._reconnect() | ||||
|  | @ -388,8 +704,8 @@ class Channel: | |||
|             f'Closing channel to {self.uid} ' | ||||
|             f'{self.laddr} -> {self.raddr}' | ||||
|         ) | ||||
|         assert self.msgstream | ||||
|         await self.msgstream.stream.aclose() | ||||
|         assert self._transport | ||||
|         await self._transport.stream.aclose() | ||||
|         self._closed = True | ||||
| 
 | ||||
|     async def __aenter__(self): | ||||
|  | @ -400,8 +716,11 @@ class Channel: | |||
|         await self.aclose(*args) | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
|         return self._aiter_msgs | ||||
| 
 | ||||
|     # ?TODO? run any reconnection sequence? | ||||
|     # -[ ] prolly should be impl-ed as deco-API? | ||||
|     # | ||||
|     # async def _reconnect(self) -> None: | ||||
|     #     """Handle connection failures by polling until a reconnect can be | ||||
|     #     established. | ||||
|  | @ -419,7 +738,6 @@ class Channel: | |||
|     #             else: | ||||
|     #                 log.transport("Stream connection re-established!") | ||||
| 
 | ||||
|     #                 # TODO: run any reconnection sequence | ||||
|     #                 # on_recon = self._recon_seq | ||||
|     #                 # if on_recon: | ||||
|     #                 #     await on_recon(self) | ||||
|  | @ -433,23 +751,42 @@ class Channel: | |||
|     #                     " for re-establishment") | ||||
|     #             await trio.sleep(1) | ||||
| 
 | ||||
|     async def _aiter_recv( | ||||
|     async def _iter_msgs( | ||||
|         self | ||||
|     ) -> AsyncGenerator[Any, None]: | ||||
|         ''' | ||||
|         Async iterate items from underlying stream. | ||||
|         Yield `MsgType` IPC msgs decoded and delivered from | ||||
|         an underlying `MsgTransport` protocol. | ||||
| 
 | ||||
|         This is a streaming routine also implemented as an async-gen | ||||
|         func (same as `MsgTransport._iter_packets()`) which gets allocated by | ||||
|         a `.__call__()` inside `.__init__()` where it is assigned to | ||||
|         the `._aiter_msgs` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         assert self.msgstream | ||||
|         assert self._transport | ||||
|         while True: | ||||
|             try: | ||||
|                 async for item in self.msgstream: | ||||
|                     yield item | ||||
|                     # sent = yield item | ||||
|                     # if sent is not None: | ||||
|                     #     # optimization, passing None through all the | ||||
|                     #     # time is pointless | ||||
|                     #     await self.msgstream.send(sent) | ||||
|                 async for msg in self._transport: | ||||
|                     match msg: | ||||
|                         # NOTE: if transport/interchange delivers | ||||
|                         # a type error, we pack it with the far | ||||
|                         # end peer `Actor.uid` and relay the | ||||
|                         # `Error`-msg upward to the `._rpc` stack | ||||
|                         # for normal RAE handling. | ||||
|                         case MsgTypeError(): | ||||
|                             yield pack_from_raise( | ||||
|                                 local_err=msg, | ||||
|                                 cid=msg.cid, | ||||
| 
 | ||||
|                                 # XXX we pack it here bc lower | ||||
|                                 # layers have no notion of an | ||||
|                                 # actor-id ;) | ||||
|                                 src_uid=self.uid, | ||||
|                             ) | ||||
|                         case _: | ||||
|                             yield msg | ||||
| 
 | ||||
|             except trio.BrokenResourceError: | ||||
| 
 | ||||
|                 # if not self._autorecon: | ||||
|  | @ -462,12 +799,14 @@ class Channel: | |||
|             #     continue | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self.msgstream.connected() if self.msgstream else False | ||||
|         return self._transport.connected() if self._transport else False | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| @acm | ||||
| async def _connect_chan( | ||||
|     host: str, port: int | ||||
|     host: str, | ||||
|     port: int | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[Channel, None]: | ||||
|     ''' | ||||
|     Create and connect a channel with disconnect on context manager | ||||
|  | @ -477,4 +816,5 @@ async def _connect_chan( | |||
|     chan = Channel((host, port)) | ||||
|     await chan.connect() | ||||
|     yield chan | ||||
|     with trio.CancelScope(shield=True): | ||||
|         await chan.aclose() | ||||
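A minimal sketch of dialing a peer with the (private) helper above; the shielded scope added in this hunk ensures `aclose()` completes even if the caller is cancelled mid-teardown (the host/port pair is illustrative):

    import trio
    from tractor._ipc import _connect_chan

    async def dial():
        async with _connect_chan('127.0.0.1', 1616) as chan:
            print(f'connected: {chan.laddr} -> {chan.raddr}')
        # on exit: `chan.aclose()` runs inside a shielded CancelScope

    # trio.run(dial)  # requires a listening peer on that port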
|  |  | |||
|  | @ -0,0 +1,151 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Multiaddress parser and utils according the spec(s) defined by | ||||
| `libp2p` and used in dependent project such as `ipfs`: | ||||
| 
 | ||||
| - https://docs.libp2p.io/concepts/fundamentals/addressing/ | ||||
| - https://github.com/libp2p/specs/blob/master/addressing/README.md | ||||
| 
 | ||||
| ''' | ||||
| from typing import Iterator | ||||
| 
 | ||||
| from bidict import bidict | ||||
| 
 | ||||
| # TODO: see if we can leverage libp2p ecosys projects instead of | ||||
| # rolling our own (parser) impls of the above addressing specs: | ||||
| # - https://github.com/libp2p/py-libp2p | ||||
| # - https://docs.libp2p.io/concepts/nat/circuit-relay/#relay-addresses | ||||
| # prots: bidict[int, str] = bidict({ | ||||
| prots: dict[str, int] = { | ||||
|     'ipv4': 3, | ||||
|     'ipv6': 3, | ||||
|     'wg': 3, | ||||
| 
 | ||||
|     'tcp': 4, | ||||
|     'udp': 4, | ||||
| 
 | ||||
|     # TODO: support the next-gen shite Bo | ||||
|     # 'quic': 4, | ||||
|     # 'ssh': 7,  # via rsyscall bootstrapping | ||||
| } | ||||
| 
 | ||||
| prot_params: dict[str, tuple[str, ...]] = { | ||||
|     'ipv4': ('addr',), | ||||
|     'ipv6': ('addr',), | ||||
|     'wg': ('addr', 'port', 'pubkey'), | ||||
| 
 | ||||
|     'tcp': ('port',), | ||||
|     'udp': ('port',), | ||||
| 
 | ||||
|     # 'quic': ('port',), | ||||
|     # 'ssh': ('port',), | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def iter_prot_layers( | ||||
|     multiaddr: str, | ||||
| ) -> Iterator[ | ||||
|     tuple[ | ||||
|         str, | ||||
|         list[str] | ||||
|     ] | ||||
| ]: | ||||
|     ''' | ||||
|     Unpack a libp2p style "multiaddress" into multiple "segments" | ||||
|     for each "layer" of the protocoll stack (in OSI terms). | ||||
| 
 | ||||
|     ''' | ||||
|     tokens: list[str] = multiaddr.split('/') | ||||
|     root, tokens = tokens[0], tokens[1:] | ||||
|     assert not root  # there is a root '/' on LHS | ||||
|     itokens = iter(tokens) | ||||
| 
 | ||||
|     prot: str | None = None | ||||
|     params: list[str] = [] | ||||
|     for token in itokens: | ||||
|         # every prot path should start with a known | ||||
|         # key-str. | ||||
|         if token in prots: | ||||
|             if prot is None: | ||||
|                 prot: str = token | ||||
|             else: | ||||
|                 yield prot, params | ||||
|                 prot = token | ||||
| 
 | ||||
|             params = [] | ||||
| 
 | ||||
|         elif token not in prots: | ||||
|             params.append(token) | ||||
| 
 | ||||
|     else: | ||||
|         yield prot, params | ||||
| 
 | ||||
| 
 | ||||
| def parse_maddr( | ||||
|     multiaddr: str, | ||||
| ) -> dict[str, str | int | dict]: | ||||
|     ''' | ||||
|     Parse a libp2p style "multiaddress" into its distinct protocol | ||||
|     segments where each segment is of the form: | ||||
| 
 | ||||
|         `../<protocol>/<param0>/<param1>/../<paramN>` | ||||
| 
 | ||||
|     and is loaded into an (order preserving) `layers: dict[str, | ||||
|     dict[str, Any]]` which holds each protocol-layer-segment of the | ||||
|     original `str` path as a separate entry according to its approx | ||||
|     OSI "layer number". | ||||
| 
 | ||||
|     Any `paramN` in the path must be distinctly defined by a str-token in the | ||||
|     (module global) `prot_params` table. | ||||
| 
 | ||||
|     E.g. for wireguard, which requires an address, port number and public key, | ||||
|     the protocol params are specified as the entry: | ||||
| 
 | ||||
|         'wg': ('addr', 'port', 'pubkey'), | ||||
| 
 | ||||
|     and are thus parsed from a maddr in that order: | ||||
|         `'/wg/1.1.1.1/51820/<pubkey>'` | ||||
| 
 | ||||
|     ''' | ||||
|     layers: dict[str, str | int | dict] = {} | ||||
|     for ( | ||||
|         prot_key, | ||||
|         params, | ||||
|     ) in iter_prot_layers(multiaddr): | ||||
| 
 | ||||
|         layer: int = prots[prot_key]  # OSI layer used for sorting | ||||
|         ep: dict[str, int | str] = {'layer': layer} | ||||
|         layers[prot_key] = ep | ||||
| 
 | ||||
|         # TODO; validation and resolving of names: | ||||
|         # - each param via a validator provided as part of the | ||||
|         #   prot_params def? (also see `"port"` case below..) | ||||
|         # - do a resolv step that will check addrs against | ||||
|         #   any loaded network.resolv: dict[str, str] | ||||
|         rparams: list = list(reversed(params)) | ||||
|         for key in prot_params[prot_key]: | ||||
|             val: str | int = rparams.pop() | ||||
| 
 | ||||
|             # TODO: UGHH, dunno what we should do for validation | ||||
|             # here, put it in the params spec somehow? | ||||
|             if key == 'port': | ||||
|                 val = int(val) | ||||
| 
 | ||||
|             ep[key] = val | ||||
| 
 | ||||
|     return layers | ||||
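Working a simple two-layer maddr through the parser above (params are popped in reverse order and `'port'` values are coerced to `int`):

    layers = parse_maddr('/ipv4/127.0.0.1/tcp/1616')
    assert layers == {
        'ipv4': {'layer': 3, 'addr': '127.0.0.1'},
        'tcp': {'layer': 4, 'port': 1616},
    }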
|  | @ -15,71 +15,70 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Memory boundary "Portals": an API for structured | ||||
| concurrency linked tasks running in disparate memory domains. | ||||
| Memory "portal" contruct. | ||||
| 
 | ||||
| "Memory portals" are both an API and set of IPC wrapping primitives | ||||
| for managing structured concurrency "cancel-scope linked" tasks | ||||
| running in disparate virtual memory domains - at least in different | ||||
| OS processes, possibly on different (hardware) hosts. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import importlib | ||||
| import inspect | ||||
| from typing import ( | ||||
|     Any, Optional, | ||||
|     Callable, AsyncGenerator, | ||||
|     Type, | ||||
|     Any, | ||||
|     Callable, | ||||
|     AsyncGenerator, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from functools import partial | ||||
| from dataclasses import dataclass | ||||
| from pprint import pformat | ||||
| import warnings | ||||
| 
 | ||||
| import trio | ||||
| from async_generator import asynccontextmanager | ||||
| 
 | ||||
| from .trionics import maybe_open_nursery | ||||
| from ._state import current_actor | ||||
| from ._state import ( | ||||
|     current_actor, | ||||
| ) | ||||
| from ._ipc import Channel | ||||
| from .log import get_logger | ||||
| from .msg import NamespacePath | ||||
| from ._exceptions import ( | ||||
|     unpack_error, | ||||
|     NoResult, | ||||
|     ContextCancelled, | ||||
| from .msg import ( | ||||
|     # Error, | ||||
|     PayloadMsg, | ||||
|     NamespacePath, | ||||
|     Return, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     # unpack_error, | ||||
|     NoResult, | ||||
| ) | ||||
| from ._context import ( | ||||
|     Context, | ||||
|     open_context_from_portal, | ||||
| ) | ||||
| from ._streaming import ( | ||||
|     MsgStream, | ||||
| ) | ||||
| from ._context import Context | ||||
| from ._streaming import MsgStream | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| def _unwrap_msg( | ||||
|     msg: dict[str, Any], | ||||
|     channel: Channel | ||||
| 
 | ||||
| ) -> Any: | ||||
|     __tracebackhide__ = True | ||||
|     try: | ||||
|         return msg['return'] | ||||
|     except KeyError: | ||||
|         # internal error should never get here | ||||
|         assert msg.get('cid'), "Received internal error at portal?" | ||||
|         raise unpack_error(msg, channel) from None | ||||
| 
 | ||||
| 
 | ||||
| class MessagingError(Exception): | ||||
|     'Some kind of unexpected SC messaging dialog issue' | ||||
| 
 | ||||
| 
 | ||||
| class Portal: | ||||
|     ''' | ||||
|     A 'portal' to a(n) (remote) ``Actor``. | ||||
|     A 'portal' to a memory-domain-separated `Actor`. | ||||
| 
 | ||||
|     A portal is "opened" (and eventually closed) by one side of an | ||||
|     inter-actor communication context. The side which opens the portal | ||||
|     is equivalent to a "caller" in function parlance and usually is | ||||
|     either the called actor's parent (in process tree hierarchy terms) | ||||
|     or a client interested in scheduling work to be done remotely in a | ||||
|     far process. | ||||
|     process which has a separate (virtual) memory domain. | ||||
| 
 | ||||
|     The portal api allows the "caller" actor to invoke remote routines | ||||
|     and receive results through an underlying ``tractor.Channel`` as | ||||
|  | @ -89,22 +88,45 @@ class Portal: | |||
|     like having a "portal" between the seperate actor memory spaces. | ||||
| 
 | ||||
|     ''' | ||||
|     # the timeout for a remote cancel request sent to | ||||
|     # a(n) (peer) actor. | ||||
|     cancel_timeout = 0.5 | ||||
|     # global timeout for remote cancel requests sent to | ||||
|     # connected (peer) actors. | ||||
|     cancel_timeout: float = 0.5 | ||||
| 
 | ||||
|     def __init__(self, channel: Channel) -> None: | ||||
|         self.channel = channel | ||||
|     def __init__( | ||||
|         self, | ||||
|         channel: Channel, | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self._chan: Channel = channel | ||||
|         # during the portal's lifetime | ||||
|         self._result_msg: Optional[dict] = None | ||||
|         self._final_result_pld: Any|None = None | ||||
|         self._final_result_msg: PayloadMsg|None = None | ||||
| 
 | ||||
|         # When set to a ``Context`` (when _submit_for_result is called) | ||||
|         # it is expected that ``result()`` will be awaited at some | ||||
|         # point. | ||||
|         self._expect_result: Context | None = None | ||||
|         self._expect_result_ctx: Context|None = None | ||||
|         self._streams: set[MsgStream] = set() | ||||
|         self.actor = current_actor() | ||||
|         self.actor: Actor = current_actor() | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         return self._chan | ||||
| 
 | ||||
|     @property | ||||
|     def channel(self) -> Channel: | ||||
|         ''' | ||||
|         Proxy to legacy attr name.. | ||||
| 
 | ||||
|         Consider the shorter `Portal.chan` instead of `.channel` ;) | ||||
|         ''' | ||||
|         log.debug( | ||||
|             'Consider the shorter `Portal.chan` instead of `.channel` ;)' | ||||
|         ) | ||||
|         return self.chan | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def _submit_for_result( | ||||
|         self, | ||||
|         ns: str, | ||||
|  | @ -112,32 +134,34 @@ class Portal: | |||
|         **kwargs | ||||
|     ) -> None: | ||||
| 
 | ||||
|         assert self._expect_result is None, \ | ||||
|                 "A pending main result has already been submitted" | ||||
| 
 | ||||
|         self._expect_result = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             ns, | ||||
|             func, | ||||
|             kwargs | ||||
|         if self._expect_result_ctx is not None: | ||||
|             raise RuntimeError( | ||||
|                 'A pending main result has already been submitted' | ||||
|             ) | ||||
| 
 | ||||
|     async def _return_once( | ||||
|         self._expect_result_ctx: Context = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             nsf=NamespacePath(f'{ns}:{func}'), | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: we should deprecate this API right? since if we remove | ||||
|     # `.run_in_actor()` (and instead move it to a `.highlevel` | ||||
|     # wrapper api (around a single `.open_context()` call) we don't | ||||
|     # really have any notion of a "main" remote task any more? | ||||
|     # | ||||
|     # @api_frame | ||||
|     async def wait_for_result( | ||||
|         self, | ||||
|         ctx: Context, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         assert ctx._remote_func_type == 'asyncfunc'  # single response | ||||
|         msg = await ctx._recv_chan.receive() | ||||
|         return msg | ||||
| 
 | ||||
|     async def result(self) -> Any: | ||||
|         hide_tb: bool = True, | ||||
|     ) -> Any: | ||||
|         ''' | ||||
|         Return the result(s) from the remote actor's "main" task. | ||||
|         Return the final result delivered by a `Return`-msg from the | ||||
|         remote peer actor's "main" task's `return` statement. | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__ = True | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         # Check for non-rpc errors slapped on the | ||||
|         # channel for which we always raise | ||||
|         exc = self.channel._exc | ||||
|  | @ -145,7 +169,7 @@ class Portal: | |||
|             raise exc | ||||
| 
 | ||||
|         # not expecting a "main" result | ||||
|         if self._expect_result is None: | ||||
|         if self._expect_result_ctx is None: | ||||
|             log.warning( | ||||
|                 f"Portal for {self.channel.uid} not expecting a final" | ||||
|                 " result?\nresult() should only be called if subactor" | ||||
|  | @ -153,14 +177,41 @@ class Portal: | |||
|             return NoResult | ||||
| 
 | ||||
|         # expecting a "main" result | ||||
|         assert self._expect_result | ||||
|         assert self._expect_result_ctx | ||||
| 
 | ||||
|         if self._result_msg is None: | ||||
|             self._result_msg = await self._return_once( | ||||
|                 self._expect_result | ||||
|         if self._final_result_msg is None: | ||||
|             try: | ||||
|                 ( | ||||
|                     self._final_result_msg, | ||||
|                     self._final_result_pld, | ||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg_w_pld( | ||||
|                     ipc=self._expect_result_ctx, | ||||
|                     expect_msg=Return, | ||||
|                 ) | ||||
|             except BaseException as err: | ||||
|                 # TODO: wrap this into `@api_frame` optionally with | ||||
|                 # some kinda filtering mechanism like log levels? | ||||
|                 __tracebackhide__: bool = False | ||||
|                 raise err | ||||
| 
 | ||||
|         return _unwrap_msg(self._result_msg, self.channel) | ||||
|         return self._final_result_pld | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def result( | ||||
|         self, | ||||
|         *args, | ||||
|         **kwargs, | ||||
|     ) -> Any|Exception: | ||||
|         typname: str = type(self).__name__ | ||||
|         log.warning( | ||||
|             f'`{typname}.result()` is DEPRECATED!\n' | ||||
|             f'Use `{typname}.wait_for_result()` instead!\n' | ||||
|         ) | ||||
|         return await self.wait_for_result( | ||||
|             *args, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
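A hedged usage sketch of the renamed final-result API: `.result()` above now just warns and forwards to `.wait_for_result()`. The `tractor.open_nursery()` / `.run_in_actor()` spawn calls and the `add()` target func are illustrative assumptions, not part of this diff:

    import tractor
    import trio

    async def add(a: int, b: int) -> int:
        return a + b

    async def main():
        async with tractor.open_nursery() as an:
            portal = await an.run_in_actor(add, a=1, b=2)
            # new name; `portal.result()` emits a deprecation
            # warning then calls this:
            assert await portal.wait_for_result() == 3

    trio.run(main)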
|     async def _cancel_streams(self): | ||||
|         # terminate all locally running async generator | ||||
|  | @ -191,33 +242,60 @@ class Portal: | |||
| 
 | ||||
|     ) -> bool: | ||||
|         ''' | ||||
|         Cancel the actor on the other end of this portal. | ||||
|         Cancel the actor runtime (and thus process) on the far | ||||
|         end of this portal. | ||||
| 
 | ||||
|         **NOTE** THIS CANCELS THE ENTIRE RUNTIME AND THE | ||||
|         SUBPROCESS, it DOES NOT just cancel the remote task. If you | ||||
|         want to have a handle to cancel a remote ``tri.Task`` look | ||||
|         at `.open_context()` and the definition of | ||||
|         `._context.Context.cancel()` which CAN be used for this | ||||
|         purpose. | ||||
| 
 | ||||
|         ''' | ||||
|         if not self.channel.connected(): | ||||
|             log.cancel("This channel is already closed can't cancel") | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         chan: Channel = self.channel | ||||
|         if not chan.connected(): | ||||
|             log.runtime( | ||||
|                 'This channel is already closed, skipping cancel request..' | ||||
|             ) | ||||
|             return False | ||||
| 
 | ||||
|         reminfo: str = ( | ||||
|             f'c)=> {self.channel.uid}\n' | ||||
|             f'  |_{chan}\n' | ||||
|         ) | ||||
|         log.cancel( | ||||
|             f"Sending actor cancel request to {self.channel.uid} on " | ||||
|             f"{self.channel}") | ||||
| 
 | ||||
|         self.channel._cancel_called = True | ||||
|             f'Requesting actor-runtime cancel for peer\n\n' | ||||
|             f'{reminfo}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX the one spot we set it? | ||||
|         self.channel._cancel_called: bool = True | ||||
|         try: | ||||
|             # send cancel cmd - might not get response | ||||
|             # XXX: sure would be nice to make this work with a proper shield | ||||
|             # XXX: sure would be nice to make this work with | ||||
|             # a proper shield | ||||
|             with trio.move_on_after( | ||||
|                 timeout | ||||
|                 or self.cancel_timeout | ||||
|                 or | ||||
|                 self.cancel_timeout | ||||
|             ) as cs: | ||||
|                 cs.shield = True | ||||
| 
 | ||||
|                 await self.run_from_ns('self', 'cancel') | ||||
|                 cs.shield: bool = True | ||||
|                 await self.run_from_ns( | ||||
|                     'self', | ||||
|                     'cancel', | ||||
|                 ) | ||||
|                 return True | ||||
| 
 | ||||
|             if cs.cancelled_caught: | ||||
|                 log.cancel(f"May have failed to cancel {self.channel.uid}") | ||||
|                 # may timeout and we never get an ack (obvi racy) | ||||
|                 # but that doesn't mean it wasn't cancelled. | ||||
|                 log.debug( | ||||
|                     'May have failed to cancel peer?\n' | ||||
|                     f'{reminfo}' | ||||
|                 ) | ||||
| 
 | ||||
|             # if we get here some weird cancellation case happened | ||||
|             return False | ||||
|  | @ -226,11 +304,15 @@ class Portal: | |||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
|         ): | ||||
|             log.cancel( | ||||
|                 f"{self.channel} for {self.channel.uid} was already " | ||||
|                 "closed or broken?") | ||||
|             log.debug( | ||||
|                 'IPC chan for actor already closed or broken?\n\n' | ||||
|                 f'{self.channel.uid}\n' | ||||
|                 f' |_{self.channel}\n' | ||||
|             ) | ||||
|             return False | ||||
| 
 | ||||
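As the re-worded docstring stresses, `.cancel_actor()` tears down the peer's entire runtime and process, not a single remote task; a sketch (the `'sleeper'` actor name is a made-up example):

    async with tractor.open_nursery() as an:
        portal = await an.start_actor('sleeper')
        # kills the subactor's *entire* runtime + process, waiting
        # up to `cancel_timeout` for an ack; per-task cancellation
        # instead goes through `Context.cancel()`:
        assert await portal.cancel_actor()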
|     # TODO: do we still need this for low level `Actor`-runtime | ||||
|     # method calls or can we also remove it? | ||||
|     async def run_from_ns( | ||||
|         self, | ||||
|         namespace_path: str, | ||||
|  | @ -249,25 +331,33 @@ class Portal: | |||
| 
 | ||||
|           A special namespace `self` can be used to invoke `Actor` | ||||
|           instance methods in the remote runtime. Currently this | ||||
|             should only be used solely for ``tractor`` runtime | ||||
|             internals. | ||||
|           should only ever be used for `Actor` (method) runtime | ||||
|           internals! | ||||
| 
 | ||||
|         ''' | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             namespace_path, | ||||
|             function_name, | ||||
|             kwargs, | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         nsf = NamespacePath( | ||||
|             f'{namespace_path}:{function_name}' | ||||
|         ) | ||||
|         ctx: Context = await self.actor.start_remote_task( | ||||
|             chan=self.channel, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
|         msg = await self._return_once(ctx) | ||||
|         return _unwrap_msg(msg, self.channel) | ||||
| 
 | ||||
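Per the TODO above, `run_from_ns()` is now mostly runtime-internal; e.g. the `self`-namespace invocation used by `.cancel_actor()` further up:

    # invoke `Actor.cancel()` in the remote runtime and wait on the
    # `Return` msg payload:
    await portal.run_from_ns('self', 'cancel')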
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def run( | ||||
|         self, | ||||
|         func: str, | ||||
|         fn_name: Optional[str] = None, | ||||
|         fn_name: str|None = None, | ||||
|         **kwargs | ||||
| 
 | ||||
|     ) -> Any: | ||||
|         ''' | ||||
|         Submit a remote function to be scheduled and run by actor, in | ||||
|  | @ -277,6 +367,8 @@ class Portal: | |||
|         remote rpc task or a local async generator instance. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if isinstance(func, str): | ||||
|             warnings.warn( | ||||
|                 "`Portal.run(namespace: str, funcname: str)` is now" | ||||
|  | @ -286,8 +378,9 @@ class Portal: | |||
|                 DeprecationWarning, | ||||
|                 stacklevel=2, | ||||
|             ) | ||||
|             fn_mod_path = func | ||||
|             fn_mod_path: str = func | ||||
|             assert isinstance(fn_name, str) | ||||
|             nsf = NamespacePath(f'{fn_mod_path}:{fn_name}') | ||||
| 
 | ||||
|         else:  # function reference was passed directly | ||||
|             if ( | ||||
|  | @ -300,27 +393,36 @@ class Portal: | |||
|                 raise TypeError( | ||||
|                     f'{func} must be a non-streaming async function!') | ||||
| 
 | ||||
|             fn_mod_path, fn_name = NamespacePath.from_ref(func).to_tuple() | ||||
|             nsf = NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             fn_mod_path, | ||||
|             fn_name, | ||||
|             kwargs, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
|         return _unwrap_msg( | ||||
|             await self._return_once(ctx), | ||||
|             self.channel, | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ) | ||||
| 
 | ||||
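A sketch of the two invocation styles `run()` handles above; the module and function names are placeholders, not from this diff:

    # deprecated str-path style (emits a `DeprecationWarning`):
    await portal.run('mypkg.mymod', 'fetch_data')

    # preferred: pass the (non-streaming) async function reference,
    # resolved remotely via its `NamespacePath`:
    from mypkg import mymod
    await portal.run(mymod.fetch_data)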
|     @asynccontextmanager | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     @acm | ||||
|     async def open_stream_from( | ||||
|         self, | ||||
|         async_gen_func: Callable,  # typing: ignore | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> AsyncGenerator[MsgStream, None]: | ||||
|         ''' | ||||
|         Legacy one-way streaming API. | ||||
| 
 | ||||
|         TODO: re-impl on top `Portal.open_context()` + an async gen | ||||
|         around `Context.open_stream()`. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if not inspect.isasyncgenfunction(async_gen_func): | ||||
|             if not ( | ||||
|  | @ -330,17 +432,12 @@ class Portal: | |||
|                 raise TypeError( | ||||
|                     f'{async_gen_func} must be an async generator function!') | ||||
| 
 | ||||
|         fn_mod_path, fn_name = NamespacePath.from_ref( | ||||
|             async_gen_func | ||||
|         ).to_tuple() | ||||
| 
 | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|         ctx: Context = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             fn_mod_path, | ||||
|             fn_name, | ||||
|             kwargs | ||||
|             nsf=NamespacePath.from_ref(async_gen_func), | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
| 
 | ||||
|         # ensure receive-only stream entrypoint | ||||
|         assert ctx._remote_func_type == 'asyncgen' | ||||
|  | @ -348,13 +445,14 @@ class Portal: | |||
|         try: | ||||
|             # deliver receive only stream | ||||
|             async with MsgStream( | ||||
|                 ctx, ctx._recv_chan, | ||||
|             ) as rchan: | ||||
|                 self._streams.add(rchan) | ||||
|                 yield rchan | ||||
|                 ctx=ctx, | ||||
|                 rx_chan=ctx._rx_chan, | ||||
|             ) as stream: | ||||
|                 self._streams.add(stream) | ||||
|                 ctx._stream = stream | ||||
|                 yield stream | ||||
| 
 | ||||
|         finally: | ||||
| 
 | ||||
|             # cancel the far end task on consumer close | ||||
|             # NOTE: this is a special case since we assume that if using | ||||
|             # this ``.open_stream_from()`` api, the stream is a one | ||||
|  | @ -373,205 +471,14 @@ class Portal: | |||
| 
 | ||||
|             # XXX: should this always be done? | ||||
|             # await recv_chan.aclose() | ||||
|             self._streams.remove(rchan) | ||||
|             self._streams.remove(stream) | ||||
| 
 | ||||
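A minimal sketch of this legacy one-way streaming API; the async-gen target is an illustrative assumption:

    async def counter(limit: int):
        for i in range(limit):
            yield i

    async with portal.open_stream_from(counter, limit=3) as stream:
        async for value in stream:
            print(value)  # 0, 1, 2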
|     @asynccontextmanager | ||||
|     async def open_context( | ||||
| 
 | ||||
|         self, | ||||
|         func: Callable, | ||||
|         allow_overruns: bool = False, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> AsyncGenerator[tuple[Context, Any], None]: | ||||
|         ''' | ||||
|         Open an inter-actor task context. | ||||
| 
 | ||||
|         This is a synchronous API which allows for deterministic | ||||
|         setup/teardown of a remote task. The yielded ``Context`` further | ||||
|         allows for opening bidirectional streams, explicit cancellation | ||||
|         and synchronized final result collection. See ``tractor.Context``. | ||||
| 
 | ||||
|         ''' | ||||
|         # conduct target func method structural checks | ||||
|         if not inspect.iscoroutinefunction(func) and ( | ||||
|             getattr(func, '_tractor_contex_function', False) | ||||
|         ): | ||||
|             raise TypeError( | ||||
|                 f'{func} must be an async generator function!') | ||||
| 
 | ||||
|         # TODO: i think from here onward should probably | ||||
|         # just be factored into an `@acm` inside a new | ||||
|         # a new `_context.py` mod. | ||||
|         fn_mod_path, fn_name = NamespacePath.from_ref(func).to_tuple() | ||||
| 
 | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             fn_mod_path, | ||||
|             fn_name, | ||||
|             kwargs, | ||||
| 
 | ||||
|             # NOTE: it's important to expose this since you might | ||||
|             # get the case where the parent who opened the context does | ||||
|             # not open a stream until after some slow startup/init | ||||
|             # period, in which case when the first msg is read from | ||||
|             # the feeder mem chan, say when first calling | ||||
|             # `Context.open_stream(allow_overruns=True)`, the overrun condition will be | ||||
|             # raised before any ignoring of overflow msgs can take | ||||
|             # place.. | ||||
|             allow_overruns=allow_overruns, | ||||
|         ) | ||||
| 
 | ||||
|         assert ctx._remote_func_type == 'context' | ||||
|         msg = await ctx._recv_chan.receive() | ||||
| 
 | ||||
|         try: | ||||
|             # the "first" value here is delivered by the callee's | ||||
|             # ``Context.started()`` call. | ||||
|             first = msg['started'] | ||||
|             ctx._started_called = True | ||||
| 
 | ||||
|         except KeyError: | ||||
|             assert msg.get('cid'), ("Received internal error at context?") | ||||
| 
 | ||||
|             if msg.get('error'): | ||||
|                 # raise kerr from unpack_error(msg, self.channel) | ||||
|                 raise unpack_error(msg, self.channel) from None | ||||
|             else: | ||||
|                 raise MessagingError( | ||||
|                     f'Context for {ctx.cid} was expecting a `started` message' | ||||
|                     f' but received a non-error msg:\n{pformat(msg)}' | ||||
|                 ) | ||||
| 
 | ||||
|         _err: BaseException | None = None | ||||
|         ctx._portal: Portal = self | ||||
| 
 | ||||
|         uid: tuple = self.channel.uid | ||||
|         cid: str = ctx.cid | ||||
|         etype: Type[BaseException] | None = None | ||||
| 
 | ||||
|         # deliver context instance and .started() msg value in enter | ||||
|         # tuple. | ||||
|         try: | ||||
|             async with trio.open_nursery() as nurse: | ||||
|                 ctx._scope_nursery = nurse | ||||
|                 ctx._scope = nurse.cancel_scope | ||||
| 
 | ||||
|                 yield ctx, first | ||||
| 
 | ||||
|                 # when in allow_overruns mode there may be lingering | ||||
|                 # overflow sender tasks remaining? | ||||
|                 if nurse.child_tasks: | ||||
|                     # ensure we are in overrun state with | ||||
|                     # ``._allow_overruns=True`` bc otherwise | ||||
|                     # there should be no tasks in this nursery! | ||||
|                     if ( | ||||
|                         not ctx._allow_overruns | ||||
|                         or len(nurse.child_tasks) > 1 | ||||
|                     ): | ||||
|                         raise RuntimeError( | ||||
|                             'Context has sub-tasks but is ' | ||||
|                             'not in `allow_overruns=True` Mode!?' | ||||
|                         ) | ||||
|                     ctx._scope.cancel() | ||||
| 
 | ||||
|         except ContextCancelled as err: | ||||
|             _err = err | ||||
| 
 | ||||
|             # swallow and mask cross-actor task context cancels that | ||||
|             # were initiated by *this* side's task. | ||||
|             if not ctx._cancel_called: | ||||
|                 # XXX: this should NEVER happen! | ||||
|                 # from ._debug import breakpoint | ||||
|                 # await breakpoint() | ||||
|                 raise | ||||
| 
 | ||||
|             # if the context was cancelled by client code | ||||
|             # then we don't need to raise since user code | ||||
|             # is expecting this and the block should exit. | ||||
|             else: | ||||
|                 log.debug(f'Context {ctx} cancelled gracefully') | ||||
| 
 | ||||
|         except ( | ||||
|             BaseException, | ||||
| 
 | ||||
|             # more specifically, we need to handle these but not | ||||
|             # sure it's worth being pedantic: | ||||
|             # Exception, | ||||
|             # trio.Cancelled, | ||||
|             # KeyboardInterrupt, | ||||
| 
 | ||||
|         ) as err: | ||||
|             etype = type(err) | ||||
| 
 | ||||
|             # cancel ourselves on any error. | ||||
|             log.cancel( | ||||
|                 'Context cancelled for task, sending cancel request..\n' | ||||
|                 f'task:{cid}\n' | ||||
|                 f'actor:{uid}' | ||||
|             ) | ||||
|             try: | ||||
| 
 | ||||
|                 await ctx.cancel() | ||||
|             except trio.BrokenResourceError: | ||||
|                 log.warning( | ||||
|                     'IPC connection for context is broken?\n' | ||||
|                     f'task:{cid}\n' | ||||
|                     f'actor:{uid}' | ||||
|                 ) | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|         else: | ||||
|             if ctx.chan.connected(): | ||||
|                 log.info( | ||||
|                     'Waiting on final context-task result for\n' | ||||
|                     f'task: {cid}\n' | ||||
|                     f'actor: {uid}' | ||||
|                 ) | ||||
|                 result = await ctx.result() | ||||
|                 log.runtime( | ||||
|                     f'Context {fn_name} returned ' | ||||
|                     f'value from callee `{result}`' | ||||
|                 ) | ||||
| 
 | ||||
|         finally: | ||||
|             # though it should be impossible for any tasks | ||||
|             # operating *in* this scope to have survived | ||||
|             # we tear down the runtime feeder chan last | ||||
|             # to avoid premature stream clobbers. | ||||
|             if ctx._recv_chan is not None: | ||||
|                 # should we encapsulate this in the context api? | ||||
|                 await ctx._recv_chan.aclose() | ||||
| 
 | ||||
|             if etype: | ||||
|                 if ctx._cancel_called: | ||||
|                     log.cancel( | ||||
|                         f'Context {fn_name} cancelled by caller with\n{etype}' | ||||
|                     ) | ||||
|                 elif _err is not None: | ||||
|                     log.cancel( | ||||
|                         f'Context for task cancelled by callee with {etype}\n' | ||||
|                         f'target: `{fn_name}`\n' | ||||
|                         f'task:{cid}\n' | ||||
|                         f'actor:{uid}' | ||||
|                     ) | ||||
|             # XXX: (MEGA IMPORTANT) if this is a root opened process we | ||||
|             # wait for any immediate child in debug before popping the | ||||
|             # context from the runtime msg loop otherwise inside | ||||
|             # ``Actor._push_result()`` the msg will be discarded and in | ||||
|             # the case where that msg is global debugger unlock (via | ||||
|             # a "stop" msg for a stream), this can result in a deadlock | ||||
|             # where the root is waiting on the lock to clear but the | ||||
|             # child has already cleared it and clobbered IPC. | ||||
|             from ._debug import maybe_wait_for_debugger | ||||
|             await maybe_wait_for_debugger() | ||||
| 
 | ||||
|             # remove the context from runtime tracking | ||||
|             self.actor._contexts.pop( | ||||
|                 (self.channel.uid, ctx.cid), | ||||
|                 None, | ||||
|             ) | ||||
|     # NOTE: impl is found in `._context`` mod to make | ||||
|     # reading/groking the details simpler code-org-wise. This | ||||
|     # method does not have to be used over that `@acm` module func | ||||
|     # directly, it is for conventience and from the original API | ||||
|     # design. | ||||
|     open_context = open_context_from_portal | ||||
| 
 | ||||
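Though the implementation now lives in `._context`, the method-style call is preserved via the alias above; a hedged sketch (the `ping` target is made up for illustration):

    @tractor.context
    async def ping(ctx: tractor.Context) -> None:
        await ctx.started('pong')  # delivered as `first` below

    async with portal.open_context(ping) as (ctx, first):
        assert first == 'pong'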
| 
 | ||||
| @dataclass | ||||
|  | @ -586,7 +493,12 @@ class LocalPortal: | |||
|     actor: 'Actor'  # type: ignore # noqa | ||||
|     channel: Channel | ||||
| 
 | ||||
|     async def run_from_ns(self, ns: str, func_name: str, **kwargs) -> Any: | ||||
|     async def run_from_ns( | ||||
|         self, | ||||
|         ns: str, | ||||
|         func_name: str, | ||||
|         **kwargs, | ||||
|     ) -> Any: | ||||
|         ''' | ||||
|         Run a requested local function from a namespace path and | ||||
|         return its result. | ||||
|  | @ -597,11 +509,11 @@ class LocalPortal: | |||
|         return await func(**kwargs) | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| @acm | ||||
| async def open_portal( | ||||
| 
 | ||||
|     channel: Channel, | ||||
|     nursery: Optional[trio.Nursery] = None, | ||||
|     tn: trio.Nursery|None = None, | ||||
|     start_msg_loop: bool = True, | ||||
|     shield: bool = False, | ||||
| 
 | ||||
|  | @ -609,15 +521,19 @@ async def open_portal( | |||
|     ''' | ||||
|     Open a ``Portal`` through the provided ``channel``. | ||||
| 
 | ||||
|     Spawns a background task to handle message processing (normally | ||||
|     done by the actor-runtime implicitly). | ||||
|     Spawns a background task to handle RPC processing, normally | ||||
|     done by the actor-runtime implicitly via a call to | ||||
|     `._rpc.process_messages()` just after connection establishment. | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
|     assert actor | ||||
|     was_connected = False | ||||
|     was_connected: bool = False | ||||
| 
 | ||||
|     async with maybe_open_nursery(nursery, shield=shield) as nursery: | ||||
|     async with maybe_open_nursery( | ||||
|         tn, | ||||
|         shield=shield, | ||||
|     ) as tn: | ||||
| 
 | ||||
|         if not channel.connected(): | ||||
|             await channel.connect() | ||||
|  | @ -626,10 +542,10 @@ async def open_portal( | |||
|         if channel.uid is None: | ||||
|             await actor._do_handshake(channel) | ||||
| 
 | ||||
|         msg_loop_cs: Optional[trio.CancelScope] = None | ||||
|         msg_loop_cs: trio.CancelScope|None = None | ||||
|         if start_msg_loop: | ||||
|             from ._runtime import process_messages | ||||
|             msg_loop_cs = await nursery.start( | ||||
|             msg_loop_cs = await tn.start( | ||||
|                 partial( | ||||
|                     process_messages, | ||||
|                     actor, | ||||
|  | @ -646,12 +562,10 @@ async def open_portal( | |||
|             await portal.aclose() | ||||
| 
 | ||||
|             if was_connected: | ||||
|                 # gracefully signal remote channel-msg loop | ||||
|                 await channel.send(None) | ||||
|                 # await channel.aclose() | ||||
|                 await channel.aclose() | ||||
| 
 | ||||
|             # cancel background msg loop task | ||||
|             if msg_loop_cs: | ||||
|             if msg_loop_cs is not None: | ||||
|                 msg_loop_cs.cancel() | ||||
| 
 | ||||
|             nursery.cancel_scope.cancel() | ||||
|             tn.cancel_scope.cancel() | ||||
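A sketch of driving `open_portal()` by hand over an already-connected channel; the `_connect_chan()` import path and the default registry address are assumptions for illustration:

    from tractor._ipc import _connect_chan

    async with _connect_chan('127.0.0.1', 1616) as chan:
        async with open_portal(chan) as portal:
            # the msg-loop task is spawned in the (renamed) `tn`
            # nursery by default (`start_msg_loop=True`):
            await portal.run_from_ns('self', 'cancel')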
340	tractor/_root.py
							|  | @ -18,26 +18,28 @@ | |||
| Root actor runtime ignition(s). | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import importlib | ||||
| import inspect | ||||
| import logging | ||||
| import os | ||||
| import signal | ||||
| import sys | ||||
| import os | ||||
| import typing | ||||
| from typing import Callable | ||||
| import warnings | ||||
| 
 | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| 
 | ||||
| from ._runtime import ( | ||||
|     Actor, | ||||
|     Arbiter, | ||||
|     # TODO: rename and make a non-actor subtype? | ||||
|     # Arbiter as Registry, | ||||
|     async_main, | ||||
| ) | ||||
| from . import _debug | ||||
| from .devx import _debug | ||||
| from . import _spawn | ||||
| from . import _state | ||||
| from . import log | ||||
|  | @ -46,60 +48,131 @@ from ._exceptions import is_multi_cancelled | |||
| 
 | ||||
| 
 | ||||
| # set at startup and after forks | ||||
| _default_arbiter_host: str = '127.0.0.1' | ||||
| _default_arbiter_port: int = 1616 | ||||
| _default_host: str = '127.0.0.1' | ||||
| _default_port: int = 1616 | ||||
| 
 | ||||
| # default registry always on localhost | ||||
| _default_lo_addrs: list[tuple[str, int]] = [( | ||||
|     _default_host, | ||||
|     _default_port, | ||||
| )] | ||||
| 
 | ||||
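The registrar contact info is now a *list* of `(host, port)` pairs; a sketch of passing a custom set at runtime-open, mirroring the defaults just above (the second port is an arbitrary example):

    async with tractor.open_root_actor(
        registry_addrs=[
            ('127.0.0.1', 1616),
            ('127.0.0.1', 1617),
        ],
    ) as actor:
        ...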
| 
 | ||||
| logger = log.get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| @acm | ||||
| async def open_root_actor( | ||||
| 
 | ||||
|     *, | ||||
|     # defaults are above | ||||
|     arbiter_addr: tuple[str, int] | None = None, | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     # defaults are above | ||||
|     registry_addr: tuple[str, int] | None = None, | ||||
|     arbiter_addr: tuple[str, int]|None = None, | ||||
| 
 | ||||
|     name: str | None = 'root', | ||||
|     name: str|None = 'root', | ||||
| 
 | ||||
|     # either the `multiprocessing` start method: | ||||
|     # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods | ||||
|     # OR `trio` (the new default). | ||||
|     start_method: _spawn.SpawnMethodKey | None = None, | ||||
|     start_method: _spawn.SpawnMethodKey|None = None, | ||||
| 
 | ||||
|     # enables the multi-process debugger support | ||||
|     debug_mode: bool = False, | ||||
|     maybe_enable_greenback: bool = True,  # `.pause_from_sync()/breakpoint()` support | ||||
|     enable_stack_on_sig: bool = False, | ||||
| 
 | ||||
|     # internal logging | ||||
|     loglevel: str | None = None, | ||||
|     loglevel: str|None = None, | ||||
| 
 | ||||
|     enable_modules: list | None = None, | ||||
|     rpc_module_paths: list | None = None, | ||||
|     enable_modules: list|None = None, | ||||
|     rpc_module_paths: list|None = None, | ||||
| 
 | ||||
| ) -> typing.Any: | ||||
|     # NOTE: allow caller to ensure that only one registry exists | ||||
|     # and that this call creates it. | ||||
|     ensure_registry: bool = False, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
|     # XXX, proxied directly to `.devx._debug._maybe_enter_pm()` | ||||
|     # for REPL-entry logic. | ||||
|     debug_filter: Callable[ | ||||
|         [BaseException|BaseExceptionGroup], | ||||
|         bool, | ||||
|     ] = lambda err: not is_multi_cancelled(err), | ||||
| 
 | ||||
|     # TODO, a way for actors to augment passing derived | ||||
|     # read-only state to sublayers? | ||||
|     # extra_rt_vars: dict|None = None, | ||||
| 
 | ||||
| ) -> Actor: | ||||
|     ''' | ||||
|     Runtime init entry point for ``tractor``. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     # TODO: stick this in a `@cm` defined in `devx._debug`? | ||||
|     # | ||||
|     # Override the global debugger hook to make it play nice with | ||||
|     # ``trio``, see much discussion in: | ||||
|     # https://github.com/python-trio/trio/issues/1155#issuecomment-742964018 | ||||
|     builtin_bp_handler = sys.breakpointhook | ||||
|     orig_bp_path: str | None = os.environ.get('PYTHONBREAKPOINT', None) | ||||
|     os.environ['PYTHONBREAKPOINT'] = 'tractor._debug._set_trace' | ||||
|     builtin_bp_handler: Callable = sys.breakpointhook | ||||
|     orig_bp_path: str|None = os.environ.get( | ||||
|         'PYTHONBREAKPOINT', | ||||
|         None, | ||||
|     ) | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and maybe_enable_greenback | ||||
|         and ( | ||||
|             maybe_mod := await _debug.maybe_init_greenback( | ||||
|                 raise_not_found=False, | ||||
|             ) | ||||
|         ) | ||||
|     ): | ||||
|         logger.info( | ||||
|             f'Found `greenback` installed @ {maybe_mod}\n' | ||||
|             'Enabling `tractor.pause_from_sync()` support!\n' | ||||
|         ) | ||||
|         os.environ['PYTHONBREAKPOINT'] = ( | ||||
|             'tractor.devx._debug._sync_pause_from_builtin' | ||||
|         ) | ||||
|         _state._runtime_vars['use_greenback'] = True | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: disable `breakpoint()` by default (without | ||||
|         # `greenback`) since it will break any multi-actor | ||||
|         # usage by a clobbered TTY's stdstreams! | ||||
|         def block_bps(*args, **kwargs): | ||||
|             raise RuntimeError( | ||||
|                 'Trying to use `breakpoint()` eh?\n\n' | ||||
|                 'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n' | ||||
|                 'If you need to use it please install `greenback` and set ' | ||||
|                 '`debug_mode=True` when opening the runtime ' | ||||
|                 '(either via `.open_nursery()` or `open_root_actor()`)\n' | ||||
|             ) | ||||
| 
 | ||||
|         sys.breakpointhook = block_bps | ||||
|         # lol ok, | ||||
|         # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|         os.environ['PYTHONBREAKPOINT'] = "0" | ||||
| 
 | ||||
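A behavioural sketch of the new default: without `greenback` + `debug_mode=True`, a bare built-in `breakpoint()` inside any actor now raises (via the `block_bps` hook above) instead of clobbering the shared TTY:

    try:
        breakpoint()
    except RuntimeError as err:
        assert 'blocks `breakpoint()`' in str(err)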
|     # attempt to retrieve ``trio``'s sigint handler and stash it | ||||
|     # on our debugger lock state. | ||||
|     _debug.Lock._trio_handler = signal.getsignal(signal.SIGINT) | ||||
|     _debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) | ||||
| 
 | ||||
|     # mark top most level process as root actor | ||||
|     _state._runtime_vars['_is_root'] = True | ||||
| 
 | ||||
|     # caps based rpc list | ||||
|     enable_modules = enable_modules or [] | ||||
|     enable_modules = ( | ||||
|         enable_modules | ||||
|         or | ||||
|         [] | ||||
|     ) | ||||
| 
 | ||||
|     if rpc_module_paths: | ||||
|         warnings.warn( | ||||
|  | @ -115,29 +188,34 @@ async def open_root_actor( | |||
| 
 | ||||
|     if arbiter_addr is not None: | ||||
|         warnings.warn( | ||||
|             '`arbiter_addr` is now deprecated and has been renamed to' | ||||
|             '`registry_addr`.\nUse that instead..', | ||||
|             '`arbiter_addr` is now deprecated\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead..', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addrs = [arbiter_addr] | ||||
| 
 | ||||
|     registry_addr = (host, port) = ( | ||||
|         registry_addr | ||||
|         or arbiter_addr | ||||
|         or ( | ||||
|             _default_arbiter_host, | ||||
|             _default_arbiter_port, | ||||
|         ) | ||||
|     registry_addrs: list[tuple[str, int]] = ( | ||||
|         registry_addrs | ||||
|         or | ||||
|         _default_lo_addrs | ||||
|     ) | ||||
|     assert registry_addrs | ||||
| 
 | ||||
|     loglevel = (loglevel or log._default_loglevel).upper() | ||||
|     loglevel = ( | ||||
|         loglevel | ||||
|         or log._default_loglevel | ||||
|     ).upper() | ||||
| 
 | ||||
|     if debug_mode and _spawn._spawn_method == 'trio': | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and _spawn._spawn_method == 'trio' | ||||
|     ): | ||||
|         _state._runtime_vars['_debug_mode'] = True | ||||
| 
 | ||||
|         # expose internal debug module to every actor allowing | ||||
|         # for use of ``await tractor.breakpoint()`` | ||||
|         enable_modules.append('tractor._debug') | ||||
|         # expose internal debug module to every actor allowing for | ||||
|         # use of ``await tractor.pause()`` | ||||
|         enable_modules.append('tractor.devx._debug') | ||||
| 
 | ||||
|         # if debug mode get's enabled *at least* use that level of | ||||
|         # logging for some informative console prompts. | ||||
|  | @ -150,97 +228,187 @@ async def open_root_actor( | |||
|         ): | ||||
|             loglevel = 'PDB' | ||||
| 
 | ||||
| 
 | ||||
|     elif debug_mode: | ||||
|         raise RuntimeError( | ||||
|             "Debug mode is only supported for the `trio` backend!" | ||||
|         ) | ||||
| 
 | ||||
|     log.get_console_log(loglevel) | ||||
|     assert loglevel | ||||
|     _log = log.get_console_log(loglevel) | ||||
|     assert _log | ||||
| 
 | ||||
|     # TODO: factor this into `.devx._stackscope`!! | ||||
|     if ( | ||||
|         debug_mode | ||||
|         and | ||||
|         enable_stack_on_sig | ||||
|     ): | ||||
|         from .devx._stackscope import enable_stack_on_sig | ||||
|         enable_stack_on_sig() | ||||
| 
 | ||||
|     # closed into below ping task-func | ||||
|     ponged_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     async def ping_tpt_socket( | ||||
|         addr: tuple[str, int], | ||||
|         timeout: float = 1, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Attempt temporary connection to see if a registry is | ||||
|         listening at the requested address by a transport layer | ||||
|         ping. | ||||
| 
 | ||||
|         If a connection can't be made quickly we assume no | ||||
|         server is listening at that addr. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|         # make a temporary connection to see if an arbiter exists, | ||||
|         # if one can't be made quickly we assume none exists. | ||||
|         arbiter_found = False | ||||
| 
 | ||||
|         # TODO: this connect-and-bail forces us to have to carefully | ||||
|         # rewrap TCP 104-connection-reset errors as EOF so as to avoid | ||||
|         # propagating cancel-causing errors to the channel-msg loop | ||||
|         # machinery.  Likely it would be better to eventually have | ||||
|         # a "discovery" protocol with basic handshake instead. | ||||
|         with trio.move_on_after(1): | ||||
|             async with _connect_chan(host, port): | ||||
|                 arbiter_found = True | ||||
|             # TODO: this connect-and-bail forces us to have to | ||||
|             # carefully rewrap TCP 104-connection-reset errors as | ||||
|             # EOF so as to avoid propagating cancel-causing errors | ||||
|             # to the channel-msg loop machinery. Likely it would | ||||
|             # be better to eventually have a "discovery" protocol | ||||
|             # with basic handshake instead? | ||||
|             with trio.move_on_after(timeout): | ||||
|                 async with _connect_chan(*addr): | ||||
|                     ponged_addrs.append(addr) | ||||
| 
 | ||||
|         except OSError: | ||||
|             # TODO: make this a "discovery" log level? | ||||
|         logger.warning(f"No actor registry found @ {host}:{port}") | ||||
|             logger.info( | ||||
|                 f'No actor registry found @ {addr}\n' | ||||
|             ) | ||||
| 
 | ||||
|     # create a local actor and start up its main routine/task | ||||
|     if arbiter_found: | ||||
|     async with trio.open_nursery() as tn: | ||||
|         for addr in registry_addrs: | ||||
|             tn.start_soon( | ||||
|                 ping_tpt_socket, | ||||
|                 tuple(addr),  # TODO: just drop this requirement? | ||||
|             ) | ||||
| 
 | ||||
|     trans_bind_addrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     # Create a new local root-actor instance which IS NOT THE | ||||
|     # REGISTRAR | ||||
|     if ponged_addrs: | ||||
|         if ensure_registry: | ||||
|             raise RuntimeError( | ||||
|                 f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|                 'registry socket(s) already bound' | ||||
|             ) | ||||
| 
 | ||||
|         # we were able to connect to an arbiter | ||||
|         logger.info(f"Arbiter seems to exist @ {host}:{port}") | ||||
|         logger.info( | ||||
|             f'Registry(s) seem(s) to exist @ {ponged_addrs}' | ||||
|         ) | ||||
| 
 | ||||
|         actor = Actor( | ||||
|             name or 'anonymous', | ||||
|             arbiter_addr=registry_addr, | ||||
|             name=name or 'anonymous', | ||||
|             registry_addrs=ponged_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         host, port = (host, 0) | ||||
|         # DO NOT use the registry_addrs as the transport server | ||||
|         # addrs for this new non-registrar, root-actor. | ||||
|         for host, port in ponged_addrs: | ||||
|             # NOTE: zero triggers dynamic OS port allocation | ||||
|             trans_bind_addrs.append((host, 0)) | ||||
| 
 | ||||
|     # Start this local actor as the "registrar", aka a regular | ||||
|     # actor who manages the local registry of "mailboxes" of | ||||
|     # other process-tree-local sub-actors. | ||||
|     else: | ||||
|         # start this local actor as the arbiter (aka a regular actor who | ||||
|         # manages the local registry of "mailboxes") | ||||
| 
 | ||||
|         # Note that if the current actor is the arbiter it is desirable | ||||
|         # for it to stay up indefinitely until a re-election process has | ||||
|         # taken place - which is not implemented yet FYI). | ||||
|         # NOTE that if the current actor IS THE REGISTRAR, the | ||||
|         # following init steps are taken: | ||||
|         # - the transport layer server is bound to each (host, port) | ||||
|         #   pair defined in provided registry_addrs, or the default. | ||||
|         trans_bind_addrs = registry_addrs | ||||
| 
 | ||||
|         # - it is normally desirable for any registrar to stay up | ||||
|         #   indefinitely until either all registered (child/sub) | ||||
|         #   actors are terminated (via SC supervision) or, | ||||
|         #   a re-election process has taken place.  | ||||
|         # NOTE: all of ^ which is not implemented yet - see: | ||||
|         # https://github.com/goodboy/tractor/issues/216 | ||||
|         # https://github.com/goodboy/tractor/pull/348 | ||||
|         # https://github.com/goodboy/tractor/issues/296 | ||||
| 
 | ||||
|         actor = Arbiter( | ||||
|             name or 'arbiter', | ||||
|             arbiter_addr=registry_addr, | ||||
|             name or 'registrar', | ||||
|             registry_addrs=registry_addrs, | ||||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         # XXX, in case the root actor runtime was actually run from | ||||
|         # `tractor.to_asyncio.run_as_asyncio_guest()` and NOT | ||||
|         # `.trio.run()`. | ||||
|         actor._infected_aio = _state._runtime_vars['_is_infected_aio'] | ||||
| 
 | ||||
|     # Start up main task set via core actor-runtime nurseries. | ||||
|     try: | ||||
|         # assign process-local actor | ||||
|         _state._current_actor = actor | ||||
| 
 | ||||
|         # start local channel-server and fake the portal API | ||||
|         # NOTE: this won't block since we provide the nursery | ||||
|         logger.info(f"Starting local {actor} @ {host}:{port}") | ||||
|         ml_addrs_str: str = '\n'.join( | ||||
|             f'@{addr}' for addr in trans_bind_addrs | ||||
|         ) | ||||
|         logger.info( | ||||
|             f'Starting local {actor.uid} on the following transport addrs:\n' | ||||
|             f'{ml_addrs_str}' | ||||
|         ) | ||||
| 
 | ||||
|         # start the actor runtime in a new task | ||||
|         async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|             # ``_runtime.async_main()`` creates an internal nursery and | ||||
|             # thus blocks here until the entire underlying actor tree has | ||||
|             # terminated thereby conducting structured concurrency. | ||||
| 
 | ||||
|             # ``_runtime.async_main()`` creates an internal nursery | ||||
|             # and blocks here until any underlying actor(-process) | ||||
|             # tree has terminated thereby conducting so called | ||||
|             # "end-to-end" structured concurrency throughout an | ||||
|             # entire hierarchical python sub-process set; all | ||||
|             # "actor runtime" primitives are SC-compat and thus all | ||||
|             # transitively spawned actors/processes must be as | ||||
|             # well. | ||||
|             await nursery.start( | ||||
|                 partial( | ||||
|                     async_main, | ||||
|                     actor, | ||||
|                     accept_addr=(host, port), | ||||
|                     accept_addrs=trans_bind_addrs, | ||||
|                     parent_addr=None | ||||
|                 ) | ||||
|             ) | ||||
|             try: | ||||
|                 yield actor | ||||
| 
 | ||||
|             except ( | ||||
|                 Exception, | ||||
|                 BaseExceptionGroup, | ||||
|             ) as err: | ||||
| 
 | ||||
|                 entered = await _debug._maybe_enter_pm(err) | ||||
|                 # XXX NOTE XXX see equiv note inside | ||||
|                 # `._runtime.Actor._stream_handler()` where in the | ||||
|                 # non-root or root-that-opened-this-manually case we | ||||
|                 # wait for the local actor-nursery to exit before | ||||
|                 # exiting the transport channel handler. | ||||
|                 entered: bool = await _debug._maybe_enter_pm( | ||||
|                     err, | ||||
|                     api_frame=inspect.currentframe(), | ||||
|                     debug_filter=debug_filter, | ||||
|                 ) | ||||
| 
 | ||||
|                 if not entered and not is_multi_cancelled(err): | ||||
|                     logger.exception("Root actor crashed:") | ||||
|                 if ( | ||||
|                     not entered | ||||
|                     and | ||||
|                     not is_multi_cancelled( | ||||
|                         err, | ||||
|                     ) | ||||
|                 ): | ||||
|                     logger.exception('Root actor crashed\n') | ||||
| 
 | ||||
|                 # always re-raise | ||||
|                 # ALWAYS re-raise any error bubbled up from the | ||||
|                 # runtime! | ||||
|                 raise | ||||
| 
 | ||||
|             finally: | ||||
|  | @ -253,20 +421,29 @@ async def open_root_actor( | |||
|                 #     for an in nurseries: | ||||
|                 #         tempn.start_soon(an.exited.wait) | ||||
| 
 | ||||
|                 logger.cancel("Shutting down root actor") | ||||
|                 await actor.cancel( | ||||
|                     requesting_uid=actor.uid, | ||||
|                 logger.info( | ||||
|                     'Closing down root actor' | ||||
|                 ) | ||||
|                 await actor.cancel(None)  # self cancel | ||||
|     finally: | ||||
|         _state._current_actor = None | ||||
|         _state._last_actor_terminated = actor | ||||
| 
 | ||||
|         # restore breakpoint hook state | ||||
|         # restore built-in `breakpoint()` hook state | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             maybe_enable_greenback | ||||
|         ): | ||||
|             if builtin_bp_handler is not None: | ||||
|                 sys.breakpointhook = builtin_bp_handler | ||||
| 
 | ||||
|             if orig_bp_path is not None: | ||||
|                 os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
| 
 | ||||
|             else: | ||||
|                 # clear env back to having no entry | ||||
|             os.environ.pop('PYTHONBREAKPOINT') | ||||
|                 os.environ.pop('PYTHONBREAKPOINT', None) | ||||
| 
 | ||||
|         logger.runtime("Root actor terminated") | ||||
| 
 | ||||
|  | @ -276,10 +453,7 @@ def run_daemon( | |||
| 
 | ||||
|     # runtime kwargs | ||||
|     name: str | None = 'root', | ||||
|     registry_addr: tuple[str, int] = ( | ||||
|         _default_arbiter_host, | ||||
|         _default_arbiter_port, | ||||
|     ), | ||||
|     registry_addrs: list[tuple[str, int]] = _default_lo_addrs, | ||||
| 
 | ||||
|     start_method: str | None = None, | ||||
|     debug_mode: bool = False, | ||||
|  | @ -303,7 +477,7 @@ def run_daemon( | |||
|     async def _main(): | ||||
| 
 | ||||
|         async with open_root_actor( | ||||
|             registry_addr=registry_addr, | ||||
|             registry_addrs=registry_addrs, | ||||
|             name=name, | ||||
|             start_method=start_method, | ||||
|             debug_mode=debug_mode, | ||||
(file diff suppressed because it is too large)

2423	tractor/_runtime.py
(file diff suppressed because it is too large)
							|  | @ -0,0 +1,833 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| SC friendly shared memory management geared at real-time | ||||
| processing. | ||||
| 
 | ||||
| Support for ``numpy`` compatible array-buffers is provided but is | ||||
| considered optional within the context of this runtime-library. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from sys import byteorder | ||||
| import time | ||||
| from typing import Optional | ||||
| from multiprocessing import shared_memory as shm | ||||
| from multiprocessing.shared_memory import ( | ||||
|     SharedMemory, | ||||
|     ShareableList, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import Struct | ||||
| import tractor | ||||
| 
 | ||||
| from .log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| _USE_POSIX = getattr(shm, '_USE_POSIX', False) | ||||
| if _USE_POSIX: | ||||
|     from _posixshmem import shm_unlink | ||||
| 
 | ||||
| 
 | ||||
| try: | ||||
|     import numpy as np | ||||
|     from numpy.lib import recfunctions as rfn | ||||
|     # import nptyping | ||||
| except ImportError: | ||||
|     pass | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| def disable_mantracker(): | ||||
|     ''' | ||||
|     Disable all ``multiprocessing`` "resource tracking" machinery since | ||||
|     it's an absolute multi-threaded mess of non-SC madness. | ||||
| 
 | ||||
|     ''' | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|     class ManTracker(mantracker.ResourceTracker): | ||||
|         def register(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def unregister(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def ensure_running(self): | ||||
|             pass | ||||
| 
 | ||||
|     # "know your land and know your prey" | ||||
|     # https://www.dailymotion.com/video/x6ozzco | ||||
|     mantracker._resource_tracker = ManTracker() | ||||
|     mantracker.register = mantracker._resource_tracker.register | ||||
|     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     mantracker.unregister = mantracker._resource_tracker.unregister | ||||
|     mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| 
 | ||||
| 
 | ||||
| disable_mantracker() | ||||
| 
 | ||||
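An effect sketch: after the monkey-patch above, the stdlib tracker's module-level hooks are inert, so shm segments created here won't be prematurely unlinked out from under sibling processes (the segment name is a placeholder):

    from multiprocessing import resource_tracker

    # both are now no-ops routed to the patched `ManTracker`:
    resource_tracker.register('/some_shm_name', 'shared_memory')
    resource_tracker.unregister('/some_shm_name', 'shared_memory')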
| 
 | ||||
| class SharedInt: | ||||
|     ''' | ||||
|     Wrapper around a single entry shared memory array which | ||||
|     holds an ``int`` value used as an index counter. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         shm: SharedMemory, | ||||
|     ) -> None: | ||||
|         self._shm = shm | ||||
| 
 | ||||
|     @property | ||||
|     def value(self) -> int: | ||||
|         return int.from_bytes(self._shm.buf, byteorder) | ||||
| 
 | ||||
|     @value.setter | ||||
|     def value(self, value) -> None: | ||||
|         self._shm.buf[:] = value.to_bytes(self._shm.size, byteorder) | ||||
| 
 | ||||
|     def destroy(self) -> None: | ||||
|         if _USE_POSIX: | ||||
|             # We manually unlink to bypass all the "resource tracker" | ||||
|             # nonsense meant for non-SC systems. | ||||
|             name = self._shm.name | ||||
|             try: | ||||
|                 shm_unlink(name) | ||||
|             except FileNotFoundError: | ||||
|                 # might be a teardown race here? | ||||
|                 log.warning(f'Shm for {name} already unlinked?') | ||||
| 
 | ||||
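A usage sketch for `SharedInt` as a cross-process counter; the 4-byte size and segment name are assumptions, and exact shm sizing semantics vary slightly per OS:

    from multiprocessing.shared_memory import SharedMemory

    counter = SharedInt(
        SharedMemory(name='_first_idx', create=True, size=4)
    )
    counter.value = 42      # encoded via `int.to_bytes()`
    assert counter.value == 42
    counter.destroy()       # POSIX: direct `shm_unlink()`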
| 
 | ||||
| class NDToken(Struct, frozen=True): | ||||
|     ''' | ||||
|     Internal representation of a shared memory ``numpy`` array "token" | ||||
|     which can be used to key and load a system (OS) wide shm entry | ||||
|     and correctly read the array by type signature. | ||||
| 
 | ||||
|     This type is msg safe. | ||||
| 
 | ||||
|     ''' | ||||
|     shm_name: str  # this serves as a "key" value | ||||
|     shm_first_index_name: str | ||||
|     shm_last_index_name: str | ||||
|     dtype_descr: tuple | ||||
|     size: int  # in struct-array index / row terms | ||||
| 
 | ||||
|     # TODO: use nptyping here on dtypes | ||||
|     @property | ||||
|     def dtype(self) -> list[tuple[str, str, tuple[int, ...]]]: | ||||
|         return np.dtype( | ||||
|             list( | ||||
|                 map(tuple, self.dtype_descr) | ||||
|             ) | ||||
|         ).descr | ||||
| 
 | ||||
|     def as_msg(self): | ||||
|         return self.to_dict() | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_msg(cls, msg: dict) -> NDToken: | ||||
|         if isinstance(msg, NDToken): | ||||
|             return msg | ||||
| 
 | ||||
|         # TODO: native struct decoding | ||||
|         # return _token_dec.decode(msg) | ||||
| 
 | ||||
|         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr'])) | ||||
|         return NDToken(**msg) | ||||
| 
 | ||||
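Tokens are "msg safe": a round-trip sketch through the dict form, using the `_make_token()` helper defined further below (key and size values are arbitrary examples):

    token = _make_token(key='quotes', size=100, dtype=np.dtype('f8'))
    token2 = NDToken.from_msg(token.as_msg())
    assert token2.shm_name == 'quotes'
    assert token2.size == 100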
| 
 | ||||
| # _token_dec = msgspec.msgpack.Decoder(NDToken) | ||||
| 
 | ||||
| # TODO: this api? | ||||
| # _known_tokens = tractor.ActorVar('_shm_tokens', {}) | ||||
| # _known_tokens = tractor.ContextStack('_known_tokens', ) | ||||
| # _known_tokens = trio.RunVar('shms', {}) | ||||
| 
 | ||||
| # TODO: this should maybe be provided via | ||||
| # a `.trionics.maybe_open_context()` wrapper factory? | ||||
| # process-local store of keys to tokens | ||||
| _known_tokens: dict[str, NDToken] = {} | ||||
| 
 | ||||
| 
 | ||||
| def get_shm_token(key: str) -> NDToken | None: | ||||
|     ''' | ||||
|     Convenience func to check if a token | ||||
|     for the provided key is known by this process. | ||||
| 
 | ||||
|     Returns either the ``numpy`` token or a string for a shared list. | ||||
| 
 | ||||
|     ''' | ||||
|     return _known_tokens.get(key) | ||||
| 
 | ||||
| 
 | ||||
| def _make_token( | ||||
|     key: str, | ||||
|     size: int, | ||||
|     dtype: np.dtype, | ||||
| 
 | ||||
| ) -> NDToken: | ||||
|     ''' | ||||
|     Create a serializable token that can be used | ||||
|     to access a shared array. | ||||
| 
 | ||||
|     ''' | ||||
|     return NDToken( | ||||
|         shm_name=key, | ||||
|         shm_first_index_name=key + "_first", | ||||
|         shm_last_index_name=key + "_last", | ||||
|         dtype_descr=tuple(np.dtype(dtype).descr), | ||||
|         size=size, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
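| # Editorial usage sketch (not part of the original module): build | ||||
| # a token for a (hypothetical) price-series struct-array and check | ||||
| # that it survives the msg-dict round trip used for IPC transport; | ||||
| # assumes `Struct.to_dict()` per the `.as_msg()` impl above. | ||||
| def _demo_token_roundtrip() -> None: | ||||
|     dtype = np.dtype([ | ||||
|         ('index', '<i8'), | ||||
|         ('close', '<f8'), | ||||
|     ]) | ||||
|     tok = _make_token( | ||||
|         key='demo_ohlc',  # hypothetical key | ||||
|         size=16, | ||||
|         dtype=dtype, | ||||
|     ) | ||||
|     msg: dict = tok.as_msg() | ||||
|     assert NDToken.from_msg(msg) == tok | ||||
| 
 | ||||
| 
 | ||||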
| class ShmArray: | ||||
|     ''' | ||||
|     A shared memory ``numpy.ndarray`` API. | ||||
| 
 | ||||
|     An underlying shared memory buffer is allocated based on | ||||
|     a user specified ``numpy.ndarray``. This fixed size array | ||||
|     can be read and written to by pushing data both onto the "front" | ||||
|     or "back" of a set index range. The indexes for the "first" and | ||||
|     "last" index are themselves stored in shared memory (accessed via | ||||
|     ``SharedInt`` interfaces) values such that multiple processes can | ||||
|     interact with the same array using a synchronized-index. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         shmarr: np.ndarray, | ||||
|         first: SharedInt, | ||||
|         last: SharedInt, | ||||
|         shm: SharedMemory, | ||||
|         # readonly: bool = True, | ||||
|     ) -> None: | ||||
|         self._array = shmarr | ||||
| 
 | ||||
|         # shared indexes for the first and last entries | ||||
|         # corresponding to filled data | ||||
|         self._first = first | ||||
|         self._last = last | ||||
| 
 | ||||
|         self._len = len(shmarr) | ||||
|         self._shm = shm | ||||
|         self._post_init: bool = False | ||||
| 
 | ||||
|         # pushing data does not write the index (aka primary key) | ||||
|         self._write_fields: list[str] | None = None | ||||
|         dtype = shmarr.dtype | ||||
|         if dtype.fields: | ||||
|             self._write_fields = list(shmarr.dtype.fields.keys())[1:] | ||||
| 
 | ||||
|     # TODO: ringbuf api? | ||||
| 
 | ||||
|     @property | ||||
|     def _token(self) -> NDToken: | ||||
|         return NDToken( | ||||
|             shm_name=self._shm.name, | ||||
|             shm_first_index_name=self._first._shm.name, | ||||
|             shm_last_index_name=self._last._shm.name, | ||||
|             dtype_descr=tuple(self._array.dtype.descr), | ||||
|             size=self._len, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def token(self) -> dict: | ||||
|         """Shared memory token that can be serialized and used by | ||||
|         another process to attach to this array. | ||||
|         """ | ||||
|         return self._token.as_msg() | ||||
| 
 | ||||
|     @property | ||||
|     def index(self) -> int: | ||||
|         return self._last.value % self._len | ||||
| 
 | ||||
|     @property | ||||
|     def array(self) -> np.ndarray: | ||||
|         ''' | ||||
|         Return an up-to-date ``np.ndarray`` view of the | ||||
|         so-far-written data to the underlying shm buffer. | ||||
| 
 | ||||
|         ''' | ||||
|         a = self._array[self._first.value:self._last.value] | ||||
| 
 | ||||
|         # first, last = self._first.value, self._last.value | ||||
|         # a = self._array[first:last] | ||||
| 
 | ||||
|         # TODO: eventually comment this once we've not seen it in the | ||||
|         # wild in a long time.. | ||||
|         # XXX: race where first/last indexes cause a reader | ||||
|         # to load an empty array.. | ||||
|         if len(a) == 0 and self._post_init: | ||||
|             raise RuntimeError('Empty array race condition hit!?') | ||||
|             # breakpoint() | ||||
| 
 | ||||
|         return a | ||||
| 
 | ||||
|     def ustruct( | ||||
|         self, | ||||
|         fields: Optional[list[str]] = None, | ||||
| 
 | ||||
|         # type that all field values will be cast to | ||||
|         # in the returned view. | ||||
|         common_dtype: np.dtype = float, | ||||
| 
 | ||||
|     ) -> np.ndarray: | ||||
| 
 | ||||
|         array = self._array | ||||
| 
 | ||||
|         if fields: | ||||
|             selection = array[fields] | ||||
|             # fcount = len(fields) | ||||
|         else: | ||||
|             selection = array | ||||
|             # fcount = len(array.dtype.fields) | ||||
| 
 | ||||
|         # XXX: manual ``.view()`` attempt that also doesn't work. | ||||
|         # uview = selection.view( | ||||
|         #     dtype='<f16', | ||||
|         # ).reshape(-1, 4, order='A') | ||||
| 
 | ||||
|         # assert len(selection) == len(uview) | ||||
| 
 | ||||
|         u = rfn.structured_to_unstructured( | ||||
|             selection, | ||||
|             # dtype=float, | ||||
|             copy=True, | ||||
|         ) | ||||
| 
 | ||||
|         # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf) | ||||
|         # array[:] = a[:] | ||||
|         return u | ||||
|         # return ShmArray( | ||||
|         #     shmarr=u, | ||||
|         #     first=self._first, | ||||
|         #     last=self._last, | ||||
|         #     shm=self._shm | ||||
|         # ) | ||||
| 
 | ||||
|     def last( | ||||
|         self, | ||||
|         length: int = 1, | ||||
| 
 | ||||
|     ) -> np.ndarray: | ||||
|         ''' | ||||
|         Return the last ``length``'s worth of ("row") entries from the | ||||
|         array. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.array[-length:] | ||||
| 
 | ||||
|     def push( | ||||
|         self, | ||||
|         data: np.ndarray, | ||||
| 
 | ||||
|         field_map: Optional[dict[str, str]] = None, | ||||
|         prepend: bool = False, | ||||
|         update_first: bool = True, | ||||
|         start: int | None = None, | ||||
| 
 | ||||
|     ) -> int: | ||||
|         ''' | ||||
|         Ring buffer like "push" to append data | ||||
|         into the buffer and return updated "last" index. | ||||
| 
 | ||||
|         NB: no actual ring logic yet to give a "loop around" on an | ||||
|         overflow condition; a push past the end will raise. | ||||
| 
 | ||||
|         ''' | ||||
|         length = len(data) | ||||
| 
 | ||||
|         if prepend: | ||||
|             index = (start or self._first.value) - length | ||||
| 
 | ||||
|             if index < 0: | ||||
|                 raise ValueError( | ||||
|                     f'Array size of {self._len} was overrun during prepend.\n' | ||||
|                     f'You have passed {abs(index)} too many datums.' | ||||
|                 ) | ||||
| 
 | ||||
|         else: | ||||
|             index = start if start is not None else self._last.value | ||||
| 
 | ||||
|         end = index + length | ||||
| 
 | ||||
|         if field_map: | ||||
|             src_names, dst_names = zip(*field_map.items()) | ||||
|         else: | ||||
|             dst_names = src_names = self._write_fields | ||||
| 
 | ||||
|         try: | ||||
|             self._array[ | ||||
|                 list(dst_names) | ||||
|             ][index:end] = data[list(src_names)][:] | ||||
| 
 | ||||
|             # NOTE: there was a race here between updating | ||||
|             # the first and last indices and when the next reader | ||||
|             # tries to access ``.array`` (which due to the index | ||||
|             # overlap will be empty). Pretty sure we've fixed it now | ||||
|             # but leaving this here as a reminder. | ||||
|             if ( | ||||
|                 prepend | ||||
|                 and update_first | ||||
|                 and length | ||||
|             ): | ||||
|                 assert index < self._first.value | ||||
| 
 | ||||
|             if ( | ||||
|                 index < self._first.value | ||||
|                 and update_first | ||||
|             ): | ||||
|                 assert prepend, 'prepend=True not passed but index decreased?' | ||||
|                 self._first.value = index | ||||
| 
 | ||||
|             elif not prepend: | ||||
|                 self._last.value = end | ||||
| 
 | ||||
|             self._post_init = True | ||||
|             return end | ||||
| 
 | ||||
|         except ValueError as err: | ||||
|             if field_map: | ||||
|                 raise | ||||
| 
 | ||||
|             # should raise if diff detected | ||||
|             self.diff_err_fields(data) | ||||
|             raise err | ||||
| 
 | ||||
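|     # Editorial sketch: given some `shm: ShmArray` with a structured | ||||
|     # dtype, appending a batch and reading it back looks like, | ||||
|     # | ||||
|     # >>> rows = np.zeros(2, dtype=shm._array.dtype) | ||||
|     # >>> end: int = shm.push(rows)  # writes at `._last.value` | ||||
|     # >>> shm.array[-2:]             # the rows just written | ||||
| 
 | ||||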
|     def diff_err_fields( | ||||
|         self, | ||||
|         data: np.ndarray, | ||||
|     ) -> None: | ||||
|         # reraise with any field discrepancy | ||||
|         our_fields, their_fields = ( | ||||
|             set(self._array.dtype.fields), | ||||
|             set(data.dtype.fields), | ||||
|         ) | ||||
| 
 | ||||
|         only_in_ours = our_fields - their_fields | ||||
|         only_in_theirs = their_fields - our_fields | ||||
| 
 | ||||
|         if only_in_ours: | ||||
|             raise TypeError( | ||||
|                 f"Input array is missing field(s): {only_in_ours}" | ||||
|             ) | ||||
|         elif only_in_theirs: | ||||
|             raise TypeError( | ||||
|                 f"Input array has unknown field(s): {only_in_theirs}" | ||||
|             ) | ||||
| 
 | ||||
|     # TODO: support "silent" prepends that don't update ._first.value? | ||||
|     def prepend( | ||||
|         self, | ||||
|         data: np.ndarray, | ||||
|     ) -> int: | ||||
|         end = self.push(data, prepend=True) | ||||
|         assert end | ||||
|         return end | ||||
| 
 | ||||
|     def close(self) -> None: | ||||
|         self._first._shm.close() | ||||
|         self._last._shm.close() | ||||
|         self._shm.close() | ||||
| 
 | ||||
|     def destroy(self) -> None: | ||||
|         if _USE_POSIX: | ||||
|             # We manually unlink to bypass all the "resource tracker" | ||||
|             # nonsense meant for non-SC systems. | ||||
|             shm_unlink(self._shm.name) | ||||
| 
 | ||||
|         self._first.destroy() | ||||
|         self._last.destroy() | ||||
| 
 | ||||
|     def flush(self) -> None: | ||||
|         # TODO: flush to a storage backend like marketstore? | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| def open_shm_ndarray( | ||||
|     size: int, | ||||
|     key: str | None = None, | ||||
|     dtype: np.dtype | None = None, | ||||
|     append_start_index: int | None = None, | ||||
|     readonly: bool = False, | ||||
| 
 | ||||
| ) -> ShmArray: | ||||
|     ''' | ||||
|     Open a shared memory ``numpy`` array using the standard library. | ||||
| 
 | ||||
|     This call unlinks (aka permanently destroys) the buffer on teardown | ||||
|     and thus should be used from the parent-most accessor (process). | ||||
| 
 | ||||
|     ''' | ||||
|     # create new shared mem segment for which we | ||||
|     # have write permission | ||||
|     a = np.zeros(size, dtype=dtype) | ||||
|     a['index'] = np.arange(len(a)) | ||||
| 
 | ||||
|     shm = SharedMemory( | ||||
|         name=key, | ||||
|         create=True, | ||||
|         size=a.nbytes | ||||
|     ) | ||||
|     array = np.ndarray( | ||||
|         a.shape, | ||||
|         dtype=a.dtype, | ||||
|         buffer=shm.buf | ||||
|     ) | ||||
|     array[:] = a[:] | ||||
|     array.setflags(write=int(not readonly)) | ||||
| 
 | ||||
|     token = _make_token( | ||||
|         key=key, | ||||
|         size=size, | ||||
|         dtype=dtype, | ||||
|     ) | ||||
| 
 | ||||
|     # create single-entry arrays for storing the first and last indices | ||||
|     first = SharedInt( | ||||
|         shm=SharedMemory( | ||||
|             name=token.shm_first_index_name, | ||||
|             create=True, | ||||
|             size=4,  # std int | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
|     last = SharedInt( | ||||
|         shm=SharedMemory( | ||||
|             name=token.shm_last_index_name, | ||||
|             create=True, | ||||
|             size=4,  # std int | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
|     # Start the "real-time" append-updated (or "pushed-to") section | ||||
|     # after some start index: ``append_start_index``. This allows appending | ||||
|     # from a start point in the array which isn't the 0 index and looks | ||||
|     # something like, | ||||
|     # ------------------------- | ||||
|     # |              |        i | ||||
|     # _________________________ | ||||
|     # <-------------> <-------> | ||||
|     #  history         real-time | ||||
|     # | ||||
|     # Once fully "prepended", the history section will start at | ||||
|     # ``ShmArray._first.value: int = 0`` and the yet-to-be written | ||||
|     # real-time section will start at ``ShmArray.index: int``. | ||||
| 
 | ||||
|     # this sets the index nearly 2/3rds into the length of | ||||
|     # the buffer, leaving at least a "day's worth of second samples" | ||||
|     # for the real-time section. | ||||
|     if append_start_index is None: | ||||
|         append_start_index = round(size * 0.616) | ||||
| 
 | ||||
|     last.value = first.value = append_start_index | ||||
| 
 | ||||
|     shmarr = ShmArray( | ||||
|         array, | ||||
|         first, | ||||
|         last, | ||||
|         shm, | ||||
|     ) | ||||
| 
 | ||||
|     assert shmarr._token == token | ||||
|     _known_tokens[key] = shmarr.token | ||||
| 
 | ||||
|     # "unlink" created shm on process teardown by | ||||
|     # pushing teardown calls onto actor context stack | ||||
|     stack = tractor.current_actor().lifetime_stack | ||||
|     stack.callback(shmarr.close) | ||||
|     stack.callback(shmarr.destroy) | ||||
| 
 | ||||
|     return shmarr | ||||
| 
 | ||||
| 
 | ||||
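| # Editorial usage sketch (not part of the original module): allocate | ||||
| # a small buffer and append a few rows; assumes an active `tractor` | ||||
| # runtime since teardown callbacks are registered on the actor's | ||||
| # lifetime stack above; the key and dtype are hypothetical. | ||||
| def _demo_open_and_push() -> ShmArray: | ||||
|     dtype = np.dtype([ | ||||
|         ('index', '<i8'),  # required by the `a['index']` init above | ||||
|         ('close', '<f8'), | ||||
|     ]) | ||||
|     shm = open_shm_ndarray( | ||||
|         size=100, | ||||
|         key='demo_ohlc', | ||||
|         dtype=dtype, | ||||
|     ) | ||||
|     rows = np.zeros(3, dtype=dtype) | ||||
|     shm.push(rows)  # appends at `append_start_index` | ||||
|     assert len(shm.array) == 3 | ||||
|     return shm | ||||
| 
 | ||||
| 
 | ||||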
| def attach_shm_ndarray( | ||||
|     token: tuple[str, str, tuple[str, str]], | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> ShmArray: | ||||
|     ''' | ||||
|     Attach to an existing shared memory array previously | ||||
|     created by another process using ``open_shm_ndarray()``. | ||||
| 
 | ||||
|     No new shared mem is allocated but wrapper types for read/write | ||||
|     access are constructed. | ||||
| 
 | ||||
|     ''' | ||||
|     token = NDToken.from_msg(token) | ||||
|     key = token.shm_name | ||||
| 
 | ||||
|     if key in _known_tokens: | ||||
|         assert NDToken.from_msg(_known_tokens[key]) == token, "WTF" | ||||
| 
 | ||||
|     # XXX: ugh, looks like due to the ``shm_open()`` C api we can't | ||||
|     # actually place files in a subdir, see discussion here: | ||||
|     # https://stackoverflow.com/a/11103289 | ||||
| 
 | ||||
|     # attach to array buffer and view as per dtype | ||||
|     _err: Optional[Exception] = None | ||||
|     for _ in range(3): | ||||
|         try: | ||||
|             shm = SharedMemory( | ||||
|                 name=key, | ||||
|                 create=False, | ||||
|             ) | ||||
|             break | ||||
|         except OSError as oserr: | ||||
|             _err = oserr | ||||
|             time.sleep(0.1) | ||||
|     else: | ||||
|         if _err: | ||||
|             raise _err | ||||
| 
 | ||||
|     shmarr = np.ndarray( | ||||
|         (token.size,), | ||||
|         dtype=token.dtype, | ||||
|         buffer=shm.buf | ||||
|     ) | ||||
|     shmarr.setflags(write=int(not readonly)) | ||||
| 
 | ||||
|     first = SharedInt( | ||||
|         shm=SharedMemory( | ||||
|             name=token.shm_first_index_name, | ||||
|             create=False, | ||||
|             size=4,  # std int | ||||
|         ), | ||||
|     ) | ||||
|     last = SharedInt( | ||||
|         shm=SharedMemory( | ||||
|             name=token.shm_last_index_name, | ||||
|             create=False, | ||||
|             size=4,  # std int | ||||
|         ), | ||||
|     ) | ||||
| 
 | ||||
|     # make sure we can read | ||||
|     first.value | ||||
| 
 | ||||
|     sha = ShmArray( | ||||
|         shmarr, | ||||
|         first, | ||||
|         last, | ||||
|         shm, | ||||
|     ) | ||||
|     # read test | ||||
|     sha.array | ||||
| 
 | ||||
|     # Stash key -> token knowledge for future queries | ||||
|     # via `maybe_open_shm_ndarray()` but only after we know | ||||
|     # we can attach. | ||||
|     if key not in _known_tokens: | ||||
|         _known_tokens[key] = token | ||||
| 
 | ||||
|     # "close" attached shm on actor teardown | ||||
|     tractor.current_actor().lifetime_stack.callback(sha.close) | ||||
| 
 | ||||
|     return sha | ||||
| 
 | ||||
| 
 | ||||
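| # Editorial usage sketch (not part of the original module): from | ||||
| # another actor, attach using the token msg-dict received over IPC | ||||
| # from the allocating side; assumes an active `tractor` runtime. | ||||
| def _demo_attach(token_msg: dict) -> None: | ||||
|     sha = attach_shm_ndarray( | ||||
|         token=token_msg,  # any `NDToken.from_msg()`-able form | ||||
|         readonly=True, | ||||
|     ) | ||||
|     recent = sha.last(10)  # newest 10 rows | ||||
|     assert not recent.flags.writeable | ||||
| 
 | ||||
| 
 | ||||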
| def maybe_open_shm_ndarray( | ||||
|     key: str,  # unique identifier for segment | ||||
|     size: int, | ||||
|     dtype: np.dtype | None = None, | ||||
|     append_start_index: int = 0, | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> tuple[ShmArray, bool]: | ||||
|     ''' | ||||
|     Attempt to attach to a shared memory block using a "key" lookup | ||||
|     to registered blocks in the user's overall "system" registry | ||||
|     (presumes you don't have the block's explicit token). | ||||
| 
 | ||||
|     This function is meant to solve the problem of discovering whether | ||||
|     a shared array token has been allocated or discovered by the actor | ||||
|     running in **this** process. Systems where multiple actors may seek | ||||
|     to access a common block can use this function to attempt to acquire | ||||
|     a token as discovered by the actors who have previously stored | ||||
|     a "key" -> ``NDToken`` map in an actor local (aka python global) | ||||
|     variable. | ||||
| 
 | ||||
|     If you know the explicit ``NDToken`` for your memory segment instead | ||||
|     use ``attach_shm_ndarray()``. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         # see if we already know this key | ||||
|         token = _known_tokens[key] | ||||
|         return ( | ||||
|             attach_shm_ndarray( | ||||
|                 token=token, | ||||
|                 readonly=readonly, | ||||
|             ), | ||||
|             False,  # not newly opened | ||||
|         ) | ||||
|     except KeyError: | ||||
|         log.warning(f"Could not find {key} in shms cache") | ||||
|         if dtype: | ||||
|             token = _make_token( | ||||
|                 key, | ||||
|                 size=size, | ||||
|                 dtype=dtype, | ||||
|             ) | ||||
| 
 | ||||
|             # the block may already exist OS-side (allocated by some | ||||
|             # other process) even though this actor has no token | ||||
|             # cached, so try attaching with the freshly built token | ||||
|             # before falling through to a fresh allocation below. | ||||
|             try: | ||||
|                 return ( | ||||
|                     attach_shm_ndarray( | ||||
|                         token=token, | ||||
|                         readonly=readonly, | ||||
|                     ), | ||||
|                     False, | ||||
|                 ) | ||||
|             except FileNotFoundError: | ||||
|                 log.warning(f"Could not attach to shm with token {token}") | ||||
| 
 | ||||
|         # This actor does not know about memory | ||||
|         # associated with the provided "key". | ||||
|         # Attempt to open a block and expect | ||||
|         # to fail if a block has been allocated | ||||
|         # on the OS by someone else. | ||||
|         return ( | ||||
|             open_shm_ndarray( | ||||
|                 key=key, | ||||
|                 size=size, | ||||
|                 dtype=dtype, | ||||
|                 append_start_index=append_start_index, | ||||
|                 readonly=readonly, | ||||
|             ), | ||||
|             True, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
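| # Editorial usage sketch (not part of the original module): the | ||||
| # returned `(shm, opened)` pair tells the caller whether **this** | ||||
| # process did the allocating; the key and dtype are hypothetical. | ||||
| def _demo_maybe_open() -> None: | ||||
|     shm, opened = maybe_open_shm_ndarray( | ||||
|         key='demo_ohlc', | ||||
|         size=100, | ||||
|         dtype=np.dtype([('index', '<i8'), ('close', '<f8')]), | ||||
|         readonly=False, | ||||
|     ) | ||||
|     if opened: | ||||
|         # this actor allocated the block and will unlink it on | ||||
|         # teardown via its lifetime stack. | ||||
|         pass | ||||
| 
 | ||||
| 
 | ||||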
| class ShmList(ShareableList): | ||||
|     ''' | ||||
|     Carbon copy of ``.shared_memory.ShareableList`` with a few | ||||
|     enhancements: | ||||
| 
 | ||||
|     - readonly mode via instance var flag  `._readonly: bool` | ||||
|     - ``.__getitem__()`` accepts ``slice`` inputs | ||||
|     - exposes the underlying buffer "name" as a ``.key: str`` | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         sequence: list | None = None, | ||||
|         *, | ||||
|         name: str | None = None, | ||||
|         readonly: bool = True | ||||
| 
 | ||||
|     ) -> None: | ||||
|         self._readonly = readonly | ||||
|         self._key = name | ||||
|         return super().__init__( | ||||
|             sequence=sequence, | ||||
|             name=name, | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def key(self) -> str: | ||||
|         return self._key | ||||
| 
 | ||||
|     @property | ||||
|     def readonly(self) -> bool: | ||||
|         return self._readonly | ||||
| 
 | ||||
|     def __setitem__( | ||||
|         self, | ||||
|         position, | ||||
|         value, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         # mimic the ``numpy`` error | ||||
|         if self._readonly: | ||||
|             raise ValueError('assignment destination is read-only') | ||||
| 
 | ||||
|         return super().__setitem__(position, value) | ||||
| 
 | ||||
|     def __getitem__( | ||||
|         self, | ||||
|         indexish, | ||||
|     ) -> list: | ||||
| 
 | ||||
|         # NOTE: this is a non-writeable view (copy?) of the buffer | ||||
|         # in a new list instance. | ||||
|         if isinstance(indexish, slice): | ||||
|             return list(self)[indexish] | ||||
| 
 | ||||
|         return super().__getitem__(indexish) | ||||
| 
 | ||||
|     # TODO: should we offer a `.array` and `.push()` equivalent | ||||
|     # to the `ShmArray`? | ||||
|     # currently we have the following limitations: | ||||
|     # - can't write slices of input using traditional slice-assign | ||||
|     #   syntax due to the ``ShareableList.__setitem__()`` implementation. | ||||
|     # - ``list(shmlist)`` returns a non-mutable copy instead of | ||||
|     #   a writeable view which would be handier for numpy-style ops. | ||||
| 
 | ||||
| 
 | ||||
| def open_shm_list( | ||||
|     key: str, | ||||
|     sequence: list | None = None, | ||||
|     size: int = int(2 ** 10), | ||||
|     dtype: float | int | bool | str | bytes | None = float, | ||||
|     readonly: bool = True, | ||||
| 
 | ||||
| ) -> ShmList: | ||||
| 
 | ||||
|     if sequence is None: | ||||
|         default = { | ||||
|             float: 0., | ||||
|             int: 0, | ||||
|             bool: True, | ||||
|             str: 'doggy', | ||||
|             None: None, | ||||
|         }[dtype] | ||||
|         sequence = [default] * size | ||||
| 
 | ||||
|     shml = ShmList( | ||||
|         sequence=sequence, | ||||
|         name=key, | ||||
|         readonly=readonly, | ||||
|     ) | ||||
| 
 | ||||
|     # "close" attached shm on actor teardown | ||||
|     try: | ||||
|         actor = tractor.current_actor() | ||||
|         actor.lifetime_stack.callback(shml.shm.close) | ||||
|         actor.lifetime_stack.callback(shml.shm.unlink) | ||||
|     except RuntimeError: | ||||
|         log.warning('tractor runtime not active, skipping teardown steps') | ||||
| 
 | ||||
|     return shml | ||||
| 
 | ||||
| 
 | ||||
| def attach_shm_list( | ||||
|     key: str, | ||||
|     readonly: bool = False, | ||||
| 
 | ||||
| ) -> ShmList: | ||||
| 
 | ||||
|     return ShmList( | ||||
|         name=key, | ||||
|         readonly=readonly, | ||||
|     ) | ||||
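| 
 | ||||
| 
 | ||||
| # Editorial usage sketch (not part of the original module): one side | ||||
| # allocates a writeable list, any other side attaches (here shown | ||||
| # read-only) and gets the slice support added above; the key is | ||||
| # hypothetical. | ||||
| def _demo_shm_list() -> None: | ||||
|     shml = open_shm_list( | ||||
|         key='demo_ticks', | ||||
|         size=8, | ||||
|         dtype=float, | ||||
|         readonly=False, | ||||
|     ) | ||||
|     shml[0] = 3.14 | ||||
|     view = attach_shm_list( | ||||
|         key='demo_ticks', | ||||
|         readonly=True, | ||||
|     ) | ||||
|     assert view[:4][0] == 3.14  # slice reads return a plain list | ||||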
|  | @ -31,25 +31,28 @@ from typing import ( | |||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from trio import TaskStatus | ||||
| 
 | ||||
| from ._debug import ( | ||||
| from tractor.devx import ( | ||||
|     maybe_wait_for_debugger, | ||||
|     acquire_debug_lock, | ||||
| ) | ||||
| from ._state import ( | ||||
| from tractor._state import ( | ||||
|     current_actor, | ||||
|     is_main_process, | ||||
|     is_root_process, | ||||
|     debug_mode, | ||||
|     _runtime_vars, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor._portal import Portal | ||||
| from tractor._runtime import Actor | ||||
| from tractor._entry import _mp_main | ||||
| from tractor._exceptions import ActorFailure | ||||
| from tractor.msg.types import ( | ||||
|     SpawnSpec, | ||||
| ) | ||||
| from .log import get_logger | ||||
| from ._portal import Portal | ||||
| from ._runtime import Actor | ||||
| from ._entry import _mp_main | ||||
| from ._exceptions import ActorFailure | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|  | @ -140,11 +143,13 @@ async def exhaust_portal( | |||
|     ''' | ||||
|     __tracebackhide__ = True | ||||
|     try: | ||||
|         log.debug(f"Waiting on final result from {actor.uid}") | ||||
|         log.debug( | ||||
|             f'Waiting on final result from {actor.uid}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: streams should never be reaped here since they should | ||||
|         # always be established and shutdown using a context manager api | ||||
|         final = await portal.result() | ||||
|         final: Any = await portal.wait_for_result() | ||||
| 
 | ||||
|     except ( | ||||
|         Exception, | ||||
|  | @ -152,13 +157,23 @@ async def exhaust_portal( | |||
|     ) as err: | ||||
|         # we reraise in the parent task via a ``BaseExceptionGroup`` | ||||
|         return err | ||||
| 
 | ||||
|     except trio.Cancelled as err: | ||||
|         # lol, of course we need this too ;P | ||||
|         # TODO: merge with above? | ||||
|         log.warning(f"Cancelled result waiter for {portal.actor.uid}") | ||||
|         log.warning( | ||||
|             'Cancelled portal result waiter task:\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'error: {err}\n' | ||||
|         ) | ||||
|         return err | ||||
| 
 | ||||
|     else: | ||||
|         log.debug(f"Returning final result: {final}") | ||||
|         log.debug( | ||||
|             f'Returning final result from portal:\n' | ||||
|             f'uid: {portal.channel.uid}\n' | ||||
|             f'result: {final}\n' | ||||
|         ) | ||||
|         return final | ||||
| 
 | ||||
| 
 | ||||
|  | @ -170,41 +185,75 @@ async def cancel_on_completion( | |||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Cancel actor gracefully once it's "main" portal's | ||||
|     Cancel actor gracefully once its "main" portal's | ||||
|     result arrives. | ||||
| 
 | ||||
|     Should only be called for actors spawned with `run_in_actor()`. | ||||
|     Should only be called for actors spawned via the | ||||
|     `Portal.run_in_actor()` API. | ||||
| 
 | ||||
|     => and really this API will be deprecated and should be | ||||
|     re-implemented as a `.hilevel.one_shot_task_nursery()`.. | ||||
| 
 | ||||
|     ''' | ||||
|     # if this call errors we store the exception for later | ||||
|     # in ``errors`` which will be reraised inside | ||||
|     # an exception group and we still send out a cancel request | ||||
|     result = await exhaust_portal(portal, actor) | ||||
|     result: Any|Exception = await exhaust_portal( | ||||
|         portal, | ||||
|         actor, | ||||
|     ) | ||||
|     if isinstance(result, Exception): | ||||
|         errors[actor.uid] = result | ||||
|         log.warning( | ||||
|             f"Cancelling {portal.channel.uid} after error {result}" | ||||
|         errors[actor.uid]: Exception = result | ||||
|         log.cancel( | ||||
|             'Cancelling subactor runtime due to error:\n\n' | ||||
|             f'Portal.cancel_actor() => {portal.channel.uid}\n\n' | ||||
|             f'error: {result}\n' | ||||
|         ) | ||||
| 
 | ||||
|     else: | ||||
|         log.runtime( | ||||
|             f"Cancelling {portal.channel.uid} gracefully " | ||||
|             f"after result {result}") | ||||
|             'Cancelling subactor gracefully:\n\n' | ||||
|             f'Portal.cancel_actor() => {portal.channel.uid}\n\n' | ||||
|             f'result: {result}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # cancel the process now that we have a final result | ||||
|     await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def do_hard_kill( | ||||
| async def hard_kill( | ||||
|     proc: trio.Process, | ||||
|     terminate_after: int = 3, | ||||
| 
 | ||||
|     terminate_after: int = 1.6, | ||||
|     # NOTE: for mucking with `.pause()`-ing inside the runtime | ||||
|     # whilst also hacking on it XD | ||||
|     # terminate_after: int = 99999, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Un-gracefully terminate an OS level `trio.Process` after timeout. | ||||
| 
 | ||||
|     Used in 2 main cases: | ||||
| 
 | ||||
|     - "unknown remote runtime state": a hanging/stalled actor that | ||||
|       isn't responding after sending a (graceful) runtime cancel | ||||
|       request via an IPC msg. | ||||
|     - "cancelled during spawn": a process who's actor runtime was | ||||
|       cancelled before full startup completed (such that | ||||
|       cancel-request-handling machinery was never fully | ||||
|       initialized) and thus a "cancel request msg" is never going | ||||
|       to be handled. | ||||
| 
 | ||||
|     ''' | ||||
|     log.cancel( | ||||
|         'Terminating sub-proc\n' | ||||
|         f'>x)\n' | ||||
|         f' |_{proc}\n' | ||||
|     ) | ||||
|     # NOTE: this timeout used to do nothing since we were shielding | ||||
|     # the ``.wait()`` inside ``new_proc()`` which will pretty much | ||||
|     # never release until the process exits; now it acts as | ||||
|     # a hard-kill time ultimatum. | ||||
|     log.debug(f"Terminating {proc}") | ||||
|     with trio.move_on_after(terminate_after) as cs: | ||||
| 
 | ||||
|         # NOTE: code below was copied verbatim from the now deprecated | ||||
|  | @ -215,6 +264,9 @@ async def do_hard_kill( | |||
|         # and wait for it to exit. If cancelled, kills the process and | ||||
|         # waits for it to finish exiting before propagating the | ||||
|         # cancellation. | ||||
|         # | ||||
|         # This code was originally triggered by ``proc.__aexit__()`` | ||||
|         # but now must be called manually. | ||||
|         with trio.CancelScope(shield=True): | ||||
|             if proc.stdin is not None: | ||||
|                 await proc.stdin.aclose() | ||||
|  | @ -230,16 +282,25 @@ async def do_hard_kill( | |||
|                 with trio.CancelScope(shield=True): | ||||
|                     await proc.wait() | ||||
| 
 | ||||
|     # XXX NOTE XXX: zombie squad dispatch: | ||||
|     # (should ideally never, but) If we do get here it means | ||||
|     # graceful termination of a process failed and we need to | ||||
|     # resort to OS level signalling to interrupt and cancel the | ||||
|     # (presumably stalled or hung) actor. Since we never allow | ||||
|     # zombies (as a feature) we ask the OS to send in the | ||||
|     # removal squad as the last resort. | ||||
|     if cs.cancelled_caught: | ||||
|         # XXX: should pretty much never get here unless we have | ||||
|         # to move the bits from ``proc.__aexit__()`` out and | ||||
|         # into here. | ||||
|         log.critical(f"#ZOMBIE_LORD_IS_HERE: {proc}") | ||||
|         # TODO: toss in the skynet-logo face as ascii art? | ||||
|         log.critical( | ||||
|             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' | ||||
|             '#T-800 deployed to collect zombie B0\n' | ||||
|             f'>x)\n' | ||||
|             f' |_{proc}\n' | ||||
|         ) | ||||
|         proc.kill() | ||||
| 
 | ||||
| 
 | ||||
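| # The escalation above, distilled (an editorial sketch, not the | ||||
| # actual implementation): allow a grace period for the child to | ||||
| # exit, then fall back to an OS-level SIGKILL. | ||||
| # | ||||
| # async def reap(proc: trio.Process, timeout: float = 1.6) -> None: | ||||
| #     with trio.move_on_after(timeout) as cs: | ||||
| #         await proc.wait()  # graceful join | ||||
| #     if cs.cancelled_caught: | ||||
| #         proc.kill()  # zombie squad: SIGKILL as last resort | ||||
| #         await proc.wait()  # reap for real this time | ||||
| 
 | ||||
| 
 | ||||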
| async def soft_wait( | ||||
| 
 | ||||
| async def soft_kill( | ||||
|     proc: ProcessType, | ||||
|     wait_func: Callable[ | ||||
|         [ProcessType], | ||||
|  | @ -248,15 +309,40 @@ async def soft_wait( | |||
|     portal: Portal, | ||||
| 
 | ||||
| ) -> None: | ||||
|     # Wait for proc termination but **dont' yet** call | ||||
|     # ``trio.Process.__aexit__()`` (it tears down stdio | ||||
|     # which will kill any waiting remote pdb trace). | ||||
|     # This is a "soft" (cancellable) join/reap. | ||||
|     uid = portal.channel.uid | ||||
|     ''' | ||||
|     Wait for proc termination but **don't yet** teardown | ||||
|     std-streams since it will clobber any ongoing pdb REPL | ||||
|     session. | ||||
| 
 | ||||
|     This is our "soft"/graceful, and thus itself also cancellable, | ||||
|     join/reap on an actor-runtime-in-process shutdown; it is | ||||
|     **not** the same as a "hard kill" via an OS signal (for that | ||||
|     see `.hard_kill()`). | ||||
| 
 | ||||
|     ''' | ||||
|     uid: tuple[str, str] = portal.channel.uid | ||||
|     try: | ||||
|         log.cancel(f'Soft waiting on actor:\n{uid}') | ||||
|         log.cancel( | ||||
|             'Soft killing sub-actor via portal request\n' | ||||
|             f'c)> {portal.chan.uid}\n' | ||||
|             f' |_{proc}\n' | ||||
|         ) | ||||
|         # wait on sub-proc to signal termination | ||||
|         await wait_func(proc) | ||||
| 
 | ||||
|     except trio.Cancelled: | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=_runtime_vars.get( | ||||
|                     '_debug_mode', False | ||||
|                 ), | ||||
|                 header_msg=( | ||||
|                     'Delaying `soft_kill()` subproc reaper while debugger locked..\n' | ||||
|                 ), | ||||
|                 # TODO: need a diff value then default? | ||||
|                 # poll_steps=9999999, | ||||
|             ) | ||||
| 
 | ||||
|         # if cancelled during a soft wait, cancel the child | ||||
|         # actor before entering the hard reap sequence | ||||
|         # below. This means we try to do a graceful teardown | ||||
|  | @ -267,22 +353,29 @@ async def soft_wait( | |||
| 
 | ||||
|             async def cancel_on_proc_deth(): | ||||
|                 ''' | ||||
|                 Cancel the actor cancel request if we detect that | ||||
|                 that the process terminated. | ||||
|                 "Cancel-the-cancel" request: if we detect that the | ||||
|                 underlying sub-process exited prior to | ||||
|                 a `Portal.cancel_actor()` call completing. | ||||
| 
 | ||||
|                 ''' | ||||
|                 await wait_func(proc) | ||||
|                 n.cancel_scope.cancel() | ||||
| 
 | ||||
|             # start a task to wait on the termination of the | ||||
|             # process by itself waiting on a (caller provided) wait | ||||
|             # function which should unblock when the target process | ||||
|             # has terminated. | ||||
|             n.start_soon(cancel_on_proc_deth) | ||||
| 
 | ||||
|             # send the actor-runtime a cancel request. | ||||
|             await portal.cancel_actor() | ||||
| 
 | ||||
|             if proc.poll() is None:  # type: ignore | ||||
|                 log.warning( | ||||
|                     'Actor still alive after cancel request:\n' | ||||
|                     f'{uid}' | ||||
|                     'Subactor still alive after cancel request?\n\n' | ||||
|                     f'uid: {uid}\n' | ||||
|                     f'|_{proc}\n' | ||||
|                 ) | ||||
| 
 | ||||
|                 n.cancel_scope.cancel() | ||||
|         raise | ||||
| 
 | ||||
|  | @ -294,7 +387,7 @@ async def new_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addr: tuple[str, int], | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
| 
 | ||||
|  | @ -306,7 +399,7 @@ async def new_proc( | |||
| ) -> None: | ||||
| 
 | ||||
|     # lookup backend spawning target | ||||
|     target = _methods[_spawn_method] | ||||
|     target: Callable = _methods[_spawn_method] | ||||
| 
 | ||||
|     # mark the new actor with the global spawn method | ||||
|     subactor._spawn_method = _spawn_method | ||||
|  | @ -316,7 +409,7 @@ async def new_proc( | |||
|         actor_nursery, | ||||
|         subactor, | ||||
|         errors, | ||||
|         bind_addr, | ||||
|         bind_addrs, | ||||
|         parent_addr, | ||||
|         _runtime_vars,  # run time vars | ||||
|         infect_asyncio=infect_asyncio, | ||||
|  | @ -331,7 +424,7 @@ async def trio_proc( | |||
|     errors: dict[tuple[str, str], Exception], | ||||
| 
 | ||||
|     # passed through to actor main | ||||
|     bind_addr: tuple[str, int], | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|  | @ -374,19 +467,21 @@ async def trio_proc( | |||
|         spawn_cmd.append("--asyncio") | ||||
| 
 | ||||
|     cancelled_during_spawn: bool = False | ||||
|     proc: trio.Process | None = None | ||||
|     proc: trio.Process|None = None | ||||
|     try: | ||||
|         try: | ||||
|             # TODO: needs ``trio_typing`` patch? | ||||
|             proc = await trio.lowlevel.open_process(spawn_cmd) | ||||
| 
 | ||||
|             log.runtime(f"Started {proc}") | ||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd) | ||||
|             log.runtime( | ||||
|                 'Started new child\n' | ||||
|                 f'|_{proc}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # wait for actor to spawn and connect back to us | ||||
|             # channel should have handshake completed by the | ||||
|             # local actor by the time we get a ref to it | ||||
|             event, chan = await actor_nursery._actor.wait_for_peer( | ||||
|                 subactor.uid) | ||||
|                 subactor.uid | ||||
|             ) | ||||
| 
 | ||||
|         except trio.Cancelled: | ||||
|             cancelled_during_spawn = True | ||||
|  | @ -415,18 +510,20 @@ async def trio_proc( | |||
|             portal, | ||||
|         ) | ||||
| 
 | ||||
|         # send additional init params | ||||
|         await chan.send({ | ||||
|             "_parent_main_data": subactor._parent_main_data, | ||||
|             "enable_modules": subactor.enable_modules, | ||||
|             "_arb_addr": subactor._arb_addr, | ||||
|             "bind_host": bind_addr[0], | ||||
|             "bind_port": bind_addr[1], | ||||
|             "_runtime_vars": _runtime_vars, | ||||
|         }) | ||||
|         # send a "spawning specification" which configures the | ||||
|         # initial runtime state of the child. | ||||
|         await chan.send( | ||||
|             SpawnSpec( | ||||
|                 _parent_main_data=subactor._parent_main_data, | ||||
|                 enable_modules=subactor.enable_modules, | ||||
|                 reg_addrs=subactor.reg_addrs, | ||||
|                 bind_addrs=bind_addrs, | ||||
|                 _runtime_vars=_runtime_vars, | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         # track subactor in current nursery | ||||
|         curr_actor = current_actor() | ||||
|         curr_actor: Actor = current_actor() | ||||
|         curr_actor._actoruid2nursery[subactor.uid] = actor_nursery | ||||
| 
 | ||||
|         # resume caller at next checkpoint now that child is up | ||||
|  | @ -448,7 +545,7 @@ async def trio_proc( | |||
|             # This is a "soft" (cancellable) join/reap which | ||||
|             # will remote cancel the actor on a ``trio.Cancelled`` | ||||
|             # condition. | ||||
|             await soft_wait( | ||||
|             await soft_kill( | ||||
|                 proc, | ||||
|                 trio.Process.wait, | ||||
|                 portal | ||||
|  | @ -457,8 +554,10 @@ async def trio_proc( | |||
|             # cancel result waiter that may have been spawned in | ||||
|             # tandem if not done already | ||||
|             log.cancel( | ||||
|                 "Cancelling existing result waiter task for " | ||||
|                 f"{subactor.uid}") | ||||
|                 'Cancelling portal result reaper task\n' | ||||
|                 f'>c)\n' | ||||
|                 f' |_{subactor.uid}\n' | ||||
|             ) | ||||
|             nursery.cancel_scope.cancel() | ||||
| 
 | ||||
|     finally: | ||||
|  | @ -466,9 +565,13 @@ async def trio_proc( | |||
|         # allowed! Do this **after** cancellation/teardown to avoid | ||||
|         # killing the process too early. | ||||
|         if proc: | ||||
|             log.cancel(f'Hard reap sequence starting for {subactor.uid}') | ||||
|             with trio.CancelScope(shield=True): | ||||
|             log.cancel( | ||||
|                 f'Hard reap sequence starting for subactor\n' | ||||
|                 f'>x)\n' | ||||
|                 f' |_{subactor}@{subactor.uid}\n' | ||||
|             ) | ||||
| 
 | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 # don't clobber an ongoing pdb | ||||
|                 if cancelled_during_spawn: | ||||
|                     # Try again to avoid TTY clobbering. | ||||
|  | @ -476,7 +579,17 @@ async def trio_proc( | |||
|                         with trio.move_on_after(0.5): | ||||
|                             await proc.wait() | ||||
| 
 | ||||
|                 if is_root_process(): | ||||
|                 await maybe_wait_for_debugger( | ||||
|                     child_in_debug=_runtime_vars.get( | ||||
|                         '_debug_mode', False | ||||
|                     ), | ||||
|                     header_msg=( | ||||
|                         'Delaying subproc reaper while debugger locked..\n' | ||||
|                     ), | ||||
| 
 | ||||
|                     # TODO: need a diff value then default? | ||||
|                     # poll_steps=9999999, | ||||
|                 ) | ||||
|                 # TODO: solve the following issue where we need | ||||
|                 # to do a similar wait like this but in an | ||||
|                 # "intermediary" parent actor that itself isn't | ||||
|  | @ -484,14 +597,22 @@ async def trio_proc( | |||
|                 # to hold off on relaying SIGINT until that child | ||||
|                 # is complete. | ||||
|                 # https://github.com/goodboy/tractor/issues/320 | ||||
|                     await maybe_wait_for_debugger( | ||||
|                         child_in_debug=_runtime_vars.get( | ||||
|                             '_debug_mode', False), | ||||
|                     ) | ||||
|                 # -[ ] we need to handle non-root parent-actors specially | ||||
|                 # by somehow determining if a child is in debug and then | ||||
|                 # avoiding cancel/kill of said child by this | ||||
|                 # (intermediary) parent until such a time as the root says | ||||
|                 # the pdb lock is released and we are good to tear down | ||||
|                 # (our children).. | ||||
|                 # | ||||
|                 # -[ ] so maybe something like this where we try to | ||||
|                 #     acquire the lock and get notified of who has it, | ||||
|                 #     check that uid against our known children? | ||||
|                 # this_uid: tuple[str, str] = current_actor().uid | ||||
|                 # await acquire_debug_lock(this_uid) | ||||
| 
 | ||||
|                 if proc.poll() is None: | ||||
|                     log.cancel(f"Attempting to hard kill {proc}") | ||||
|                     await do_hard_kill(proc) | ||||
|                     await hard_kill(proc) | ||||
| 
 | ||||
|                 log.debug(f"Joined {proc}") | ||||
|         else: | ||||
|  | @ -509,7 +630,7 @@ async def mp_proc( | |||
|     subactor: Actor, | ||||
|     errors: dict[tuple[str, str], Exception], | ||||
|     # passed through to actor main | ||||
|     bind_addr: tuple[str, int], | ||||
|     bind_addrs: list[tuple[str, int]], | ||||
|     parent_addr: tuple[str, int], | ||||
|     _runtime_vars: dict[str, Any],  # serialized and sent to _child | ||||
|     *, | ||||
|  | @ -567,7 +688,7 @@ async def mp_proc( | |||
|         target=_mp_main, | ||||
|         args=( | ||||
|             subactor, | ||||
|             bind_addr, | ||||
|             bind_addrs, | ||||
|             fs_info, | ||||
|             _spawn_method, | ||||
|             parent_addr, | ||||
|  | @ -635,7 +756,7 @@ async def mp_proc( | |||
|             # This is a "soft" (cancellable) join/reap which | ||||
|             # will remote cancel the actor on a ``trio.Cancelled`` | ||||
|             # condition. | ||||
|             await soft_wait( | ||||
|             await soft_kill( | ||||
|                 proc, | ||||
|                 proc_waiter, | ||||
|                 portal | ||||
|  |  | |||
|  | @ -18,27 +18,83 @@ | |||
| Per process state | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
| ) | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| _current_actor: Optional['Actor'] = None  # type: ignore # noqa | ||||
| from trio.lowlevel import current_task | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
| 
 | ||||
| 
 | ||||
| _current_actor: Actor|None = None  # type: ignore # noqa | ||||
| _last_actor_terminated: Actor|None = None | ||||
| 
 | ||||
| # TODO: mk this a `msgspec.Struct`! | ||||
| _runtime_vars: dict[str, Any] = { | ||||
|     '_debug_mode': False, | ||||
|     '_is_root': False, | ||||
|     '_root_mailbox': (None, None) | ||||
|     '_root_mailbox': (None, None), | ||||
|     '_registry_addrs': [], | ||||
| 
 | ||||
|     '_is_infected_aio': False, | ||||
| 
 | ||||
|     # for `tractor.pause_from_sync()` & `breakpoint()` support | ||||
|     'use_greenback': False, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| def current_actor(err_on_no_runtime: bool = True) -> 'Actor':  # type: ignore # noqa | ||||
| def last_actor() -> Actor|None: | ||||
|     ''' | ||||
|     Try to return last active `Actor` singleton | ||||
|     for this process. | ||||
| 
 | ||||
|     For the case where the runtime has already exited but someone | ||||
|     is asking about the "last" actor, probably to get its | ||||
|     `.uid: tuple`. | ||||
| 
 | ||||
|     ''' | ||||
|     return _last_actor_terminated | ||||
| 
 | ||||
| 
 | ||||
| def current_actor( | ||||
|     err_on_no_runtime: bool = True, | ||||
| ) -> Actor: | ||||
|     ''' | ||||
|     Get the process-local actor instance. | ||||
| 
 | ||||
|     ''' | ||||
|     if ( | ||||
|         err_on_no_runtime | ||||
|         and | ||||
|         _current_actor is None | ||||
|     ): | ||||
|         msg: str = 'No local actor has been initialized yet?\n' | ||||
|         from ._exceptions import NoRuntime | ||||
|     if _current_actor is None and err_on_no_runtime: | ||||
|         raise NoRuntime("No local actor has been initialized yet") | ||||
| 
 | ||||
|         if last := last_actor(): | ||||
|             msg += ( | ||||
|                 f'Apparently the last active actor was\n' | ||||
|                 f'|_{last}\n' | ||||
|                 f'|_{last.uid}\n' | ||||
|             ) | ||||
|         # no actor runtime has (as of yet) ever been started for | ||||
|         # this process. | ||||
|         else: | ||||
|             msg += ( | ||||
|                 # 'No last actor found?\n' | ||||
|                 '\nDid you forget to call one of,\n' | ||||
|                 '- `tractor.open_root_actor()`\n' | ||||
|                 '- `tractor.open_nursery()`\n' | ||||
|             ) | ||||
| 
 | ||||
|         raise NoRuntime(msg) | ||||
| 
 | ||||
|     return _current_actor | ||||
| 
 | ||||
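| # Usage sketch (editorial): the raising vs. non-raising lookup, | ||||
| # | ||||
| # >>> actor = current_actor()  # raises `NoRuntime` when unset | ||||
| # >>> maybe_actor = current_actor(err_on_no_runtime=False)  # or `None` | ||||
| 
 | ||||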
|  | @ -63,3 +119,26 @@ def debug_mode() -> bool: | |||
| 
 | ||||
| def is_root_process() -> bool: | ||||
|     return _runtime_vars['_is_root'] | ||||
| 
 | ||||
| 
 | ||||
| _ctxvar_Context: ContextVar[Context] = ContextVar( | ||||
|     'ipc_context', | ||||
|     default=None, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def current_ipc_ctx( | ||||
|     error_on_not_set: bool = False, | ||||
| ) -> Context|None: | ||||
|     ctx: Context = _ctxvar_Context.get() | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx | ||||
|         and error_on_not_set | ||||
|     ): | ||||
|         from ._exceptions import InternalError | ||||
|         raise InternalError( | ||||
|             'No IPC context has been allocated for this task yet?\n' | ||||
|             f'|_{current_task()}\n' | ||||
|         ) | ||||
|     return ctx | ||||
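| 
 | ||||
| # Usage sketch (editorial): from inside an RPC task, | ||||
| # | ||||
| # >>> ctx = current_ipc_ctx()  # `None` if never set for this task | ||||
| # >>> ctx = current_ipc_ctx(error_on_not_set=True)  # raises instead | ||||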
|  |  | |||
|  | @ -21,10 +21,12 @@ The machinery and types behind ``Context.open_stream()`` | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import inspect | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import inspect | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
|     AsyncIterator, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -34,16 +36,25 @@ import warnings | |||
| import trio | ||||
| 
 | ||||
| from ._exceptions import ( | ||||
|     unpack_error, | ||||
|     ContextCancelled, | ||||
|     RemoteActorError, | ||||
| ) | ||||
| from .log import get_logger | ||||
| from .trionics import ( | ||||
|     broadcast_receiver, | ||||
|     BroadcastReceiver, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     # Return, | ||||
|     # Stop, | ||||
|     MsgType, | ||||
|     Yield, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
|     from ._ipc import Channel | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -54,14 +65,13 @@ log = get_logger(__name__) | |||
| #   messages? class ReceiveChannel(AsyncResource, Generic[ReceiveType]): | ||||
| # - use __slots__ on ``Context``? | ||||
| 
 | ||||
| 
 | ||||
| class MsgStream(trio.abc.Channel): | ||||
|     ''' | ||||
|     A bidirectional message stream for receiving logically sequenced | ||||
|     values over an inter-actor IPC ``Channel``. | ||||
|     values over an inter-actor IPC `Channel`. | ||||
| 
 | ||||
|     This is the type returned to a local task which entered either | ||||
|     ``Portal.open_stream_from()`` or ``Context.open_stream()``. | ||||
|     `Portal.open_stream_from()` or `Context.open_stream()`. | ||||
| 
 | ||||
|     Termination rules: | ||||
| 
 | ||||
|  | @ -77,7 +87,7 @@ class MsgStream(trio.abc.Channel): | |||
|         self, | ||||
|         ctx: Context,  # typing: ignore # noqa | ||||
|         rx_chan: trio.MemoryReceiveChannel, | ||||
|         _broadcaster: BroadcastReceiver | None = None, | ||||
|         _broadcaster: BroadcastReceiver|None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         self._ctx = ctx | ||||
|  | @ -85,122 +95,234 @@ class MsgStream(trio.abc.Channel): | |||
|         self._broadcaster = _broadcaster | ||||
| 
 | ||||
|         # flag to denote end of stream | ||||
|         self._eoc: bool = False | ||||
|         self._closed: bool = False | ||||
|         self._eoc: bool|trio.EndOfChannel = False | ||||
|         self._closed: bool|trio.ClosedResourceError = False | ||||
| 
 | ||||
|     # delegate directly to underlying mem channel | ||||
|     def receive_nowait(self): | ||||
|         msg = self._rx_chan.receive_nowait() | ||||
|         return msg['yield'] | ||||
| 
 | ||||
|     async def receive(self): | ||||
|         '''Async receive a single msg from the IPC transport, the next | ||||
|         in sequence for this stream. | ||||
|     @property | ||||
|     def ctx(self) -> Context: | ||||
|         ''' | ||||
|         A read-only ref to this stream's inter-actor-task `Context`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         ''' | ||||
|         Ref to the containing `Context`'s transport `Channel`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx.chan | ||||
| 
 | ||||
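|     # Typical consumer-side usage (an editorial sketch; `ctx` is an | ||||
|     # already-opened `Context` and `process()` is hypothetical), | ||||
|     # | ||||
|     # >>> async with ctx.open_stream() as stream: | ||||
|     # ...     async for msg in stream:  # one `Yield` payload per item | ||||
|     # ...         process(msg) | ||||
| 
 | ||||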
|     # TODO: could we make this a direct method bind to `PldRx`? | ||||
|     # -> receive_nowait = PldRx.recv_pld | ||||
|     # |_ means latter would have to accept `MsgStream`-as-`self`? | ||||
|     #  => should be fine as long as, | ||||
|     #  -[ ] both define `._rx_chan` | ||||
|     #  -[ ] .ctx is bound into `PldRx` using a `@cm`? | ||||
|     # | ||||
|     # delegate directly to underlying mem channel | ||||
|     def receive_nowait( | ||||
|         self, | ||||
|         expect_msg: MsgType = Yield, | ||||
|     ): | ||||
|         ctx: Context = self._ctx | ||||
|         return ctx._pld_rx.recv_pld_nowait( | ||||
|             ipc=self, | ||||
|             expect_msg=expect_msg, | ||||
|         ) | ||||
| 
 | ||||
|     async def receive( | ||||
|         self, | ||||
| 
 | ||||
|         hide_tb: bool = False, | ||||
|     ): | ||||
|         ''' | ||||
|         Receive a single msg from the IPC transport, the next in | ||||
|         sequence sent by the far end task (possibly in order as | ||||
|         determined by the underlying protocol). | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # NOTE FYI: `trio.ReceiveChannel` implements EOC handling as | ||||
|         # follows (aka uses it to gracefully exit async for loops): | ||||
|         # | ||||
|         # async def __anext__(self) -> ReceiveType: | ||||
|         #     try: | ||||
|         #         return await self.receive() | ||||
|         #     except trio.EndOfChannel: | ||||
|         #         raise StopAsyncIteration | ||||
|         # | ||||
|         # see ``.aclose()`` for notes on the old behaviour prior to | ||||
|         # introducing this | ||||
|         if self._eoc: | ||||
|             raise trio.EndOfChannel | ||||
|             raise self._eoc | ||||
| 
 | ||||
|         if self._closed: | ||||
|             raise trio.ClosedResourceError('This stream was closed') | ||||
|             raise self._closed | ||||
| 
 | ||||
|         src_err: Exception|None = None  # orig tb | ||||
|         try: | ||||
|             msg = await self._rx_chan.receive() | ||||
|             return msg['yield'] | ||||
|             ctx: Context = self._ctx | ||||
|             return await ctx._pld_rx.recv_pld(ipc=self) | ||||
| 
 | ||||
|         except KeyError as err: | ||||
|             # internal error should never get here | ||||
|             assert msg.get('cid'), ("Received internal error at portal?") | ||||
|         # XXX: the stream terminates on either of: | ||||
|         # - `self._rx_chan.receive()` raising  after manual closure | ||||
|         #   by the rpc-runtime, | ||||
|         #   OR | ||||
|         # - via a `Stop`-msg received from remote peer task. | ||||
|         #   NOTE | ||||
|         #   |_ previously this was triggered by calling | ||||
|         #   ``._rx_chan.aclose()`` on the send side of the channel | ||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' | ||||
|         #   message handling gets delegated to `PldRx.recv_pld()` | ||||
|         #   internals. | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             # a graceful stream finished signal | ||||
|             self._eoc = eoc | ||||
|             src_err = eoc | ||||
| 
 | ||||
|             # TODO: handle 2 cases with 3.10 match syntax | ||||
|             # - 'stop' | ||||
|             # - 'error' | ||||
|             # possibly just handle msg['stop'] here! | ||||
| 
 | ||||
|             if self._closed: | ||||
|                 raise trio.ClosedResourceError('This stream was closed') | ||||
| 
 | ||||
|             if msg.get('stop') or self._eoc: | ||||
|                 log.debug(f"{self} was stopped at remote end") | ||||
| 
 | ||||
|                 # XXX: important to set so that a new ``.receive()`` | ||||
|                 # call (likely by another task using a broadcast receiver) | ||||
|                 # doesn't accidentally pull the ``return`` message | ||||
|                 # value out of the underlying feed mem chan! | ||||
|                 self._eoc = True | ||||
| 
 | ||||
|                 # # when the send is closed we assume the stream has | ||||
|                 # # terminated and signal this local iterator to stop | ||||
|                 # await self.aclose() | ||||
| 
 | ||||
|                 # XXX: this causes ``ReceiveChannel.__anext__()`` to | ||||
|                 # raise a ``StopAsyncIteration`` **and** in our catch | ||||
|                 # block below it will trigger ``.aclose()``. | ||||
|                 raise trio.EndOfChannel from err | ||||
| 
 | ||||
|             # TODO: test that shows stream raising an expected error!!! | ||||
|             elif msg.get('error'): | ||||
|                 # raise the error message | ||||
|                 raise unpack_error(msg, self._ctx.chan) | ||||
| 
 | ||||
|             else: | ||||
|                 raise | ||||
| 
 | ||||
|         except ( | ||||
|             trio.ClosedResourceError,  # by self._rx_chan | ||||
|             trio.EndOfChannel,  # by self._rx_chan or `stop` msg from far end | ||||
|         ): | ||||
|             # XXX: we close the stream on any of these error conditions: | ||||
| 
 | ||||
|             # a ``ClosedResourceError`` indicates that the internal | ||||
|             # feeder memory receive channel was closed likely by the | ||||
|             # runtime after the associated transport-channel | ||||
|             # disconnected or broke. | ||||
| 
 | ||||
|             # an ``EndOfChannel`` indicates either the internal recv | ||||
|             # memchan exhausted **or** we raised it just above after | ||||
|             # receiving a `stop` message from the far end of the stream. | ||||
| 
 | ||||
|             # Previously this was triggered by calling ``.aclose()`` on | ||||
|             # the send side of the channel inside | ||||
|             # ``Actor._push_result()`` (should still be commented code | ||||
|             # there - which should eventually get removed), but now the | ||||
|             # 'stop' message handling has been put just above. | ||||
| 
 | ||||
|             # TODO: Locally, we want to close this stream gracefully, by | ||||
|             # terminating any local consumer tasks deterministically. | ||||
|             # Once we have broadcast support, we **don't** want to be | ||||
|             # closing this stream and not flushing a final value to | ||||
|             # remaining (clone) consumers who may not have been | ||||
|             # scheduled to receive it yet. | ||||
|         # a `ClosedResourceError` indicates that the internal feeder | ||||
|         # memory receive channel was closed likely by the runtime | ||||
|         # after the associated transport-channel disconnected or | ||||
|         # broke. | ||||
|         except trio.ClosedResourceError as cre:  # by self._rx_chan.receive() | ||||
|             src_err = cre | ||||
|             log.warning( | ||||
|                 '`Context._rx_chan` was already closed?' | ||||
|             ) | ||||
|             self._closed = cre | ||||
| 
 | ||||
|         # when the send is closed we assume the stream has | ||||
|         # terminated and signal this local iterator to stop | ||||
|             await self.aclose() | ||||
|         drained: list[Exception|dict] = await self.aclose() | ||||
|         if drained: | ||||
|             # ?TODO? pass these to the `._ctx._drained_msgs: deque` | ||||
|             # and then iterate them as part of any `.wait_for_result()` call? | ||||
|             # | ||||
|             # from .devx import pause | ||||
|             # await pause() | ||||
|             log.warning( | ||||
|                 'Drained context msgs during closure\n\n' | ||||
|                 f'{drained}' | ||||
|             ) | ||||
| 
 | ||||
|             raise  # propagate | ||||
|         # NOTE XXX: if the context was cancelled or remote-errored | ||||
|         # but we received the stream close msg first, we | ||||
|         # probably want to instead raise the remote error | ||||
|         # over the end-of-stream connection error since likely | ||||
|         # the remote error was the source cause? | ||||
|         # ctx: Context = self._ctx | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|             from_src_exc=src_err, | ||||
|         ) | ||||
| 
 | ||||
|     async def aclose(self): | ||||
|         # propagate any error but hide low-level frame details from | ||||
|         # the caller by default for console/debug-REPL noise | ||||
|         # reduction. | ||||
|         if ( | ||||
|             hide_tb | ||||
|             and ( | ||||
| 
 | ||||
|                 # XXX NOTE special conditions: don't reraise on | ||||
|                 # certain stream-specific internal error types like, | ||||
|                 # | ||||
|                 # - `trio.EoC` since we want to use the exact instance | ||||
|                 #   to ensure that it is the error that bubbles upward | ||||
|                 #   for silent absorption by `Context.open_stream()`. | ||||
|                 not self._eoc | ||||
| 
 | ||||
|                 # - `RemoteActorError` (or subtypes like ctxc) | ||||
|                 #    since we want to present the error as though it is | ||||
|                 #    "sourced" directly from this `.receive()` call and | ||||
|                 #    generally NOT include the stack frames raised from | ||||
|                 #    inside the `PldRx` and/or the transport stack | ||||
|                 #    layers. | ||||
|                 or isinstance(src_err, RemoteActorError) | ||||
|             ) | ||||
|         ): | ||||
|             raise type(src_err)(*src_err.args) from src_err | ||||
|         else: | ||||
|             # for any non-graceful-EOC we want to NOT hide this frame | ||||
|             if not self._eoc: | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|             raise src_err | ||||
| 
 | ||||
|     async def aclose(self) -> list[Exception|dict]: | ||||
|         ''' | ||||
|         Cancel associated remote actor task and local memory channel on | ||||
|         close. | ||||
| 
 | ||||
|         Notes:  | ||||
|          - REMEMBER that this is also called by `.__aexit__()` so | ||||
|            careful consideration must be made to handle whatever | ||||
|            internal state is mutated, particularly in terms of | ||||
|            draining IPC msgs! | ||||
| 
 | ||||
|          - more or less we try to maintain adherence to trio's `.aclose()` semantics: | ||||
|            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|         ''' | ||||
|         # XXX: keep proper adherence to trio's `.aclose()` semantics: | ||||
|         # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|         rx_chan = self._rx_chan | ||||
| 
 | ||||
|         if rx_chan._closed: | ||||
|             log.cancel(f"{self} is already closed") | ||||
|         # rx_chan = self._rx_chan | ||||
| 
 | ||||
|         # XXX NOTE XXX | ||||
|         # it's SUPER IMPORTANT that we ensure we don't DOUBLE | ||||
|         # DRAIN msgs on closure so as to avoid getting stuck hanging on | ||||
|         # the `._rx_chan` since we call this method on | ||||
|         # `.__aexit__()` as well!!! | ||||
|         # => SO ENSURE WE CATCH ALL TERMINATION STATES in this | ||||
|         # block including the EoC.. | ||||
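|         # NOTE: e.g. a repeated close (as from `.__aexit__()` | ||||
|         # after an explicit user-side call) must be a no-op per | ||||
|         # `trio.AsyncResource` semantics; a sketch: | ||||
|         # | ||||
|         #     await stream.aclose()  # drains & closes | ||||
|         #     await stream.aclose()  # silently returns [] | ||||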
|         if self.closed: | ||||
|             # this stream has already been closed so silently succeed as | ||||
|             # per ``trio.AsyncResource`` semantics. | ||||
|             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||
|             return | ||||
|             return [] | ||||
| 
 | ||||
|         self._eoc = True | ||||
|         ctx: Context = self._ctx | ||||
|         drained: list[Exception|dict] = [] | ||||
|         while not drained: | ||||
|             try: | ||||
|                 maybe_final_msg = self.receive_nowait( | ||||
|                     # allow_msgs=[Yield, Return], | ||||
|                     expect_msg=Yield, | ||||
|                 ) | ||||
|                 if maybe_final_msg: | ||||
|                     log.debug( | ||||
|                         'Drained un-processed stream msg:\n' | ||||
|                         f'{pformat(maybe_final_msg)}' | ||||
|                     ) | ||||
|                     # TODO: inject into parent `Context` buf? | ||||
|                     drained.append(maybe_final_msg) | ||||
| 
 | ||||
|             # NOTE: we only need these handlers due to the | ||||
|             # `.receive_nowait()` call above which may re-raise | ||||
|             # one of these errors on a msg key error! | ||||
| 
 | ||||
|             except trio.WouldBlock as be: | ||||
|                 drained.append(be) | ||||
|                 break | ||||
| 
 | ||||
|             except trio.EndOfChannel as eoc: | ||||
|                 self._eoc: Exception = eoc | ||||
|                 drained.append(eoc) | ||||
|                 break | ||||
| 
 | ||||
|             except trio.ClosedResourceError as cre: | ||||
|                 self._closed = cre | ||||
|                 drained.append(cre) | ||||
|                 break | ||||
| 
 | ||||
|             except ContextCancelled as ctxc: | ||||
|                 # log.exception('GOT CTXC') | ||||
|                 log.cancel( | ||||
|                     'Context was cancelled during stream closure:\n' | ||||
|                     f'canceller: {ctxc.canceller}\n' | ||||
|                     f'{pformat(ctxc.msgdata)}' | ||||
|                 ) | ||||
|                 break | ||||
| 
 | ||||
|         # NOTE: this is super subtle IPC messaging stuff: | ||||
|         # Relay stop iteration to far end **iff** we're | ||||
|  | @ -231,26 +353,41 @@ class MsgStream(trio.abc.Channel): | |||
|         except ( | ||||
|             trio.BrokenResourceError, | ||||
|             trio.ClosedResourceError | ||||
|         ): | ||||
|         ) as re: | ||||
|             # the underlying channel may already have been pulled | ||||
|             # in which case our stop message is meaningless since | ||||
|             # it can't traverse the transport. | ||||
|             ctx = self._ctx | ||||
|             log.warning( | ||||
|                 f'Stream was already destroyed?\n' | ||||
|                 f'actor: {ctx.chan.uid}\n' | ||||
|                 f'ctx id: {ctx.cid}' | ||||
|             ) | ||||
|             drained.append(re) | ||||
|             self._closed = re | ||||
| 
 | ||||
|         self._closed = True | ||||
|         # if caught_eoc: | ||||
|         #     # from .devx import _debug | ||||
|         #     # await _debug.pause() | ||||
|         #     with trio.CancelScope(shield=True): | ||||
|         #         await rx_chan.aclose() | ||||
| 
 | ||||
|         # Do we close the local mem chan ``self._rx_chan`` ??!? | ||||
|         if not self._eoc: | ||||
|             message: str = ( | ||||
|                 f'Stream self-closed by {self._ctx.side!r}-side before EoC\n' | ||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC | ||||
|                 f'x}}>\n' | ||||
|                 f'|_{self}\n' | ||||
|             ) | ||||
|             log.cancel(message) | ||||
|             self._eoc = trio.EndOfChannel(message) | ||||
| 
 | ||||
|         # NO, DEFINITELY NOT if we're a bi-dir ``MsgStream``! | ||||
|         # BECAUSE this same core-msg-loop mem recv-chan is used to deliver | ||||
|         # the potential final result from the surrounding inter-actor | ||||
|         # `Context` so we don't want to close it until that context has | ||||
|         # run to completion. | ||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||
|         # => NO, DEFINITELY NOT! <= | ||||
|         # if we're a bi-dir ``MsgStream`` BECAUSE this same | ||||
|         # core-msg-loop mem recv-chan is used to deliver the | ||||
|         # potential final result from the surrounding inter-actor | ||||
|         # `Context` so we don't want to close it until that | ||||
|         # context has run to completion. | ||||
| 
 | ||||
|         # XXX: Notes on old behaviour: | ||||
|         # await rx_chan.aclose() | ||||
|  | @ -279,6 +416,26 @@ class MsgStream(trio.abc.Channel): | |||
|         # runtime's closure of ``rx_chan`` in the case where we may | ||||
|         # still need to consume msgs that are "in transit" from the far | ||||
|         # end (eg. for ``Context.result()``). | ||||
|         # self._closed = True | ||||
|         return drained | ||||
| 
 | ||||
|     @property | ||||
|     def closed(self) -> bool: | ||||
| 
 | ||||
|         rxc: bool = self._rx_chan._closed | ||||
|         _closed: bool|Exception = self._closed | ||||
|         _eoc: bool|trio.EndOfChannel = self._eoc | ||||
|         if rxc or _closed or _eoc: | ||||
|             log.runtime( | ||||
|                 f'`MsgStream` is already closed\n' | ||||
|                 f'{self}\n' | ||||
|                 f' |_cid: {self._ctx.cid}\n' | ||||
|                 f' |_rx_chan._closed: {type(rxc)} = {rxc}\n' | ||||
|                 f' |_closed: {type(_closed)} = {_closed}\n' | ||||
|                 f' |_eoc: {type(_eoc)} = {_eoc}' | ||||
|             ) | ||||
|             return True | ||||
|         return False | ||||
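 | ||||
|     # NOTE: a (hypothetical) external usage sketch which reads | ||||
|     # the above `.closed` property to guard sends: | ||||
|     # | ||||
|     #     if not stream.closed: | ||||
|     #         await stream.send(msg) | ||||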
| 
 | ||||
|     @acm | ||||
|     async def subscribe( | ||||
|  | @ -308,6 +465,9 @@ class MsgStream(trio.abc.Channel): | |||
|                 self, | ||||
|                 # use memory channel size by default | ||||
|                 self._rx_chan._state.max_buffer_size,  # type: ignore | ||||
| 
 | ||||
|                 # TODO: can remove this kwarg right since | ||||
|                 # by default behaviour is to do this anyway? | ||||
|                 receive_afunc=self.receive, | ||||
|             ) | ||||
| 
 | ||||
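|     # NOTE: a usage sketch of the above `.subscribe()` API where | ||||
|     # each of multiple local consumer tasks gets its own | ||||
|     # broadcast-receiver handle on the same stream: | ||||
|     # | ||||
|     #     async with stream.subscribe() as bstream: | ||||
|     #         async for msg in bstream: | ||||
|     #             ... | ||||
 | ||||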
|  | @ -334,19 +494,260 @@ class MsgStream(trio.abc.Channel): | |||
| 
 | ||||
|     async def send( | ||||
|         self, | ||||
|         data: Any | ||||
|         data: Any, | ||||
| 
 | ||||
|         hide_tb: bool = True, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a message over this stream to the far end. | ||||
| 
 | ||||
|         ''' | ||||
|         if self._ctx._remote_error: | ||||
|             raise self._ctx._remote_error  # from None | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # raise any already-known error immediately | ||||
|         self._ctx.maybe_raise() | ||||
|         if self._eoc: | ||||
|             raise self._eoc | ||||
| 
 | ||||
|         if self._closed: | ||||
|             raise trio.ClosedResourceError('This stream was already closed') | ||||
|             raise self._closed | ||||
| 
 | ||||
|         try: | ||||
|             await self._ctx.chan.send( | ||||
|                 payload=Yield( | ||||
|                     cid=self._ctx.cid, | ||||
|                     pld=data, | ||||
|                 ), | ||||
|             ) | ||||
|         except ( | ||||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
|             BrokenPipeError, | ||||
|         ) as trans_err: | ||||
|             if hide_tb: | ||||
|                 raise type(trans_err)( | ||||
|                     *trans_err.args | ||||
|                 ) from trans_err | ||||
|             else: | ||||
|                 raise | ||||
| 
 | ||||
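|     # NOTE: a minimal (hypothetical) round-trip usage sketch of | ||||
|     # `.send()`/`.receive()` from the ctx-opener side: | ||||
|     # | ||||
|     #     async with ctx.open_stream() as stream: | ||||
|     #         await stream.send('ping') | ||||
|     #         resp = await stream.receive() | ||||
 | ||||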
|     # TODO: msg capability context api | ||||
|     # @acm | ||||
|     # async def enable_msg_caps( | ||||
|     #     self, | ||||
|     #     msg_subtypes: Union[ | ||||
|     #         list[list[Struct]], | ||||
|     #         Protocol,   # hypothetical type that wraps a msg set | ||||
|     #     ], | ||||
|     # ) -> tuple[Callable, Callable]:  # payload enc, dec pair | ||||
|     #     ... | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream_from_ctx( | ||||
|     ctx: Context, | ||||
|     allow_overruns: bool|None = False, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[MsgStream, None]: | ||||
|     ''' | ||||
|     Open a `MsgStream`, a bi-directional msg transport dialog | ||||
|     connected to the cross-actor peer task for an IPC `Context`. | ||||
| 
 | ||||
|     This context manager must be entered in both the "parent" (task | ||||
|     which entered `Portal.open_context()`) and "child" (RPC task | ||||
|     which is decorated by `@context`) tasks for the stream to | ||||
|     logically be considered "open"; if one side begins sending to an | ||||
|     un-opened peer, depending on policy config, msgs will either be | ||||
|     queued until the other side opens and/or a `StreamOverrun` will | ||||
|     (eventually) be raised. | ||||
| 
 | ||||
|                          ------ - ------ | ||||
| 
 | ||||
|     Runtime semantics design: | ||||
| 
 | ||||
|     A `MsgStream` session adheres to "one-shot use" semantics, | ||||
|     meaning if you close the scope it **cannot** be "re-opened". | ||||
| 
 | ||||
|     Instead you must re-establish a new surrounding RPC `Context` | ||||
|     (RTC: remote task context?) using `Portal.open_context()`. | ||||
| 
 | ||||
|     In the future this *design choice* may need to be changed but | ||||
|     currently there seems to be no obvious reason to support such | ||||
|     semantics.. | ||||
| 
 | ||||
|     - "pausing a stream" can be supported with a message implemented | ||||
|       by the `tractor` application dev. | ||||
| 
 | ||||
|     - any remote error will normally require a restart of the entire | ||||
|       `trio.Task`'s scope due to the nature of `trio`'s cancellation | ||||
|       (`CancelScope`) system and semantics (level triggered). | ||||
| 
 | ||||
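|     A (hypothetical) two-sided usage sketch; the `child()` | ||||
|     endpoint and `portal` names are illustrative:: | ||||
 | ||||
|         @tractor.context | ||||
|         async def child(ctx: tractor.Context) -> None: | ||||
|             await ctx.started() | ||||
|             async with ctx.open_stream() as stream: | ||||
|                 async for msg in stream: | ||||
|                     await stream.send(msg)  # echo | ||||
 | ||||
|         # parent side | ||||
|         async with ( | ||||
|             portal.open_context(child) as (ctx, first), | ||||
|             ctx.open_stream() as stream, | ||||
|         ): | ||||
|             await stream.send('ping') | ||||
|             assert await stream.receive() == 'ping' | ||||
 | ||||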
|     ''' | ||||
|     actor: Actor = ctx._actor | ||||
| 
 | ||||
|     # If the surrounding context has been cancelled by some | ||||
|     # task with a handle to THIS, we error here immediately | ||||
|     # since it likely means the surrounding lexical-scope has | ||||
|     # errored, been `trio.Cancelled` or at the least | ||||
|     # `Context.cancel()` was called by some task. | ||||
|     if ctx._cancel_called: | ||||
| 
 | ||||
|         # XXX NOTE: ALWAYS RAISE any remote error here even if | ||||
|         # it's an expected `ContextCancelled` due to a local | ||||
|         # task having called `.cancel()`! | ||||
|         # | ||||
|         # WHY: we expect the error to always bubble up to the | ||||
|         # surrounding `Portal.open_context()` call and be | ||||
|         # absorbed there (silently) and we DO NOT want to | ||||
|         # actually try to stream - a cancel msg was already | ||||
|         # sent to the other side! | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|         ) | ||||
|         # NOTE: this is diff from calling | ||||
|         # `._maybe_raise_remote_err()` specifically | ||||
|         # because we want to raise a ctxc on any task entering | ||||
|         # this `.open_stream()` AFTER cancellation has already | ||||
|         # been requested; we DO NOT want to absorb any ctxc ACK | ||||
|         # silently! | ||||
|         # if ctx._remote_error: | ||||
|         #     raise ctx._remote_error | ||||
| 
 | ||||
|         # XXX NOTE: if no `ContextCancelled` has been responded | ||||
|         # back from the other side (yet), we raise a different | ||||
|         # runtime error indicating that this task's usage of | ||||
|         # `Context.cancel()` and then `.open_stream()` is WRONG! | ||||
|         task: str = trio.lowlevel.current_task().name | ||||
|         raise RuntimeError( | ||||
|             'Stream opened after `Context.cancel()` called..?\n' | ||||
|             f'task: {actor.uid[0]}:{task}\n' | ||||
|             f'{ctx}' | ||||
|         ) | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx._portal | ||||
|         and not ctx._started_called | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             '`Context.started()` must be called before opening a stream' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: in one-way streaming this only happens on the | ||||
|     # parent-ctx-task side (on the side that calls | ||||
|     # `Actor.start_remote_task()`) so if you try to send | ||||
|     # a stop from the caller to the callee in the | ||||
|     # single-direction-stream case you'll get a lookup error | ||||
|     # currently. | ||||
|     ctx: Context = actor.get_context( | ||||
|         chan=ctx.chan, | ||||
|         cid=ctx.cid, | ||||
|         nsf=ctx._nsf, | ||||
|         # side=ctx.side, | ||||
| 
 | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
|         allow_overruns=allow_overruns, | ||||
|     ) | ||||
|     ctx._allow_overruns: bool = allow_overruns | ||||
|     assert ctx is ctx | ||||
| 
 | ||||
|     # XXX: If the underlying channel feeder receive mem chan has | ||||
|     # been closed then likely client code has already exited | ||||
|     # a ``.open_stream()`` block prior or there was some other | ||||
|     # unanticipated error or cancellation from ``trio``. | ||||
| 
 | ||||
|     if ctx._rx_chan._closed: | ||||
|         raise trio.ClosedResourceError( | ||||
|             'The underlying channel for this stream was already closed!\n' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: implicitly this will call `MsgStream.aclose()` on | ||||
|     # `.__aexit__()` due to stream's parent `Channel` type! | ||||
|     # | ||||
|     # XXX NOTE XXX: ensures the stream is "one-shot use", | ||||
|     # which specifically means that on exit, | ||||
|     # - signal ``trio.EndOfChannel``/``StopAsyncIteration`` to | ||||
|     #   the far end indicating that the caller exited | ||||
|     #   the streaming context purposefully by letting | ||||
|     #   the exit block exec. | ||||
|     # - this is diff from the cancel/error case where | ||||
|     #   a cancel request from this side or an error | ||||
|     #   should be sent to the far end indicating the | ||||
|     #   stream WAS NOT just closed normally/gracefully. | ||||
|     async with MsgStream( | ||||
|         ctx=ctx, | ||||
|         rx_chan=ctx._rx_chan, | ||||
|     ) as stream: | ||||
| 
 | ||||
|         # NOTE: we track all existing streams per portal for | ||||
|         # the purposes of attempting graceful closes on runtime | ||||
|         # cancel requests. | ||||
|         if ctx._portal: | ||||
|             ctx._portal._streams.add(stream) | ||||
| 
 | ||||
|         try: | ||||
|             ctx._stream_opened: bool = True | ||||
|             ctx._stream = stream | ||||
| 
 | ||||
|             # XXX: do we need this? | ||||
|             # ensure we aren't cancelled before yielding the stream | ||||
|             # await trio.lowlevel.checkpoint() | ||||
|             yield stream | ||||
| 
 | ||||
|             # XXX: (MEGA IMPORTANT) if this is a root opened process we | ||||
|             # wait for any immediate child in debug before popping the | ||||
|             # context from the runtime msg loop otherwise inside | ||||
|             # ``Actor._deliver_ctx_payload()`` the msg will be discarded and in | ||||
|             # the case where that msg is global debugger unlock (via | ||||
|             # a "stop" msg for a stream), this can result in a deadlock | ||||
|             # where the root is waiting on the lock to clear but the | ||||
|             # child has already cleared it and clobbered IPC. | ||||
|             # | ||||
|             # await maybe_wait_for_debugger() | ||||
| 
 | ||||
|             # XXX TODO: pretty sure this isn't needed (see | ||||
|             # note above this block) AND will result in | ||||
|             # a double `.send_stop()` call. The only reason to | ||||
|             # put it here would be to deal with "order" in | ||||
|             # terms of raising any remote error (as per | ||||
|             # directly below) or bc the stream's | ||||
|             # `.__aexit__()` block might not get run | ||||
|             # (doubtful)? Either way if we did put this back | ||||
|             # in we also need a state var to avoid the double | ||||
|             # stop-msg send.. | ||||
|             # | ||||
|             # await stream.aclose() | ||||
| 
 | ||||
|         # NOTE: absorb and do not raise any | ||||
|         # EoC received from the other side such that | ||||
|         # it is not raised inside the surrounding | ||||
|         # context block's scope! | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             if ( | ||||
|                 eoc | ||||
|                 and | ||||
|                 stream.closed | ||||
|             ): | ||||
|                 # sanity, can remove? | ||||
|                 assert eoc is stream._eoc | ||||
| 
 | ||||
|                 log.warning( | ||||
|                     'Stream was terminated by EoC\n\n' | ||||
|                     # NOTE: won't show the error <Type> but | ||||
|                     # does show txt followed by IPC msg. | ||||
|                     f'{str(eoc)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         finally: | ||||
|             if ctx._portal: | ||||
|                 try: | ||||
|                     ctx._portal._streams.remove(stream) | ||||
|                 except KeyError: | ||||
|                     log.warning( | ||||
|                         f'Stream was already destroyed?\n' | ||||
|                         f'actor: {ctx.chan.uid}\n' | ||||
|                         f'ctx id: {ctx.cid}' | ||||
|                     ) | ||||
| 
 | ||||
|         await self._ctx.chan.send({'yield': data, 'cid': self._ctx.cid}) | ||||
| 
 | ||||
| 
 | ||||
| def stream(func: Callable) -> Callable: | ||||
|  | @ -356,7 +757,7 @@ def stream(func: Callable) -> Callable: | |||
|     ''' | ||||
|     # TODO: apply whatever solution ``mypy`` ends up picking for this: | ||||
|     # https://github.com/python/mypy/issues/2087#issuecomment-769266912 | ||||
|     func._tractor_stream_function = True  # type: ignore | ||||
|     func._tractor_stream_function: bool = True  # type: ignore | ||||
| 
 | ||||
|     sig = inspect.signature(func) | ||||
|     params = sig.parameters | ||||
|  |  | |||
|  | @ -21,22 +21,22 @@ | |||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import inspect | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from pprint import pformat | ||||
| from typing import TYPE_CHECKING | ||||
| import typing | ||||
| import warnings | ||||
| 
 | ||||
| from exceptiongroup import BaseExceptionGroup | ||||
| import trio | ||||
| 
 | ||||
| from ._debug import maybe_wait_for_debugger | ||||
| from .devx._debug import maybe_wait_for_debugger | ||||
| from ._state import current_actor, is_main_process | ||||
| from .log import get_logger, get_loglevel | ||||
| from ._runtime import Actor | ||||
| from ._portal import Portal | ||||
| from ._exceptions import is_multi_cancelled | ||||
| from ._exceptions import ( | ||||
|     is_multi_cancelled, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from ._root import open_root_actor | ||||
| from . import _state | ||||
| from . import _spawn | ||||
|  | @ -80,54 +80,85 @@ class ActorNursery: | |||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         # TODO: maybe def these as fields of a struct looking type? | ||||
|         actor: Actor, | ||||
|         ria_nursery: trio.Nursery, | ||||
|         da_nursery: trio.Nursery, | ||||
|         errors: dict[tuple[str, str], BaseException], | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # self.supervisor = supervisor  # TODO | ||||
|         self._actor: Actor = actor | ||||
|         self._ria_nursery = ria_nursery | ||||
| 
 | ||||
|         # TODO: rename to `._tn` for our conventional "task-nursery" | ||||
|         self._da_nursery = da_nursery | ||||
| 
 | ||||
|         self._children: dict[ | ||||
|             tuple[str, str], | ||||
|             tuple[ | ||||
|                 Actor, | ||||
|                 trio.Process | mp.Process, | ||||
|                 Optional[Portal], | ||||
|                 Portal | None, | ||||
|             ] | ||||
|         ] = {} | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|         self.cancelled: bool = False | ||||
|         self._join_procs = trio.Event() | ||||
|         self._at_least_one_child_in_debug: bool = False | ||||
|         self.errors = errors | ||||
|         self._scope_error: BaseException|None = None | ||||
|         self.exited = trio.Event() | ||||
| 
 | ||||
|         # NOTE: when no explicit call is made to | ||||
|         # `.open_root_actor()` by application code, | ||||
|         # `.open_nursery()` will implicitly call it to start the | ||||
|         # actor-tree runtime. In this case we mark ourselves as | ||||
|         # such so that runtime components can be aware, for | ||||
|         # logging and syncing purposes, of any actor-opened | ||||
|         # nurseries. | ||||
|         self._implicit_runtime_started: bool = False | ||||
| 
 | ||||
|         # TODO: remove the `.run_in_actor()` API and thus this 2ndary | ||||
|         # nursery when that API get's moved outside this primitive! | ||||
|         self._ria_nursery = ria_nursery | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|     async def start_actor( | ||||
|         self, | ||||
|         name: str, | ||||
| 
 | ||||
|         *, | ||||
|         bind_addr: tuple[str, int] = _default_bind_addr, | ||||
|         rpc_module_paths: list[str] | None = None, | ||||
|         enable_modules: list[str] | None = None, | ||||
|         loglevel: str | None = None,  # set log level per subactor | ||||
|         nursery: trio.Nursery | None = None, | ||||
|         debug_mode: Optional[bool] | None = None, | ||||
| 
 | ||||
|         bind_addrs: list[tuple[str, int]] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str]|None = None, | ||||
|         enable_modules: list[str]|None = None, | ||||
|         loglevel: str|None = None,  # set log level per subactor | ||||
|         debug_mode: bool|None = None, | ||||
|         infect_asyncio: bool = False, | ||||
| 
 | ||||
|         # TODO: ideally we can rm this once we no longer have | ||||
|         # a `._ria_nursery` since the dependent APIs have been | ||||
|         # removed! | ||||
|         nursery: trio.Nursery|None = None, | ||||
| 
 | ||||
|     ) -> Portal: | ||||
|         ''' | ||||
|         Start a (daemon) actor: a process that has no designated | ||||
|         "main task" besides the runtime. | ||||
| 
 | ||||
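|         A minimal usage sketch; the `'my_daemon'` name and the | ||||
|         module enabled for remote invocation are illustrative:: | ||||
 | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.start_actor( | ||||
|                     'my_daemon', | ||||
|                     enable_modules=[__name__], | ||||
|                 ) | ||||
|                 # ... interact via the `portal` ... | ||||
|                 await portal.cancel_actor() | ||||
 | ||||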
|         ''' | ||||
|         loglevel = loglevel or self._actor.loglevel or get_loglevel() | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         loglevel: str = ( | ||||
|             loglevel | ||||
|             or self._actor.loglevel | ||||
|             or get_loglevel() | ||||
|         ) | ||||
| 
 | ||||
|         # configure and pass runtime state | ||||
|         _rtv = _state._runtime_vars.copy() | ||||
|         _rtv['_is_root'] = False | ||||
|         _rtv['_is_infected_aio'] = infect_asyncio | ||||
| 
 | ||||
|         # allow setting debug policy per actor | ||||
|         if debug_mode is not None: | ||||
|  | @ -150,14 +181,16 @@ class ActorNursery: | |||
|             # modules allowed to invoked funcs from | ||||
|             enable_modules=enable_modules, | ||||
|             loglevel=loglevel, | ||||
|             arbiter_addr=current_actor()._arb_addr, | ||||
| 
 | ||||
|             # verbatim relay this actor's registrar addresses | ||||
|             registry_addrs=current_actor().reg_addrs, | ||||
|         ) | ||||
|         parent_addr = self._actor.accept_addr | ||||
|         assert parent_addr | ||||
| 
 | ||||
|         # start a task to spawn a process | ||||
|         # blocks until process has been started and a portal setup | ||||
|         nursery = nursery or self._da_nursery | ||||
|         nursery: trio.Nursery = nursery or self._da_nursery | ||||
| 
 | ||||
|         # XXX: the type ignore is actually due to a `mypy` bug | ||||
|         return await nursery.start(  # type: ignore | ||||
|  | @ -167,21 +200,29 @@ class ActorNursery: | |||
|                 self, | ||||
|                 subactor, | ||||
|                 self.errors, | ||||
|                 bind_addr, | ||||
|                 bind_addrs, | ||||
|                 parent_addr, | ||||
|                 _rtv,  # run time vars | ||||
|                 infect_asyncio=infect_asyncio, | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: DEPRECATE THIS: | ||||
|     # -[ ] impl instead as a hilevel wrapper on | ||||
|     #   top of a `@context` style invocation. | ||||
|     #  |_ dynamic @context decoration on child side | ||||
|     #  |_ implicit `Portal.open_context() as (ctx, first):` | ||||
|     #    and `return first` on parent side. | ||||
|     #  |_ mention how it's similar to `trio-parallel` API? | ||||
|     # -[ ] use @api_frame on the wrapper | ||||
|     async def run_in_actor( | ||||
|         self, | ||||
| 
 | ||||
|         fn: typing.Callable, | ||||
|         *, | ||||
| 
 | ||||
|         name: Optional[str] = None, | ||||
|         bind_addr: tuple[str, int] = _default_bind_addr, | ||||
|         name: str | None = None, | ||||
|         bind_addrs: list[tuple[str, int]] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str] | None = None, | ||||
|         enable_modules: list[str] | None = None, | ||||
|         loglevel: str | None = None,  # set log level per subactor | ||||
|  | @ -190,25 +231,28 @@ class ActorNursery: | |||
|         **kwargs,  # explicit args to ``fn`` | ||||
| 
 | ||||
|     ) -> Portal: | ||||
|         """Spawn a new actor, run a lone task, then terminate the actor and | ||||
|         ''' | ||||
|         Spawn a new actor, run a lone task, then terminate the actor and | ||||
|         return its result. | ||||
| 
 | ||||
|         Actors spawned using this method are kept alive at nursery teardown | ||||
|         until the task spawned by executing ``fn`` completes at which point | ||||
|         the actor is terminated. | ||||
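 | ||||
|         A minimal sketch, assuming a module-level `add()` fn and | ||||
|         that `Portal.result()` delivers the final return value:: | ||||
 | ||||
|             async def add(x: int, y: int) -> int: | ||||
|                 return x + y | ||||
 | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 portal = await an.run_in_actor(add, x=1, y=2) | ||||
|                 assert await portal.result() == 3 | ||||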
|         """ | ||||
|         mod_path = fn.__module__ | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         mod_path: str = fn.__module__ | ||||
| 
 | ||||
|         if name is None: | ||||
|             # use the explicit function name if not provided | ||||
|             name = fn.__name__ | ||||
| 
 | ||||
|         portal = await self.start_actor( | ||||
|         portal: Portal = await self.start_actor( | ||||
|             name, | ||||
|             enable_modules=[mod_path] + ( | ||||
|                 enable_modules or rpc_module_paths or [] | ||||
|             ), | ||||
|             bind_addr=bind_addr, | ||||
|             bind_addrs=bind_addrs, | ||||
|             loglevel=loglevel, | ||||
|             # use the run_in_actor nursery | ||||
|             nursery=self._ria_nursery, | ||||
|  | @ -232,21 +276,42 @@ class ActorNursery: | |||
|         ) | ||||
|         return portal | ||||
| 
 | ||||
|     async def cancel(self, hard_kill: bool = False) -> None: | ||||
|         """Cancel this nursery by instructing each subactor to cancel | ||||
|         itself and wait for all subactors to terminate. | ||||
|     # @api_frame | ||||
|     async def cancel( | ||||
|         self, | ||||
|         hard_kill: bool = False, | ||||
| 
 | ||||
|         If ``hard_kill`` is set to ``True`` then kill the processes | ||||
|         directly without any far end graceful ``trio`` cancellation. | ||||
|         """ | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cancel this actor-nursery by instructing each subactor's | ||||
|         runtime to cancel and wait for all underlying sub-processes | ||||
|         to terminate. | ||||
| 
 | ||||
|         If `hard_kill` is set then kill the processes directly using | ||||
|         the spawning-backend's API/OS-machinery without any attempt | ||||
|         at (graceful) `trio`-style cancellation using our | ||||
|         `Actor.cancel()`. | ||||
| 
 | ||||
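|         A sketch of an explicit early cancel from the parent | ||||
|         task (normally the supervisor machinery calls this for | ||||
|         you on error/teardown):: | ||||
 | ||||
|             async with tractor.open_nursery() as an: | ||||
|                 await an.start_actor('worker') | ||||
|                 await an.cancel(hard_kill=False) | ||||
 | ||||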
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         self.cancelled = True | ||||
| 
 | ||||
|         log.cancel(f"Cancelling nursery in {self._actor.uid}") | ||||
|         # TODO: impl a repr for spawn more compact | ||||
|         # then `._children`.. | ||||
|         children: dict = self._children | ||||
|         child_count: int = len(children) | ||||
|         msg: str = f'Cancelling actor nursery with {child_count} children\n' | ||||
|         with trio.move_on_after(3) as cs: | ||||
|             async with trio.open_nursery() as tn: | ||||
| 
 | ||||
|             async with trio.open_nursery() as nursery: | ||||
| 
 | ||||
|                 for subactor, proc, portal in self._children.values(): | ||||
|                 subactor: Actor | ||||
|                 proc: trio.Process | ||||
|                 portal: Portal | ||||
|                 for ( | ||||
|                     subactor, | ||||
|                     proc, | ||||
|                     portal, | ||||
|                 ) in children.values(): | ||||
| 
 | ||||
|                     # TODO: are we ever even going to use this or | ||||
|                     # is the spawning backend responsible for such | ||||
|  | @ -258,12 +323,13 @@ class ActorNursery: | |||
|                         if portal is None:  # actor hasn't fully spawned yet | ||||
|                             event = self._actor._peer_connected[subactor.uid] | ||||
|                             log.warning( | ||||
|                                 f"{subactor.uid} wasn't finished spawning?") | ||||
|                                 f"{subactor.uid} never 't finished spawning?" | ||||
|                             ) | ||||
| 
 | ||||
|                             await event.wait() | ||||
| 
 | ||||
|                             # channel/portal should now be up | ||||
|                             _, _, portal = self._children[subactor.uid] | ||||
|                             _, _, portal = children[subactor.uid] | ||||
| 
 | ||||
|                             # XXX should be impossible to get here | ||||
|                             # unless method was called from within | ||||
|  | @ -280,14 +346,24 @@ class ActorNursery: | |||
|                         # spawn cancel tasks for each sub-actor | ||||
|                         assert portal | ||||
|                         if portal.channel.connected(): | ||||
|                             nursery.start_soon(portal.cancel_actor) | ||||
|                             tn.start_soon(portal.cancel_actor) | ||||
| 
 | ||||
|                 log.cancel(msg) | ||||
|         # if we cancelled the cancel (we hung cancelling remote actors) | ||||
|         # then hard kill all sub-processes | ||||
|         if cs.cancelled_caught: | ||||
|             log.error( | ||||
|                 f"Failed to cancel {self}\nHard killing process tree!") | ||||
|             for subactor, proc, portal in self._children.values(): | ||||
|                 f'Failed to cancel {self}?\n' | ||||
|                 'Hard killing underlying subprocess tree!\n' | ||||
|             ) | ||||
|             subactor: Actor | ||||
|             proc: trio.Process | ||||
|             portal: Portal | ||||
|             for ( | ||||
|                 subactor, | ||||
|                 proc, | ||||
|                 portal, | ||||
|             ) in children.values(): | ||||
|                 log.warning(f"Hard killing process {proc}") | ||||
|                 proc.terminate() | ||||
| 
 | ||||
|  | @ -298,11 +374,15 @@ class ActorNursery: | |||
| @acm | ||||
| async def _open_and_supervise_one_cancels_all_nursery( | ||||
|     actor: Actor, | ||||
|     tb_hide: bool = False, | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
| 
 | ||||
|     # TODO: yay or nay? | ||||
|     __tracebackhide__ = True | ||||
|     # normally don't need to show user by default | ||||
|     __tracebackhide__: bool = tb_hide | ||||
| 
 | ||||
|     outer_err: BaseException|None = None | ||||
|     inner_err: BaseException|None = None | ||||
| 
 | ||||
|     # the collection of errors retrieved from spawned sub-actors | ||||
|     errors: dict[tuple[str, str], BaseException] = {} | ||||
|  | @ -312,7 +392,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|     # handling errors that are generated by the inner nursery in | ||||
|     # a supervisor strategy **before** blocking indefinitely to wait for | ||||
|     # actors spawned in "daemon mode" (aka started using | ||||
|     # ``ActorNursery.start_actor()``). | ||||
|     # `ActorNursery.start_actor()`). | ||||
| 
 | ||||
|     # errors from this daemon actor nursery bubble up to caller | ||||
|     async with trio.open_nursery() as da_nursery: | ||||
|  | @ -327,7 +407,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             # the above "daemon actor" nursery will be notified. | ||||
|             async with trio.open_nursery() as ria_nursery: | ||||
| 
 | ||||
|                 anursery = ActorNursery( | ||||
|                 an = ActorNursery( | ||||
|                     actor, | ||||
|                     ria_nursery, | ||||
|                     da_nursery, | ||||
|  | @ -336,18 +416,19 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                 try: | ||||
|                     # spawning of actors happens in the caller's scope | ||||
|                     # after we yield upwards | ||||
|                     yield anursery | ||||
|                     yield an | ||||
| 
 | ||||
|                     # When we didn't error in the caller's scope, | ||||
|                     # signal all process-monitor-tasks to conduct | ||||
|                     # the "hard join phase". | ||||
|                     log.runtime( | ||||
|                         f"Waiting on subactors {anursery._children} " | ||||
|                         "to complete" | ||||
|                         'Waiting on subactors to complete:\n' | ||||
|                         f'{pformat(an._children)}\n' | ||||
|                     ) | ||||
|                     anursery._join_procs.set() | ||||
|                     an._join_procs.set() | ||||
| 
 | ||||
|                 except BaseException as inner_err: | ||||
|                 except BaseException as _inner_err: | ||||
|                     inner_err = _inner_err | ||||
|                     errors[actor.uid] = inner_err | ||||
| 
 | ||||
|                     # If we error in the root but the debugger is | ||||
|  | @ -357,37 +438,60 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     # Instead try to wait for pdb to be released before | ||||
|                     # tearing down. | ||||
|                     await maybe_wait_for_debugger( | ||||
|                         child_in_debug=anursery._at_least_one_child_in_debug | ||||
|                         child_in_debug=an._at_least_one_child_in_debug | ||||
|                     ) | ||||
| 
 | ||||
|                     # if the caller's scope errored then we activate our | ||||
|                     # one-cancels-all supervisor strategy (don't | ||||
|                     # worry more are coming). | ||||
|                     anursery._join_procs.set() | ||||
|                     an._join_procs.set() | ||||
| 
 | ||||
|                     # XXX: hypothetically an error could be | ||||
|                     # raised and then a cancel signal shows up | ||||
|                     # XXX NOTE XXX: hypothetically an error could | ||||
|                     # be raised and then a cancel signal shows up | ||||
|                     # slightly after in which case the `else:` | ||||
|                     # block here might not complete?  For now, | ||||
|                     # shield both. | ||||
|                     with trio.CancelScope(shield=True): | ||||
|                         etype = type(inner_err) | ||||
|                         etype: type = type(inner_err) | ||||
|                         if etype in ( | ||||
|                             trio.Cancelled, | ||||
|                             KeyboardInterrupt | ||||
|                             KeyboardInterrupt, | ||||
|                         ) or ( | ||||
|                             is_multi_cancelled(inner_err) | ||||
|                         ): | ||||
|                             log.cancel( | ||||
|                                 f"Nursery for {current_actor().uid} " | ||||
|                                 f"was cancelled with {etype}") | ||||
|                                 f'Actor-nursery cancelled by {etype}\n\n' | ||||
| 
 | ||||
|                                 f'{current_actor().uid}\n' | ||||
|                                 f' |_{an}\n\n' | ||||
| 
 | ||||
|                                 # TODO: show tb str? | ||||
|                                 # f'{tb_str}' | ||||
|                             ) | ||||
|                         elif etype in { | ||||
|                             ContextCancelled, | ||||
|                         }: | ||||
|                             log.cancel( | ||||
|                                 'Actor-nursery caught remote cancellation\n\n' | ||||
| 
 | ||||
|                                 f'{inner_err.tb_str}' | ||||
|                             ) | ||||
|                         else: | ||||
|                             log.exception( | ||||
|                                 f"Nursery for {current_actor().uid} " | ||||
|                                 f"errored with") | ||||
|                                 'Nursery errored with:\n' | ||||
| 
 | ||||
|                                 # TODO: same thing as in | ||||
|                                 # `._invoke()` to compute how to | ||||
|                                 # place this div-line in the | ||||
|                                 # middle of the above msg | ||||
|                                 # content.. | ||||
|                                 # -[ ] prolly helper-func it too | ||||
|                                 #   in our `.log` module.. | ||||
|                                 # '------ - ------' | ||||
|                             ) | ||||
| 
 | ||||
|                         # cancel all subactors | ||||
|                         await anursery.cancel() | ||||
|                         await an.cancel() | ||||
| 
 | ||||
|             # ria_nursery scope end | ||||
| 
 | ||||
|  | @ -402,24 +506,30 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             Exception, | ||||
|             BaseExceptionGroup, | ||||
|             trio.Cancelled | ||||
|         ) as _outer_err: | ||||
|             outer_err = _outer_err | ||||
| 
 | ||||
|         ) as err: | ||||
|             an._scope_error = outer_err or inner_err | ||||
| 
 | ||||
|             # XXX: yet another guard before allowing the cancel | ||||
|             # sequence in case a (single) child is in debug. | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=anursery._at_least_one_child_in_debug | ||||
|                 child_in_debug=an._at_least_one_child_in_debug | ||||
|             ) | ||||
| 
 | ||||
|             # If actor-local error was raised while waiting on | ||||
|             # ".run_in_actor()" actors then we also want to cancel all | ||||
|             # remaining sub-actors (due to our lone strategy: | ||||
|             # one-cancels-all). | ||||
|             log.cancel(f"Nursery cancelling due to {err}") | ||||
|             if anursery._children: | ||||
|             if an._children: | ||||
|                 log.cancel( | ||||
|                     'Actor-nursery cancelling due to error type:\n' | ||||
|                     f'{outer_err}\n' | ||||
|                 ) | ||||
|                 with trio.CancelScope(shield=True): | ||||
|                     await anursery.cancel() | ||||
|                     await an.cancel() | ||||
|             raise | ||||
| 
 | ||||
|         finally: | ||||
|             # No errors were raised while awaiting ".run_in_actor()" | ||||
|             # actors but those actors may have returned remote errors as | ||||
|  | @ -428,9 +538,9 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             # collected in ``errors`` so cancel all actors, summarize | ||||
|             # all errors and re-raise. | ||||
|             if errors: | ||||
|                 if anursery._children: | ||||
|                 if an._children: | ||||
|                     with trio.CancelScope(shield=True): | ||||
|                         await anursery.cancel() | ||||
|                         await an.cancel() | ||||
| 
 | ||||
|                 # use `BaseExceptionGroup` as needed | ||||
|                 if len(errors) > 1: | ||||
|  | @ -441,11 +551,19 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                 else: | ||||
|                     raise list(errors.values())[0] | ||||
| 
 | ||||
|             # show frame on any (likely) internal error | ||||
|             if ( | ||||
|                 not an.cancelled | ||||
|                 and an._scope_error | ||||
|             ): | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|         # da_nursery scope end - nursery checkpoint | ||||
|     # final exit | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| # @api_frame | ||||
| async def open_nursery( | ||||
|     **kwargs, | ||||
| 
 | ||||
|  | @ -465,19 +583,21 @@ async def open_nursery( | |||
|     which cancellation scopes correspond to each spawned subactor set. | ||||
| 
 | ||||
|     ''' | ||||
|     implicit_runtime = False | ||||
| 
 | ||||
|     actor = current_actor(err_on_no_runtime=False) | ||||
| 
 | ||||
|     __tracebackhide__: bool = True | ||||
|     implicit_runtime: bool = False | ||||
|     actor: Actor = current_actor(err_on_no_runtime=False) | ||||
|     an: ActorNursery|None = None | ||||
|     try: | ||||
|         if actor is None and is_main_process(): | ||||
| 
 | ||||
|         if ( | ||||
|             actor is None | ||||
|             and is_main_process() | ||||
|         ): | ||||
|             # if we are the parent process start the | ||||
|             # actor runtime implicitly | ||||
|             log.info("Starting actor runtime!") | ||||
| 
 | ||||
|             # mark us for teardown on exit | ||||
|             implicit_runtime = True | ||||
|             implicit_runtime: bool = True | ||||
| 
 | ||||
|             async with open_root_actor(**kwargs) as actor: | ||||
|                 assert actor is current_actor() | ||||
|  | @ -485,24 +605,54 @@ async def open_nursery( | |||
|                 try: | ||||
|                     async with _open_and_supervise_one_cancels_all_nursery( | ||||
|                         actor | ||||
|                     ) as anursery: | ||||
|                         yield anursery | ||||
|                     ) as an: | ||||
| 
 | ||||
|                         # NOTE: mark this nursery as having | ||||
|                         # implicitly started the root actor so | ||||
|                         # that `._runtime` machinery can avoid | ||||
|                         # certain teardown synchronization | ||||
|                         # blocking/waits and any associated (warn) | ||||
|                         # logging when it's known that this | ||||
|                         # nursery shouldn't be exited before the | ||||
|                         # root actor is. | ||||
|                         an._implicit_runtime_started = True | ||||
|                         yield an | ||||
|                 finally: | ||||
|                     anursery.exited.set() | ||||
|                     # XXX: this event will be set after the root actor | ||||
|                     # runtime is already torn down, so we want to | ||||
|                     # avoid any blocking on it. | ||||
|                     an.exited.set() | ||||
| 
 | ||||
|         else:  # sub-nursery case | ||||
| 
 | ||||
|             try: | ||||
|                 async with _open_and_supervise_one_cancels_all_nursery( | ||||
|                     actor | ||||
|                 ) as anursery: | ||||
|                     yield anursery | ||||
|                 ) as an: | ||||
|                     yield an | ||||
|             finally: | ||||
|                 anursery.exited.set() | ||||
|                 an.exited.set() | ||||
| 
 | ||||
|     finally: | ||||
|         log.debug("Nursery teardown complete") | ||||
|         # show frame on any internal runtime-scope error | ||||
|         if ( | ||||
|             an | ||||
|             and not an.cancelled | ||||
|             and an._scope_error | ||||
|         ): | ||||
|             __tracebackhide__: bool = False | ||||
| 
 | ||||
|         msg: str = ( | ||||
|             'Actor-nursery exited\n' | ||||
|             f'|_{an}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # shutdown runtime if it was started | ||||
|         if implicit_runtime: | ||||
|             log.info("Shutting down actor tree") | ||||
|             # shutdown runtime if it was started and report noisily | ||||
|             # that we did so. | ||||
|             msg += '=> Shutting down actor runtime <=\n' | ||||
|             log.info(msg) | ||||
| 
 | ||||
|         else: | ||||
|             # keep noise low during std operation. | ||||
|             log.runtime(msg) | ||||
|  |  | |||
|  | @ -0,0 +1,96 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Various helpers/utils for auditing your `tractor` app and/or the | ||||
| core runtime. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import pathlib | ||||
| 
 | ||||
| import tractor | ||||
| from .pytest import ( | ||||
|     tractor_test as tractor_test | ||||
| ) | ||||
| from .fault_simulation import ( | ||||
|     break_ipc as break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def repodir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the repo directory. | ||||
| 
 | ||||
|     ''' | ||||
|     # step up out of `tractor/_testing/` to the repo's root dir | ||||
|     return pathlib.Path( | ||||
|         __file__ | ||||
| 
 | ||||
|     # 3 .parents bc: | ||||
|     # <._testing-pkg>.<tractor-pkg>.<git-repo-dir> | ||||
|     # /$HOME/../<tractor-repo-dir>/tractor/_testing/__init__.py | ||||
|     ).parent.parent.parent.absolute() | ||||
| 
 | ||||
| 
 | ||||
| def examples_dir() -> pathlib.Path: | ||||
|     ''' | ||||
|     Return the abspath to the examples directory as `pathlib.Path`. | ||||
| 
 | ||||
|     ''' | ||||
|     return repodir() / 'examples' | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd( | ||||
|     ex_name: str, | ||||
|     exs_subpath: str = 'debugging', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Generate a shell command suitable to pass to ``pexpect.spawn()``. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = ( | ||||
|         examples_dir() | ||||
|         / exs_subpath | ||||
|         / f'{ex_name}.py' | ||||
|     ) | ||||
|     return ' '.join([ | ||||
|         'python', | ||||
|         str(script_path) | ||||
|     ]) | ||||
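 | ||||
 | ||||
| # NOTE: a usage sketch with `pexpect`; the `'asyncio_bp'` example | ||||
| # script name is illustrative: | ||||
| # | ||||
| #     import pexpect | ||||
| #     child = pexpect.spawn(mk_cmd('asyncio_bp')) | ||||
| #     child.expect(pexpect.EOF) | ||||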
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def expect_ctxc( | ||||
|     yay: bool, | ||||
|     reraise: bool = False, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Small acm to catch `ContextCancelled` errors when expected | ||||
|     below it in an `async with` block. | ||||
| 
 | ||||
|     ''' | ||||
|     if yay: | ||||
|         try: | ||||
|             yield | ||||
|             raise RuntimeError('Never raised ctxc?') | ||||
|         except tractor.ContextCancelled: | ||||
|             if reraise: | ||||
|                 raise | ||||
|             else: | ||||
|                 return | ||||
|     else: | ||||
|         yield | ||||
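A hedged usage sketch (the `portal` and remote task func are hypothetical, and exact cancellation semantics depend on the app; this assumes result retrieval re-raises the boxed ctxc):

.. code:: python

    from tractor._testing import expect_ctxc

    async def ensure_cancelled(portal, expect_cancel: bool):
        # when the remote side cancels we expect the boxed
        # `ContextCancelled` to bubble out of this block
        async with expect_ctxc(yay=expect_cancel, reraise=False):
            async with portal.open_context(
                self_cancelling_task,  # hypothetical remote ctx func
            ) as (ctx, _first):
                await ctx.result()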
|  | @ -0,0 +1,92 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| `pytest` utils helpers and plugins for testing `tractor`'s runtime | ||||
| and applications. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from tractor import ( | ||||
|     MsgStream, | ||||
| ) | ||||
| 
 | ||||
| async def break_ipc( | ||||
|     stream: MsgStream, | ||||
|     method: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| 
 | ||||
|     def_method: str = 'socket_close', | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     XXX: close the channel right after an error is raised, | ||||
|     purposely breaking the IPC transport to make sure the parent | ||||
|     doesn't get stuck in debug or hang on the connection join. | ||||
|     This more or less simulates an infinite msg-receive hang on | ||||
|     the other end. | ||||
| 
 | ||||
|     ''' | ||||
|     # close the channel via IPC protocol msging before | ||||
|     # any transport breakage | ||||
|     if pre_close: | ||||
|         await stream.aclose() | ||||
| 
 | ||||
|     method: str = method or def_method | ||||
|     print( | ||||
|         '#################################\n' | ||||
|         'Simulating CHILD-side IPC BREAK!\n' | ||||
|         f'method: {method}\n' | ||||
|         f'pre `.aclose()`: {pre_close}\n' | ||||
|         '#################################\n' | ||||
|     ) | ||||
| 
 | ||||
|     match method: | ||||
|         case 'socket_close': | ||||
|             await stream._ctx.chan.transport.stream.aclose() | ||||
| 
 | ||||
|         case 'socket_eof': | ||||
|             # NOTE: `trio` does the following underneath this | ||||
|             # call in `src/trio/_highlevel_socket.py`: | ||||
|             # `Stream.socket.shutdown(tsocket.SHUT_WR)` | ||||
|             await stream._ctx.chan.transport.stream.send_eof() | ||||
| 
 | ||||
|         # TODO: remove since now this will be invalid with our | ||||
|         # new typed msg spec? | ||||
|         # case 'msg': | ||||
|         #     await stream._ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: the actual real-world simulated cases like | ||||
|         # transport layer hangs and/or lower layer two-generals type | ||||
|         # scenarios.. | ||||
|         # | ||||
|         # -[ ] already have some issues for this general testing | ||||
|         # area: | ||||
|         #  - https://github.com/goodboy/tractor/issues/97 | ||||
|         #  - https://github.com/goodboy/tractor/issues/124 | ||||
|         #   - PR from @guille: | ||||
|         #     https://github.com/goodboy/tractor/pull/149 | ||||
|         # case 'hang': | ||||
|         # TODO: framework research: | ||||
|         # | ||||
|         # - https://github.com/GuoTengda1993/pynetem | ||||
|         # - https://github.com/shopify/toxiproxy | ||||
|         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html | ||||
| 
 | ||||
|         case _: | ||||
|             raise RuntimeError( | ||||
|                 f'IPC break method unsupported: {method}' | ||||
|             ) | ||||
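As a usage sketch, a child-side streaming task can sever its own transport mid-stream to simulate a hard network failure (the task func and payload names are hypothetical):

.. code:: python

    import tractor
    from tractor._testing import break_ipc

    @tractor.context
    async def child(ctx: tractor.Context) -> None:
        await ctx.started()
        async with ctx.open_stream() as stream:
            await stream.send('one')
            # sever the underlying socket so the parent sees a broken
            # transport instead of a graceful stream close
            await break_ipc(
                stream=stream,
                method='socket_close',
            )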
|  | @ -0,0 +1,113 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| `pytest` utils helpers and plugins for testing `tractor`'s runtime | ||||
| and applications. | ||||
| 
 | ||||
| ''' | ||||
| from functools import ( | ||||
|     partial, | ||||
|     wraps, | ||||
| ) | ||||
| import inspect | ||||
| import platform | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
| def tractor_test(fn): | ||||
|     ''' | ||||
|     Decorator for async test funcs to present them as "native" | ||||
|     looking sync funcs runnable by `pytest` using `trio.run()`. | ||||
| 
 | ||||
|     Use: | ||||
| 
 | ||||
|     @tractor_test | ||||
|     async def test_whatever(): | ||||
|         await ... | ||||
| 
 | ||||
|     If the following fixtures: | ||||
| 
 | ||||
|         - ``reg_addr`` (a socket addr tuple where arbiter is listening) | ||||
|         - ``loglevel`` (logging level passed to tractor internals) | ||||
|         - ``start_method`` (subprocess spawning backend) | ||||
| 
 | ||||
|     are defined in the `pytest` fixture space they will be automatically | ||||
|     injected into tests declaring these funcargs. | ||||
|     ''' | ||||
|     @wraps(fn) | ||||
|     def wrapper( | ||||
|         *args, | ||||
|         loglevel=None, | ||||
|         reg_addr=None, | ||||
|         start_method: str|None = None, | ||||
|         debug_mode: bool = False, | ||||
|         **kwargs | ||||
|     ): | ||||
|         # __tracebackhide__ = True | ||||
| 
 | ||||
|         # NOTE: inject any test-func-declared fixture | ||||
|         # names by manually checking! | ||||
|         if 'reg_addr' in inspect.signature(fn).parameters: | ||||
|             # injects test suite fixture value to test as well | ||||
|             # as `run()` | ||||
|             kwargs['reg_addr'] = reg_addr | ||||
| 
 | ||||
|         if 'loglevel' in inspect.signature(fn).parameters: | ||||
|             # allows test suites to define a 'loglevel' fixture | ||||
|             # that activates the internal logging | ||||
|             kwargs['loglevel'] = loglevel | ||||
| 
 | ||||
|         if start_method is None: | ||||
|             if platform.system() == "Windows": | ||||
|                 start_method = 'trio' | ||||
| 
 | ||||
|         if 'start_method' in inspect.signature(fn).parameters: | ||||
|             # set of subprocess spawning backends | ||||
|             kwargs['start_method'] = start_method | ||||
| 
 | ||||
|         if 'debug_mode' in inspect.signature(fn).parameters: | ||||
|             # pass through the `debug_mode` fixture value | ||||
|             kwargs['debug_mode'] = debug_mode | ||||
| 
 | ||||
| 
 | ||||
|         if kwargs: | ||||
| 
 | ||||
|             # use explicit root actor start | ||||
|             async def _main(): | ||||
|                 async with tractor.open_root_actor( | ||||
|                     # **kwargs, | ||||
|                     registry_addrs=[reg_addr] if reg_addr else None, | ||||
|                     loglevel=loglevel, | ||||
|                     start_method=start_method, | ||||
| 
 | ||||
|                     # TODO: only enable when pytest is passed --pdb | ||||
|                     debug_mode=debug_mode, | ||||
| 
 | ||||
|                 ): | ||||
|                     await fn(*args, **kwargs) | ||||
| 
 | ||||
|             main = _main | ||||
| 
 | ||||
|         else: | ||||
|             # use implicit root actor start | ||||
|             main = partial(fn, *args, **kwargs) | ||||
| 
 | ||||
|         return trio.run(main) | ||||
| 
 | ||||
|     return wrapper | ||||
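A minimal sketch of a decorated test; the fixture values are expected to come from the suite's `conftest.py`:

.. code:: python

    import tractor
    from tractor._testing import tractor_test

    @tractor_test
    async def test_root_boots(loglevel, start_method):
        # the wrapper has already opened a root actor configured
        # with these fixture values before calling this body
        assert tractor.current_actor().is_arbiter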
|  | @ -0,0 +1,43 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Runtime "developer experience" utils and addons to aid our | ||||
| (advanced) users and core devs in building distributed applications | ||||
| and working with/on the actor runtime. | ||||
| 
 | ||||
| """ | ||||
| from ._debug import ( | ||||
|     maybe_wait_for_debugger as maybe_wait_for_debugger, | ||||
|     acquire_debug_lock as acquire_debug_lock, | ||||
|     breakpoint as breakpoint, | ||||
|     pause as pause, | ||||
|     pause_from_sync as pause_from_sync, | ||||
|     sigint_shield as sigint_shield, | ||||
|     open_crash_handler as open_crash_handler, | ||||
|     maybe_open_crash_handler as maybe_open_crash_handler, | ||||
|     maybe_init_greenback as maybe_init_greenback, | ||||
|     post_mortem as post_mortem, | ||||
|     mk_pdb as mk_pdb, | ||||
| ) | ||||
| from ._stackscope import ( | ||||
|     enable_stack_on_sig as enable_stack_on_sig, | ||||
| ) | ||||
| from .pformat import ( | ||||
|     add_div as add_div, | ||||
|     pformat_caller_frame as pformat_caller_frame, | ||||
|     pformat_boxed_tb as pformat_boxed_tb, | ||||
| ) | ||||
										
											
File diff suppressed because it is too large
											
										
									
								
							|  | @ -0,0 +1,303 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Tools for code-object annotation, introspection and mutation | ||||
| as it pertains to improving the grok-ability of our runtime! | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import inspect | ||||
| from types import ( | ||||
|     FrameType, | ||||
|     FunctionType, | ||||
|     MethodType, | ||||
|     # CodeType, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Type, | ||||
| ) | ||||
| 
 | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
|     NamespacePath, | ||||
| ) | ||||
| import wrapt | ||||
| 
 | ||||
| 
 | ||||
| # TODO: yeah, i don't love this and we should prolly just | ||||
| # write a decorator that actually keeps a stupid ref to the func | ||||
| # obj.. | ||||
| def get_class_from_frame(fr: FrameType) -> ( | ||||
|     FunctionType | ||||
|     |MethodType | ||||
| ): | ||||
|     ''' | ||||
|     Attempt to get the function (or method) reference | ||||
|     from a given `FrameType`. | ||||
| 
 | ||||
|     Verbatim from an SO: | ||||
|     https://stackoverflow.com/a/2220759 | ||||
| 
 | ||||
|     ''' | ||||
|     args, _, _, value_dict = inspect.getargvalues(fr) | ||||
| 
 | ||||
|     # check whether the first parameter of the frame's function | ||||
|     # is named 'self' | ||||
|     if ( | ||||
|         len(args) | ||||
|         and | ||||
|         # TODO: other cases for `@classmethod` etc..?) | ||||
|         args[0] == 'self' | ||||
|     ): | ||||
|         # in that case, 'self' will be referenced in value_dict | ||||
|         instance: object = value_dict.get('self') | ||||
|         if instance: | ||||
|             # return its class | ||||
|             return getattr( | ||||
|                 instance, | ||||
|                 '__class__', | ||||
|                 None, | ||||
|             ) | ||||
| 
 | ||||
|     # return None otherwise | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| def get_ns_and_func_from_frame( | ||||
|     frame: FrameType, | ||||
| ) -> tuple[dict, Callable]: | ||||
|     ''' | ||||
|     Return the corresponding function object reference from | ||||
|     a `FrameType`, along with its parent namespace `dict`. | ||||
| 
 | ||||
|     ''' | ||||
|     ns: dict[str, Any] | ||||
| 
 | ||||
|     # for a method, go up a frame and lookup the name in locals() | ||||
|     if '.' in (qualname := frame.f_code.co_qualname): | ||||
|         cls_name, _, func_name = qualname.partition('.') | ||||
|         ns = frame.f_back.f_locals[cls_name].__dict__ | ||||
| 
 | ||||
|     else: | ||||
|         func_name: str = frame.f_code.co_name | ||||
|         ns = frame.f_globals | ||||
| 
 | ||||
|     return ( | ||||
|         ns, | ||||
|         ns[func_name], | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def func_ref_from_frame( | ||||
|     frame: FrameType, | ||||
| ) -> Callable: | ||||
|     func_name: str = frame.f_code.co_name | ||||
|     try: | ||||
|         return frame.f_globals[func_name] | ||||
|     except KeyError: | ||||
|         cls: Type|None = get_class_from_frame(frame) | ||||
|         if cls: | ||||
|             return getattr( | ||||
|                 cls, | ||||
|                 func_name, | ||||
|             ) | ||||
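A quick, self-contained sanity check of the lookup logic:

.. code:: python

    import inspect

    def _probe():
        # resolve this very function back from its own live frame
        return func_ref_from_frame(inspect.currentframe())

    assert _probe() is _probe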
| 
 | ||||
| 
 | ||||
| class CallerInfo(pretty_struct.Struct): | ||||
|     # https://docs.python.org/dev/reference/datamodel.html#frame-objects | ||||
|     # https://docs.python.org/dev/library/inspect.html#the-interpreter-stack | ||||
|     _api_frame: FrameType | ||||
| 
 | ||||
|     @property | ||||
|     def api_frame(self) -> FrameType: | ||||
|         try: | ||||
|             self._api_frame.clear() | ||||
|         except RuntimeError: | ||||
|             # log.warning( | ||||
|             print( | ||||
|                 f'Frame {self._api_frame} for {self.api_func} is still active!' | ||||
|             ) | ||||
| 
 | ||||
|         return self._api_frame | ||||
| 
 | ||||
|     _api_func: Callable | ||||
| 
 | ||||
|     @property | ||||
|     def api_func(self) -> Callable: | ||||
|         return self._api_func | ||||
| 
 | ||||
|     _caller_frames_up: int|None = 1 | ||||
|     _caller_frame: FrameType|None = None  # cached after first stack scan | ||||
| 
 | ||||
|     @property | ||||
|     def api_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = self.api_func | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
|     @property | ||||
|     def caller_frame(self) -> FrameType: | ||||
| 
 | ||||
|         # if not already cached, scan up stack explicitly by | ||||
|         # configured count. | ||||
|         if not self._caller_frame: | ||||
|             if self._caller_frames_up: | ||||
|                 caller_frame: FrameType|None = self.api_frame | ||||
|                 for _ in range(self._caller_frames_up): | ||||
|                     # walk up one frame per configured count | ||||
|                     caller_frame = caller_frame.f_back | ||||

 | ||||
|                 if not caller_frame: | ||||
|                     raise ValueError( | ||||
|                         f'No frame exists {self._caller_frames_up} up from\n' | ||||
|                         f'{self.api_frame} @ {self.api_nsp}\n' | ||||
|                     ) | ||||
| 
 | ||||
|             self._caller_frame = caller_frame | ||||
| 
 | ||||
|         return self._caller_frame | ||||
| 
 | ||||
|     @property | ||||
|     def caller_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = func_ref_from_frame(self.caller_frame) | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
| 
 | ||||
| def find_caller_info( | ||||
|     dunder_var: str = '__runtimeframe__', | ||||
|     iframes: int = 1, | ||||
|     check_frame_depth: bool = True, | ||||
| 
 | ||||
| ) -> CallerInfo|None: | ||||
|     ''' | ||||
|     Scan up the callstack for a frame with a `dunder_var: str` variable | ||||
|     and return caller info for the frame `iframes` above it. | ||||
| 
 | ||||
|     By default we scan for a `__runtimeframe__` scope var which | ||||
|     denotes a `tractor` API above which (one frame up) is "user | ||||
|     app code" which "called into" the `tractor` method or func. | ||||
| 
 | ||||
|     TODO: ex with `Portal.open_context()` | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: use this instead? | ||||
|     # https://docs.python.org/3/library/inspect.html#inspect.getouterframes | ||||
|     frames: list[inspect.FrameInfo] = inspect.stack() | ||||
|     for fi in frames: | ||||
|         assert ( | ||||
|             fi.function | ||||
|             == | ||||
|             fi.frame.f_code.co_name | ||||
|         ) | ||||
|         this_frame: FrameType = fi.frame | ||||
|         dunder_val: int|None = this_frame.f_locals.get(dunder_var) | ||||
|         if dunder_val: | ||||
|             go_up_iframes: int = ( | ||||
|                 dunder_val  # could be 0 or `True` i guess? | ||||
|                 or | ||||
|                 iframes | ||||
|             ) | ||||
|             rt_frame: FrameType = fi.frame | ||||
|             call_frame = rt_frame | ||||
|             for i in range(go_up_iframes): | ||||
|                 call_frame = call_frame.f_back | ||||
| 
 | ||||
|             return CallerInfo( | ||||
|                 _api_frame=rt_frame, | ||||
|                 _api_func=func_ref_from_frame(rt_frame), | ||||
|                 _caller_frames_up=go_up_iframes, | ||||
|             ) | ||||
| 
 | ||||
|     return None | ||||
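An illustrative sketch of the sentinel protocol (the function names here are hypothetical, standing in for real runtime endpoints):

.. code:: python

    def some_runtime_api() -> CallerInfo|None:
        # sentinel scanned for by `find_caller_info()`; the value is
        # how many frames up from here the "user code" frame lives.
        __runtimeframe__: int = 1
        return find_caller_info()

    def user_code() -> None:
        info = some_runtime_api()
        # `info.api_frame` is `some_runtime_api()`'s frame and
        # `info.caller_frame` should resolve back to this frame.
        assert info.api_func is some_runtime_api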
| 
 | ||||
| 
 | ||||
| _frame2callerinfo_cache: dict[FrameType, CallerInfo] = {} | ||||
| 
 | ||||
| 
 | ||||
| # TODO: -[x] move all this into new `.devx._frame_stack`! | ||||
| # -[ ] consider rename to _callstack? | ||||
| # -[ ] prolly create a `@runtime_api` dec? | ||||
| #   |_ @api_frame seems better? | ||||
| # -[ ] ^- make it capture and/or accept buncha optional | ||||
| #     meta-data like a fancier version of `@pdbp.hideframe`. | ||||
| # | ||||
| def api_frame( | ||||
|     wrapped: Callable|None = None, | ||||
|     *, | ||||
|     caller_frames_up: int = 1, | ||||
| 
 | ||||
| ) -> Callable: | ||||
| 
 | ||||
|     # handle the decorator called WITHOUT () case, | ||||
|     # i.e. just @api_frame, NOT @api_frame(extra=<blah>) | ||||
|     if wrapped is None: | ||||
|         return partial( | ||||
|             api_frame, | ||||
|             caller_frames_up=caller_frames_up, | ||||
|         ) | ||||
| 
 | ||||
|     @wrapt.decorator | ||||
|     async def wrapper( | ||||
|         wrapped: Callable, | ||||
|         instance: object, | ||||
|         args: tuple, | ||||
|         kwargs: dict, | ||||
|     ): | ||||
|         # maybe cache the API frame for this call | ||||
|         global _frame2callerinfo_cache | ||||
|         this_frame: FrameType = inspect.currentframe() | ||||
|         api_frame: FrameType = this_frame.f_back | ||||
| 
 | ||||
|         if not _frame2callerinfo_cache.get(api_frame): | ||||
|             _frame2callerinfo_cache[api_frame] = CallerInfo( | ||||
|                 _api_frame=api_frame, | ||||
|                 _api_func=wrapped, | ||||
|                 _caller_frames_up=caller_frames_up, | ||||
|             ) | ||||
| 
 | ||||
|         return wrapped(*args, **kwargs) | ||||
| 
 | ||||
|     # annotate the function as an "api function", meaning it is | ||||
|     # a function for which the function above it in the call stack should be | ||||
|     # non-`tractor` code aka "user code"; also stash a ref to | ||||
|     # the global frame cache for easy lookup from a given | ||||
|     # func-instance. | ||||
|     wrapped._call_infos: dict[FrameType, CallerInfo] = _frame2callerinfo_cache | ||||
|     wrapped.__api_func__: bool = True | ||||
|     return wrapper(wrapped) | ||||
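Decoration then looks like the following sketch (`my_api_func` is a hypothetical runtime endpoint):

.. code:: python

    @api_frame
    async def my_api_func() -> None:
        ...

    # the deco marks the (proxied) func and shares the global
    # frame-info cache for call-site introspection
    assert my_api_func.__api_func__
    assert my_api_func._call_infos is _frame2callerinfo_cache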
| 
 | ||||
| 
 | ||||
| # TODO: something like this instead of the adhoc frame-unhiding | ||||
| # blocks all over the runtime!! XD | ||||
| # -[ ] ideally we can expect a certain error (set) and if something | ||||
| #     else is raised then all frames below the wrapped one will be | ||||
| #     un-hidden via `__tracebackhide__: bool = False`. | ||||
| # |_ might need to dynamically mutate the code objs like | ||||
| #    `pdbp.hideframe()` does? | ||||
| # -[ ] use this as a `@acm` decorator as introed in 3.10? | ||||
| # @acm | ||||
| # async def unhide_frame_when_not( | ||||
| #     error_set: set[BaseException], | ||||
| # ) -> TracebackType: | ||||
| #     ... | ||||
|  | @ -0,0 +1,239 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| The fundamental cross process SC abstraction: an inter-actor, | ||||
| cancel-scope linked task "context". | ||||
| 
 | ||||
| A ``Context`` is very similar to the ``trio.Nursery.cancel_scope`` built | ||||
| into each ``trio.Nursery`` except it links the lifetimes of memory space | ||||
| disjoint, parallel executing tasks in separate actors. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| # from functools import partial | ||||
| from threading import ( | ||||
|     current_thread, | ||||
|     Thread, | ||||
|     RLock, | ||||
| ) | ||||
| import multiprocessing as mp | ||||
| from signal import ( | ||||
|     signal, | ||||
|     getsignal, | ||||
|     SIGUSR1, | ||||
| ) | ||||
| # import traceback | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import trio | ||||
| from tractor import ( | ||||
|     _state, | ||||
|     log as logmod, | ||||
| ) | ||||
| 
 | ||||
| log = logmod.get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._spawn import ProcessType | ||||
|     from tractor import ( | ||||
|         Actor, | ||||
|         ActorNursery, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @trio.lowlevel.disable_ki_protection | ||||
| def dump_task_tree() -> None: | ||||
|     ''' | ||||
|     Do a classic `stackscope.extract()` task-tree dump to console at | ||||
|     `.devx()` level. | ||||
| 
 | ||||
|     ''' | ||||
|     import stackscope | ||||
|     tree_str: str = str( | ||||
|         stackscope.extract( | ||||
|             trio.lowlevel.current_root_task(), | ||||
|             recurse_child_tasks=True | ||||
|         ) | ||||
|     ) | ||||
|     actor: Actor = _state.current_actor() | ||||
|     thr: Thread = current_thread() | ||||
|     log.devx( | ||||
|         f'Dumping `stackscope` tree for actor\n' | ||||
|         f'{actor.uid}:\n' | ||||
|         f'|_{mp.current_process()}\n' | ||||
|         f'  |_{thr}\n' | ||||
|         f'    |_{actor}\n\n' | ||||
| 
 | ||||
|         # start-of-trace-tree delimiter (mostly for testing) | ||||
|         '------ - ------\n' | ||||
|         '\n' | ||||
|         + | ||||
|         f'{tree_str}\n' | ||||
|         + | ||||
|         # end-of-trace-tree delimiter (mostly for testing) | ||||
|         f'\n' | ||||
|         f'------ {actor.uid!r} ------\n' | ||||
|     ) | ||||
|     # TODO: can remove this right? | ||||
|     # -[ ] was original code from author | ||||
|     # | ||||
|     # print( | ||||
|     #     'DUMPING FROM PRINT\n' | ||||
|     #     + | ||||
|     #     content | ||||
|     # ) | ||||
|     # import logging | ||||
|     # try: | ||||
|     #     with open("/dev/tty", "w") as tty: | ||||
|     #         tty.write(tree_str) | ||||
|     # except BaseException: | ||||
|     #     logging.getLogger( | ||||
|     #         "task_tree" | ||||
|     #     ).exception("Error printing task tree") | ||||
| 
 | ||||
| _handler_lock = RLock() | ||||
| _tree_dumped: bool = False | ||||
| 
 | ||||
| 
 | ||||
| def dump_tree_on_sig( | ||||
|     sig: int, | ||||
|     frame: object, | ||||
| 
 | ||||
|     relay_to_subs: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _tree_dumped, _handler_lock | ||||
|     with _handler_lock: | ||||
|         if _tree_dumped: | ||||
|             log.warning( | ||||
|                 'Already dumped for this actor...??' | ||||
|             ) | ||||
|             return | ||||
| 
 | ||||
|         _tree_dumped = True | ||||
| 
 | ||||
|         # actor: Actor = _state.current_actor() | ||||
|         log.devx( | ||||
|             'Trying to dump `stackscope` tree..\n' | ||||
|         ) | ||||
|         try: | ||||
|             dump_task_tree() | ||||
|             # await actor._service_n.start_soon( | ||||
|             #     partial( | ||||
|             #         trio.to_thread.run_sync, | ||||
|             #         dump_task_tree, | ||||
|             #     ) | ||||
|             # ) | ||||
|             # trio.lowlevel.current_trio_token().run_sync_soon( | ||||
|             #     dump_task_tree | ||||
|             # ) | ||||
| 
 | ||||
|         except RuntimeError: | ||||
|             log.exception( | ||||
|                 'Failed to dump `stackscope` tree..\n' | ||||
|             ) | ||||
|             # not in async context -- print a normal traceback | ||||
|             # traceback.print_stack() | ||||
|             raise | ||||
| 
 | ||||
|         except BaseException: | ||||
|             log.exception( | ||||
|                 'Failed to dump `stackscope` tree..\n' | ||||
|             ) | ||||
|             raise | ||||
| 
 | ||||
|         log.devx( | ||||
|             'Supposedly we dumped just fine..?' | ||||
|         ) | ||||
| 
 | ||||
|     if not relay_to_subs: | ||||
|         return | ||||
| 
 | ||||
|     an: ActorNursery | ||||
|     for an in _state.current_actor()._actoruid2nursery.values(): | ||||
|         subproc: ProcessType | ||||
|         subactor: Actor | ||||
|         for subactor, subproc, _ in an._children.values(): | ||||
|             log.warning( | ||||
|                 f'Relaying `SIGUSR1`[{sig}] to sub-actor\n' | ||||
|                 f'{subactor}\n' | ||||
|                 f' |_{subproc}\n' | ||||
|             ) | ||||
| 
 | ||||
|             # bc of course stdlib can't have a std API.. XD | ||||
|             match subproc: | ||||
|                 case trio.Process(): | ||||
|                     subproc.send_signal(sig) | ||||
| 
 | ||||
|                 case mp.Process(): | ||||
|                     subproc._send_signal(sig) | ||||
| 
 | ||||
| 
 | ||||
| def enable_stack_on_sig( | ||||
|     sig: int = SIGUSR1, | ||||
| ) -> ModuleType: | ||||
|     ''' | ||||
|     Enable `stackscope` tracing on reception of a signal; by | ||||
|     default this is SIGUSR1. | ||||
| 
 | ||||
|     HOT TIP: a task/ctx-tree dump can be triggered from a shell with | ||||
|     fancy cmds. | ||||
| 
 | ||||
|     For ex. from `bash` using `pgrep` and cmd-substitution | ||||
|     (https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution) | ||||
|     you could use: | ||||
| 
 | ||||
|     >> kill -SIGUSR1 $(pgrep -f '<cmd>') | ||||
| 
 | ||||
|     Or with `xonsh` (which has different capture-from-subproc syntax) | ||||
| 
 | ||||
|     >> kill -SIGUSR1 @$(pgrep -f '<cmd>') | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         import stackscope | ||||
|     except ImportError: | ||||
|         log.warning( | ||||
|             '`stackscope` not installed for use in debug mode!' | ||||
|         ) | ||||
|         return None | ||||
| 
 | ||||
|     handler: Callable|int = getsignal(sig) | ||||
|     if handler is dump_tree_on_sig: | ||||
|         log.devx( | ||||
|             'A `SIGUSR1` handler already exists?\n' | ||||
|             f'|_ {handler!r}\n' | ||||
|         ) | ||||
|         return | ||||
| 
 | ||||
|     signal( | ||||
|         sig, | ||||
|         dump_tree_on_sig, | ||||
|     ) | ||||
|     log.devx( | ||||
|         'Enabling trace-trees on `SIGUSR1` ' | ||||
|         'since `stackscope` is installed @ \n' | ||||
|         f'{stackscope!r}\n\n' | ||||
|         f'With `SIGUSR1` handler\n' | ||||
|         f'|_{dump_tree_on_sig}\n' | ||||
|     ) | ||||
|     return stackscope | ||||
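Programmatic usage is a one-liner from inside any actor, sketched here for a root actor (assuming `stackscope` is installed):

.. code:: python

    import trio
    import tractor
    from tractor.devx import enable_stack_on_sig

    async def main():
        async with tractor.open_root_actor():
            enable_stack_on_sig()  # `kill -SIGUSR1 <pid>` now dumps the tree
            await trio.sleep_forever()

    if __name__ == '__main__':
        trio.run(main)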
|  | @ -0,0 +1,129 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| CLI framework extensions for hacking on the actor runtime. | ||||
| 
 | ||||
| Currently popular frameworks supported are: | ||||
| 
 | ||||
|   - `typer` via the `@callback` API | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
| ) | ||||
| from typing_extensions import Annotated | ||||
| 
 | ||||
| import typer | ||||
| 
 | ||||
| 
 | ||||
| _runtime_vars: dict[str, Any] = {} | ||||
| 
 | ||||
| 
 | ||||
| def load_runtime_vars( | ||||
|     ctx: typer.Context, | ||||
|     callback: Callable, | ||||
|     pdb: bool = False,  # --pdb | ||||
|     ll: Annotated[ | ||||
|         str, | ||||
|         typer.Option( | ||||
|             '--loglevel', | ||||
|             '-l', | ||||
|             help='BigD logging level', | ||||
|         ), | ||||
|     ] = 'cancel',  # -l info | ||||
| ): | ||||
|     ''' | ||||
|     Maybe engage crash handling with `pdbp` when code inside | ||||
|     a `typer` CLI endpoint cmd raises. | ||||
| 
 | ||||
|     To use this callback simply take your `app = typer.Typer()` instance | ||||
|     and decorate this function with it like so: | ||||
| 
 | ||||
|     .. code:: python | ||||
| 
 | ||||
|         from tractor.devx import cli | ||||
| 
 | ||||
|         app = typer.Typer() | ||||
| 
 | ||||
|         # manual decoration to hook into `click`'s context system! | ||||
|         cli.load_runtime_vars = app.callback( | ||||
|             invoke_without_command=True, | ||||
|         ) | ||||
| 
 | ||||
|     And then you can use the now augmented `click` CLI context as so, | ||||
| 
 | ||||
|     .. code:: python | ||||
| 
 | ||||
|         @app.command( | ||||
|             context_settings={ | ||||
|                 "allow_extra_args": True, | ||||
|                 "ignore_unknown_options": True, | ||||
|             } | ||||
|         ) | ||||
|         def my_cli_cmd( | ||||
|             ctx: typer.Context, | ||||
|         ): | ||||
|             rtvars: dict = ctx.runtime_vars | ||||
|             pdb: bool = rtvars['pdb'] | ||||
| 
 | ||||
|             with tractor.devx.cli.maybe_open_crash_handler(pdb=pdb): | ||||
|                 trio.run( | ||||
|                     partial( | ||||
|                         my_tractor_main_task_func, | ||||
|                         debug_mode=pdb, | ||||
|                         loglevel=rtvars['ll'], | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|     which will enable log level and debug mode globally for the entire | ||||
|     `tractor` + `trio` runtime thereafter! | ||||
| 
 | ||||
|     Bo | ||||
| 
 | ||||
|     ''' | ||||
|     global _runtime_vars | ||||
|     _runtime_vars |= { | ||||
|         'pdb': pdb, | ||||
|         'll': ll, | ||||
|     } | ||||
| 
 | ||||
|     ctx.runtime_vars: dict[str, Any] = _runtime_vars | ||||
|     print( | ||||
|         f'`typer` sub-cmd: {ctx.invoked_subcommand}\n' | ||||
|         f'`tractor` runtime vars: {_runtime_vars}' | ||||
|     ) | ||||
| 
 | ||||
|     # XXX NOTE XXX: hackzone.. if no sub-cmd is specified (the | ||||
|     # default if the user just invokes `bigd`) then we simply | ||||
|     # invoke the sole `_bigd()` cmd passing in the "parent" | ||||
|     # typer.Context directly to that call since we're treating it | ||||
|     # as a "non sub-command" or wtv.. | ||||
|     # TODO: ideally typer would have some kinda built-in way to get | ||||
|     # this behaviour without having to construct and manually | ||||
|     # invoke our own cmd.. | ||||
|     if ( | ||||
|         ctx.invoked_subcommand is None | ||||
|         or ctx.invoked_subcommand == callback.__name__ | ||||
|     ): | ||||
|         cmd: typer.core.TyperCommand = typer.core.TyperCommand( | ||||
|             name='bigd', | ||||
|             callback=callback, | ||||
|         ) | ||||
|         ctx.params = {'ctx': ctx} | ||||
|         cmd.invoke(ctx) | ||||
|  | @ -0,0 +1,169 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Pretty formatters for use throughout the code base. | ||||
| Mostly handy for logging and exception message content. | ||||
| 
 | ||||
| ''' | ||||
| import textwrap | ||||
| import traceback | ||||
| 
 | ||||
| from trio import CancelScope | ||||
| 
 | ||||
| 
 | ||||
| def add_div( | ||||
|     message: str, | ||||
|     div_str: str = '------ - ------', | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Add a "divider string" to the input `message` with | ||||
|     a little math to center it underneath. | ||||
| 
 | ||||
|     ''' | ||||
|     div_offset: int = ( | ||||
|         round(len(message)/2)+1 | ||||
|         - | ||||
|         round(len(div_str)/2)+1 | ||||
|     ) | ||||
|     div_str: str = ( | ||||
|         '\n' + ' '*div_offset + f'{div_str}\n' | ||||
|     ) | ||||
|     return div_str | ||||
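For instance, centering the default divider under a log message:

.. code:: python

    msg: str = 'Actor runtime is shutting down!'
    # prints the message with '------ - ------' roughly centered below
    print(msg + add_div(message=msg))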
| 
 | ||||
| 
 | ||||
| def pformat_boxed_tb( | ||||
|     tb_str: str, | ||||
|     fields_str: str|None = None, | ||||
|     field_prefix: str = ' |_', | ||||
| 
 | ||||
|     tb_box_indent: int|None = None, | ||||
|     tb_body_indent: int = 1, | ||||
|     boxer_header: str = '-' | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Create a "boxed" looking traceback string. | ||||
| 
 | ||||
|     Useful for emphasizing traceback text content as being an | ||||
|     embedded attribute of some other object (like | ||||
|     a `RemoteActorError` or other boxing remote error shuttle | ||||
|     container). | ||||
| 
 | ||||
|     Any other parent/container "fields" can be passed in the | ||||
|     `fields_str` input along with other prefix/indent settings. | ||||
| 
 | ||||
|     ''' | ||||
|     if ( | ||||
|         fields_str | ||||
|         and | ||||
|         field_prefix | ||||
|     ): | ||||
|         fields: str = textwrap.indent( | ||||
|             fields_str, | ||||
|             prefix=field_prefix, | ||||
|         ) | ||||
|     else: | ||||
|         fields = fields_str or '' | ||||
| 
 | ||||
|     tb_body = tb_str | ||||
|     if tb_body_indent: | ||||
|         tb_body: str = textwrap.indent( | ||||
|             tb_str, | ||||
|             prefix=tb_body_indent * ' ', | ||||
|         ) | ||||
| 
 | ||||
|     tb_box: str = ( | ||||
|         f'|\n' | ||||
|         f' ------ {boxer_header} ------\n' | ||||
|         f'{tb_body}' | ||||
|         f' ------ {boxer_header} ------\n' | ||||
|         f'_|' | ||||
|     ) | ||||
|     tb_box_indent: str = ( | ||||
|         tb_box_indent | ||||
|         or | ||||
|         1 | ||||
| 
 | ||||
|         # (len(field_prefix)) | ||||
|         # ? ^-TODO-^ ? if you wanted another indent level | ||||
|     ) | ||||
|     if tb_box_indent > 0: | ||||
|         tb_box: str = textwrap.indent( | ||||
|             tb_box, | ||||
|             prefix=tb_box_indent * ' ', | ||||
|         ) | ||||
| 
 | ||||
|     return ( | ||||
|         fields | ||||
|         + | ||||
|         tb_box | ||||
|     ) | ||||
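A small demo boxing a live traceback (the parent "fields" content is made up):

.. code:: python

    import traceback

    try:
        1/0
    except ZeroDivisionError:
        print(pformat_boxed_tb(
            tb_str=traceback.format_exc(),
            fields_str='src_uid: example-actor\n',  # made-up parent field
            boxer_header='ZeroDivisionError',
        ))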
| 
 | ||||
| 
 | ||||
| def pformat_caller_frame( | ||||
|     stack_limit: int = 1, | ||||
|     box_tb: bool = True, | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Capture and return the traceback text content from | ||||
|     `stack_limit` call frames up. | ||||
| 
 | ||||
|     ''' | ||||
|     tb_str: str = ( | ||||
|         '\n'.join( | ||||
|             traceback.format_stack(limit=stack_limit) | ||||
|         ) | ||||
|     ) | ||||
|     if box_tb: | ||||
|         tb_str: str = pformat_boxed_tb( | ||||
|             tb_str=tb_str, | ||||
|             field_prefix='  ', | ||||
|         ) | ||||
|     return tb_str | ||||
| 
 | ||||
| 
 | ||||
| def pformat_cs( | ||||
|     cs: CancelScope, | ||||
|     var_name: str = 'cs', | ||||
|     field_prefix: str = ' |_', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Pretty format info about a `trio.CancelScope` including most | ||||
|     of its public state and `._cancel_status`. | ||||
| 
 | ||||
|     The output can be modified to show a "var name" for the | ||||
|     instance as a field prefix, just a simple str before each | ||||
|     line more or less. | ||||
| 
 | ||||
|     ''' | ||||
| 
 | ||||
|     fields: str = textwrap.indent( | ||||
|         ( | ||||
|             f'cancel_called = {cs.cancel_called}\n' | ||||
|             f'cancelled_caught = {cs.cancelled_caught}\n' | ||||
|             f'_cancel_status = {cs._cancel_status}\n' | ||||
|             f'shield = {cs.shield}\n' | ||||
|         ), | ||||
|         prefix=field_prefix, | ||||
|     ) | ||||
|     return ( | ||||
|         f'{var_name}: {cs}\n' | ||||
|         + | ||||
|         fields | ||||
|     ) | ||||
|  | @ -31,7 +31,7 @@ from typing import ( | |||
|     Callable, | ||||
| ) | ||||
| from functools import partial | ||||
| from async_generator import aclosing | ||||
| from contextlib import aclosing | ||||
| 
 | ||||
| import trio | ||||
| import wrapt | ||||
|  |  | |||
							
								
								
									
233 tractor/log.py
								
								
								
								
							|  | @ -21,6 +21,11 @@ Log like a forester! | |||
| from collections.abc import Mapping | ||||
| import sys | ||||
| import logging | ||||
| from logging import ( | ||||
|     LoggerAdapter, | ||||
|     Logger, | ||||
|     StreamHandler, | ||||
| ) | ||||
| import colorlog  # type: ignore | ||||
| 
 | ||||
| import trio | ||||
|  | @ -48,17 +53,20 @@ LOG_FORMAT = ( | |||
| 
 | ||||
| DATE_FORMAT = '%b %d %H:%M:%S' | ||||
| 
 | ||||
| LEVELS = { | ||||
| # FYI, ERROR is 40 | ||||
| # TODO: use a `bidict` to avoid the :155 check? | ||||
| CUSTOM_LEVELS: dict[str, int] = { | ||||
|     'TRANSPORT': 5, | ||||
|     'RUNTIME': 15, | ||||
|     'CANCEL': 16, | ||||
|     'DEVX': 17, | ||||
|     'CANCEL': 22, | ||||
|     'PDB': 500, | ||||
| } | ||||
| 
 | ||||
| STD_PALETTE = { | ||||
|     'CRITICAL': 'red', | ||||
|     'ERROR': 'red', | ||||
|     'PDB': 'white', | ||||
|     'DEVX': 'cyan', | ||||
|     'WARNING': 'yellow', | ||||
|     'INFO': 'green', | ||||
|     'CANCEL': 'yellow', | ||||
|  | @ -75,7 +83,7 @@ BOLD_PALETTE = { | |||
| 
 | ||||
| # TODO: this isn't showing the correct '{filename}' | ||||
| # as it did before.. | ||||
| class StackLevelAdapter(logging.LoggerAdapter): | ||||
| class StackLevelAdapter(LoggerAdapter): | ||||
| 
 | ||||
|     def transport( | ||||
|         self, | ||||
|  | @ -83,7 +91,8 @@ class StackLevelAdapter(logging.LoggerAdapter): | |||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         IPC level msg-ing. | ||||
|         IPC transport level msg IO; generally anything below | ||||
|         `._ipc.Channel` and friends. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.log(5, msg) | ||||
|  | @ -99,29 +108,67 @@ class StackLevelAdapter(logging.LoggerAdapter): | |||
|         msg: str, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cancellation logging, mostly for runtime reporting. | ||||
|         Cancellation sequencing, mostly for runtime reporting. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.log(16, msg) | ||||
|         return self.log( | ||||
|             level=22, | ||||
|             msg=msg, | ||||
|             # stacklevel=4, | ||||
|         ) | ||||
| 
 | ||||
|     def pdb( | ||||
|         self, | ||||
|         msg: str, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Debugger logging. | ||||
|         `pdb`-REPL (debugger) related statuses. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.log(500, msg) | ||||
| 
 | ||||
|     def log(self, level, msg, *args, **kwargs): | ||||
|         """ | ||||
|     def devx( | ||||
|         self, | ||||
|         msg: str, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         "Developer experience" sub-sys statuses. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.log(17, msg) | ||||
| 
 | ||||
|     def log( | ||||
|         self, | ||||
|         level, | ||||
|         msg, | ||||
|         *args, | ||||
|         **kwargs, | ||||
|     ): | ||||
|         ''' | ||||
|         Delegate a log call to the underlying logger, after adding | ||||
|         contextual information from this adapter instance. | ||||
|         """ | ||||
| 
 | ||||
|         NOTE: all custom level methods (above) delegate to this! | ||||
| 
 | ||||
|         ''' | ||||
|         if self.isEnabledFor(level): | ||||
|             stacklevel: int = 3 | ||||
|             if ( | ||||
|                 level in CUSTOM_LEVELS.values() | ||||
|             ): | ||||
|                 stacklevel: int = 4 | ||||
| 
 | ||||
|             # msg, kwargs = self.process(msg, kwargs) | ||||
|             self._log(level, msg, args, **kwargs) | ||||
|             self._log( | ||||
|                 level=level, | ||||
|                 msg=msg, | ||||
|                 args=args, | ||||
|                 # NOTE: not sure how this worked before but, it | ||||
|                 # seems with our custom level methods defined above | ||||
|                 # we do indeed (now) require another stack level?? | ||||
|                 stacklevel=stacklevel, | ||||
|                 **kwargs, | ||||
|             ) | ||||
| 
 | ||||
|     # LOL, the stdlib doesn't allow passing through ``stacklevel``.. | ||||
|     def _log( | ||||
|  | @ -134,12 +181,15 @@ class StackLevelAdapter(logging.LoggerAdapter): | |||
|         stack_info=False, | ||||
| 
 | ||||
|         # XXX: bit we added to show fileinfo from actual caller. | ||||
|         # this level then ``.log()`` then finally the caller's level.. | ||||
|         stacklevel=3, | ||||
|         # - this level | ||||
|         # - then ``.log()`` | ||||
|         # - then finally the caller's level.. | ||||
|         stacklevel=4, | ||||
|     ): | ||||
|         """ | ||||
|         ''' | ||||
|         Low-level log implementation, proxied to allow nested logger adapters. | ||||
|         """ | ||||
| 
 | ||||
|         ''' | ||||
|         return self.logger._log( | ||||
|             level, | ||||
|             msg, | ||||
|  | @ -151,8 +201,30 @@ class StackLevelAdapter(logging.LoggerAdapter): | |||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO IDEAs: | ||||
| # -[ ] move to `.devx.pformat`? | ||||
| # -[ ] do per task-name and actor-name color coding | ||||
| # -[ ] unique color per task-id and actor-uuid | ||||
| def pformat_task_uid( | ||||
|     id_part: str = 'tail' | ||||
| ): | ||||
|     ''' | ||||
|     Return a `str`-ified unique id for a `trio.Task` composed of its | ||||
|     `.name: str` and truncated `id()` output. | ||||
| 
 | ||||
|     ''' | ||||
|     task: trio.Task = trio.lowlevel.current_task() | ||||
|     tid: str = str(id(task)) | ||||
|     if id_part == 'tail': | ||||
|         tid_part: str = tid[-6:] | ||||
|     else: | ||||
|         tid_part: str = tid[:6] | ||||
| 
 | ||||
|     return f'{task.name}[{tid_part}]' | ||||
| 
 | ||||
| 
 | ||||
| _conc_name_getters = { | ||||
|     'task': lambda: trio.lowlevel.current_task().name, | ||||
|     'task': pformat_task_uid, | ||||
|     'actor': lambda: current_actor(), | ||||
|     'actor_name': lambda: current_actor().name, | ||||
|     'actor_uid': lambda: current_actor().uid[1][:6], | ||||
|  | @ -160,7 +232,10 @@ _conc_name_getters = { | |||
| 
 | ||||
| 
 | ||||
| class ActorContextInfo(Mapping): | ||||
|     "Dyanmic lookup for local actor and task names" | ||||
|     ''' | ||||
|     Dyanmic lookup for local actor and task names. | ||||
| 
 | ||||
|     ''' | ||||
|     _context_keys = ( | ||||
|         'task', | ||||
|         'actor', | ||||
|  | @ -183,33 +258,69 @@ class ActorContextInfo(Mapping): | |||
| 
 | ||||
| 
 | ||||
| def get_logger( | ||||
| 
 | ||||
|     name: str | None = None, | ||||
|     name: str|None = None, | ||||
|     _root_name: str = _proj_name, | ||||
| 
 | ||||
|     logger: Logger|None = None, | ||||
| 
 | ||||
|     # TODO, using `.config.dictConfig()` api? | ||||
|     # -[ ] SO answer with docs links | ||||
|     #  |_https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig | ||||
|     #  |_https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema | ||||
|     subsys_spec: str|None = None, | ||||
| 
 | ||||
| ) -> StackLevelAdapter: | ||||
|     '''Return the package log or a sub-logger for ``name`` if provided. | ||||
| 
 | ||||
|     ''' | ||||
|     log = rlog = logging.getLogger(_root_name) | ||||
|     log: Logger | ||||
|     log = rlog = logger or logging.getLogger(_root_name) | ||||
| 
 | ||||
|     if name and name != _proj_name: | ||||
|     if ( | ||||
|         name | ||||
|         and | ||||
|         name != _proj_name | ||||
|     ): | ||||
| 
 | ||||
|         # handling for modules that use ``get_logger(__name__)`` to | ||||
|         # avoid duplicate project-package token in msg output | ||||
|         rname, _, tail = name.partition('.') | ||||
|         if rname == _root_name: | ||||
|             name = tail | ||||
|         # NOTE: for handling for modules that use ``get_logger(__name__)`` | ||||
|         # we make the following stylistic choice: | ||||
|         # - always avoid duplicate project-package token | ||||
|         #   in msg output: i.e. tractor.tractor _ipc.py in header | ||||
|         #   looks ridiculous XD | ||||
|         # - never show the leaf module name in the {name} part | ||||
|         #   since in python the {filename} is always this same | ||||
|         #   module-file. | ||||
| 
 | ||||
|         sub_name: None|str = None | ||||
|         rname, _, sub_name = name.partition('.') | ||||
|         pkgpath, _, modfilename = sub_name.rpartition('.') | ||||
| 
 | ||||
|         # NOTE: for tractor itself never include the last level | ||||
|         # module key in the name such that something like: eg. | ||||
|         # 'tractor.trionics._broadcast` only includes the first | ||||
|         # 2 tokens in the (coloured) name part. | ||||
|         if rname == 'tractor': | ||||
|             sub_name = pkgpath | ||||
| 
 | ||||
|         if _root_name in sub_name: | ||||
|             duplicate, _, sub_name = sub_name.partition('.') | ||||
| 
 | ||||
|         if not sub_name: | ||||
|             log = rlog | ||||
|         else: | ||||
|             log = rlog.getChild(sub_name) | ||||
| 
 | ||||
|         log = rlog.getChild(name) | ||||
|         log.level = rlog.level | ||||
| 
 | ||||
|     # add our actor-task aware adapter which will dynamically look up | ||||
|     # the actor and task names at each log emit | ||||
|     logger = StackLevelAdapter(log, ActorContextInfo()) | ||||
|     logger = StackLevelAdapter( | ||||
|         log, | ||||
|         ActorContextInfo(), | ||||
|     ) | ||||
| 
 | ||||
|     # additional levels | ||||
|     for name, val in LEVELS.items(): | ||||
|     for name, val in CUSTOM_LEVELS.items(): | ||||
|         logging.addLevelName(val, name) | ||||
| 
 | ||||
|         # ensure customs levels exist as methods | ||||
|  | @ -219,28 +330,50 @@ def get_logger( | |||
| 
 | ||||
| 
 | ||||
| def get_console_log( | ||||
|     level: str | None = None, | ||||
|     level: str|None = None, | ||||
|     logger: Logger|None = None, | ||||
|     **kwargs, | ||||
| ) -> logging.LoggerAdapter: | ||||
|     '''Get the package logger and enable a handler which writes to stderr. | ||||

 | ||||
|     Yeah yeah, i know we can use ``DictConfig``. You do it. | ||||
|     ''' | ||||
|     log = get_logger(**kwargs)  # our root logger | ||||
|     logger = log.logger | ||||
| ) -> LoggerAdapter: | ||||
|     ''' | ||||
|     Get a `tractor`-style logging instance: a `Logger` wrapped in | ||||
|     a `StackLevelAdapter` which injects various concurrency-primitive | ||||
|     (process, thread, task) fields and enables a `StreamHandler` that | ||||
|     writes on stderr using `colorlog` formatting. | ||||

 | ||||
|     Yeah yeah, i know we can use `logging.config.dictConfig()`. You do it. | ||||

 | ||||
|     ''' | ||||
|     log = get_logger( | ||||
|         logger=logger, | ||||
|         **kwargs | ||||
|     )  # set a root logger | ||||
|     logger: Logger = log.logger | ||||
| 
 | ||||
|     if not level: | ||||
|         return log | ||||
| 
 | ||||
|     log.setLevel(level.upper() if not isinstance(level, int) else level) | ||||
|     log.setLevel( | ||||
|         level.upper() | ||||
|         if not isinstance(level, int) | ||||
|         else level | ||||
|     ) | ||||
| 
 | ||||
|     if not any( | ||||
|         handler.stream == sys.stderr  # type: ignore | ||||
|         for handler in logger.handlers if getattr(handler, 'stream', None) | ||||
|         for handler in logger.handlers if getattr( | ||||
|             handler, | ||||
|             'stream', | ||||
|             None, | ||||
|         ) | ||||
|     ): | ||||
|         handler = logging.StreamHandler() | ||||
|         fmt = LOG_FORMAT | ||||
|         # if logger: | ||||
|         #     fmt = None | ||||
| 
 | ||||
|         handler = StreamHandler() | ||||
|         formatter = colorlog.ColoredFormatter( | ||||
|             LOG_FORMAT, | ||||
|             fmt=fmt, | ||||
|             datefmt=DATE_FORMAT, | ||||
|             log_colors=STD_PALETTE, | ||||
|             secondary_log_colors=BOLD_PALETTE, | ||||
|  | @ -254,3 +387,23 @@ def get_console_log( | |||
| 
 | ||||
| def get_loglevel() -> str: | ||||
|     return _default_loglevel | ||||
| 
 | ||||
| 
 | ||||
| # global module logger for tractor itself | ||||
| log: StackLevelAdapter = get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| def at_least_level( | ||||
|     log: Logger|LoggerAdapter, | ||||
|     level: int|str, | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate to test if a given level is active. | ||||
| 
 | ||||
|     ''' | ||||
|     if isinstance(level, str): | ||||
|         level: int = CUSTOM_LEVELS[level.upper()] | ||||
| 
 | ||||
|     if log.getEffectiveLevel() <= level: | ||||
|         return True | ||||
|     return False | ||||
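This is handy for guarding expensive log-message construction (`expensive_report()` below is hypothetical):

.. code:: python

    log = get_logger(__name__)
    if at_least_level(log, 'CANCEL'):
        # only build the (potentially large) report when it will emit
        log.cancel('Cancellation report:\n' + expensive_report())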
|  |  | |||
|  | @ -1,80 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Built-in messaging patterns, types, APIs and helpers. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| # TODO: integration with our ``enable_modules: list[str]`` caps sys. | ||||
| 
 | ||||
| # ``pkgutil.resolve_name()`` internally uses | ||||
| # ``importlib.import_module()`` which can be filtered by inserting | ||||
| # a ``MetaPathFinder`` into ``sys.meta_path`` (which we could do before | ||||
| # entering the ``_runtime.process_messages()`` loop). | ||||
| # - https://github.com/python/cpython/blob/main/Lib/pkgutil.py#L645 | ||||
| # - https://stackoverflow.com/questions/1350466/preventing-python-code-from-importing-certain-modules | ||||
| #   - https://stackoverflow.com/a/63320902 | ||||
| #   - https://docs.python.org/3/library/sys.html#sys.meta_path | ||||
| 
 | ||||
| # the new "Implicit Namespace Packages" might be relevant? | ||||
| # - https://www.python.org/dev/peps/pep-0420/ | ||||
| 
 | ||||
| # add implicit serialized message type support so that paths can be | ||||
| # handed directly to IPC primitives such as streams and `Portal.run()` | ||||
| # calls: | ||||
| # - via ``msgspec``: | ||||
| #   - https://jcristharif.com/msgspec/api.html#struct | ||||
| #   - https://jcristharif.com/msgspec/extending.html | ||||
| # via ``msgpack-python``: | ||||
| # - https://github.com/msgpack/msgpack-python#packingunpacking-of-custom-data-type | ||||
| 
 | ||||
| from __future__ import annotations | ||||
| from pkgutil import resolve_name | ||||
| 
 | ||||
| 
 | ||||
| class NamespacePath(str): | ||||
|     ''' | ||||
|     A serializable description of a (function) Python object's location, | ||||
|     described by the target's module path and namespace key, meant as | ||||
|     a message-native "packet" that allows actors to point-and-load objects | ||||
|     by absolute reference. | ||||
| 
 | ||||
|     ''' | ||||
|     _ref: object = None | ||||
| 
 | ||||
|     def load_ref(self) -> object: | ||||
|         if self._ref is None: | ||||
|             self._ref = resolve_name(self) | ||||
|         return self._ref | ||||
| 
 | ||||
|     def to_tuple( | ||||
|         self, | ||||
| 
 | ||||
|     ) -> tuple[str, str]: | ||||
|         ref = self.load_ref() | ||||
|         return ref.__module__, getattr(ref, '__name__', '') | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_ref( | ||||
|         cls, | ||||
|         ref, | ||||
| 
 | ||||
|     ) -> NamespacePath: | ||||
|         return cls(':'.join( | ||||
|             (ref.__module__, | ||||
|              getattr(ref, '__name__', '')) | ||||
|         )) | ||||
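 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff): | ||||
| # round-trip a function ref through its str-native path form. | ||||
| def _demo_nsp_roundtrip() -> None: | ||||
|     nsp = NamespacePath.from_ref(resolve_name)  # 'pkgutil:resolve_name' | ||||
|     assert nsp.load_ref() is resolve_name | ||||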
|  | @ -0,0 +1,73 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Built-in messaging patterns, types, APIs and helpers. | ||||
| 
 | ||||
| ''' | ||||
| from typing import ( | ||||
|     TypeAlias, | ||||
| ) | ||||
| from .ptr import ( | ||||
|     NamespacePath as NamespacePath, | ||||
| ) | ||||
| from .pretty_struct import ( | ||||
|     Struct as Struct, | ||||
| ) | ||||
| from ._codec import ( | ||||
|     _def_msgspec_codec as _def_msgspec_codec, | ||||
|     _ctxvar_MsgCodec as _ctxvar_MsgCodec, | ||||
| 
 | ||||
|     apply_codec as apply_codec, | ||||
|     mk_codec as mk_codec, | ||||
|     MsgCodec as MsgCodec, | ||||
|     MsgDec as MsgDec, | ||||
|     current_codec as current_codec, | ||||
| ) | ||||
| # currently can't bc circular with `._context` | ||||
| # from ._ops import ( | ||||
| #     PldRx as PldRx, | ||||
| #     _drain_to_final_msg as _drain_to_final_msg, | ||||
| # ) | ||||
| 
 | ||||
| from .types import ( | ||||
|     PayloadMsg as PayloadMsg, | ||||
| 
 | ||||
|     Aid as Aid, | ||||
|     SpawnSpec as SpawnSpec, | ||||
| 
 | ||||
|     Start as Start, | ||||
|     StartAck as StartAck, | ||||
| 
 | ||||
|     Started as Started, | ||||
|     Yield as Yield, | ||||
|     Stop as Stop, | ||||
|     Return as Return, | ||||
|     CancelAck as CancelAck, | ||||
| 
 | ||||
|     Error as Error, | ||||
| 
 | ||||
|     # type-var for `.pld` field | ||||
|     PayloadT as PayloadT, | ||||
| 
 | ||||
|     # full msg class set from above as list | ||||
|     __msg_types__ as __msg_types__, | ||||
| 
 | ||||
|     # type-alias for union of all msgs | ||||
|     MsgType as MsgType, | ||||
| ) | ||||
| 
 | ||||
| __msg_spec__: TypeAlias = MsgType | ||||
|  | @ -0,0 +1,699 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| IPC msg interchange codec management. | ||||
| 
 | ||||
| Supported backend libs: | ||||
| - `msgspec.msgpack` | ||||
| 
 | ||||
| ToDo: backends we prolly should offer: | ||||
| 
 | ||||
| - see project/lib list throughout GH issue discussion comments: | ||||
|   https://github.com/goodboy/tractor/issues/196 | ||||
| 
 | ||||
| - `capnproto`: https://capnproto.org/rpc.html | ||||
|    - https://capnproto.org/language.html#language-reference | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
|     Token, | ||||
| ) | ||||
| import textwrap | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Protocol, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
|     TypeVar, | ||||
|     Union, | ||||
| ) | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import msgspec | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Raw, | ||||
| ) | ||||
| # TODO: see notes below from @mikenerone.. | ||||
| # from tricycle import TreeVar | ||||
| 
 | ||||
| from tractor.msg.pretty_struct import Struct | ||||
| from tractor.msg.types import ( | ||||
|     mk_msg_spec, | ||||
|     MsgType, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._context import Context | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: unify with `MsgCodec` by making `._dec` part this? | ||||
| class MsgDec(Struct): | ||||
|     ''' | ||||
|     An IPC msg (payload) decoder. | ||||
| 
 | ||||
|     Normally used to decode only a payload: `MsgType.pld: | ||||
|     PayloadT` field before delivery to IPC consumer code. | ||||
| 
 | ||||
|     ''' | ||||
|     _dec: msgpack.Decoder | ||||
| 
 | ||||
|     @property | ||||
|     def dec(self) -> msgpack.Decoder: | ||||
|         return self._dec | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
| 
 | ||||
|         speclines: str = self.spec_str | ||||
| 
 | ||||
|         # in multi-typed spec case we stick the list | ||||
|         # all on newlines after the |__pld_spec__:, | ||||
|         # OW it's prolly single type spec-value | ||||
|         # so just leave it on same line. | ||||
|         if '\n' in speclines: | ||||
|             speclines: str = '\n' + textwrap.indent( | ||||
|                 speclines, | ||||
|                 prefix=' '*3, | ||||
|             ) | ||||
| 
 | ||||
|         body: str = textwrap.indent( | ||||
|             f'|_dec_hook: {self.dec.dec_hook}\n' | ||||
|             f'|__pld_spec__: {speclines}\n', | ||||
|             prefix=' '*2, | ||||
|         ) | ||||
|         return ( | ||||
|             f'<{type(self).__name__}(\n' | ||||
|             f'{body}' | ||||
|             ')>' | ||||
|         ) | ||||
| 
 | ||||
|     # struct type unions | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # | ||||
|     # ^-TODO-^: make a wrapper type for this such that alt | ||||
|     # backends can be represented easily without a `Union` needed, | ||||
|     # AND so that we have better support for wire transport. | ||||
|     # | ||||
|     # -[ ] maybe `FieldSpec` is a good name since msg-spec | ||||
|     #   better applies to a `MsgType[FieldSpec]`? | ||||
|     # | ||||
|     # -[ ] both as part of the `.open_context()` call AND as part of the | ||||
|     #     immediate ack-response (see similar below) | ||||
|     #     we should do spec matching and fail if anything is awry? | ||||
|     # | ||||
|     # -[ ] eventually spec should be generated/parsed from the | ||||
|     #     type-annots as # desired in GH issue: | ||||
|     #     https://github.com/goodboy/tractor/issues/365 | ||||
|     # | ||||
|     # -[ ] semantics of the mismatch case | ||||
|     #   - when caller-callee specs mismatch we should raise | ||||
|     #    a `MsgTypeError` or `MsgSpecError` or similar? | ||||
|     # | ||||
|     # -[ ] wrapper types for both spec types such that we can easily | ||||
|     #     IPC transport them? | ||||
|     #     - `TypeSpec: Union[Type]` | ||||
|     #      * also a `.__contains__()` for doing `None in | ||||
|     #      TypeSpec[None|int]` since rn you need to do it on | ||||
|     #      `.__args__` for unions.. | ||||
|     #     - `MsgSpec: Union[MsgType] | ||||
|     # | ||||
|     # -[ ] auto-genning this from new (in 3.12) type parameter lists Bo | ||||
|     # |_ https://docs.python.org/3/reference/compound_stmts.html#type-params | ||||
|     # |_ historical pep 695: https://peps.python.org/pep-0695/ | ||||
|     # |_ full lang spec: https://typing.readthedocs.io/en/latest/spec/ | ||||
|     # |_ on annotation scopes: | ||||
|     #    https://docs.python.org/3/reference/executionmodel.html#annotation-scopes | ||||
|     # |_ 3.13 will have subscriptable funcs Bo | ||||
|     #    https://peps.python.org/pep-0718/ | ||||
|     @property | ||||
|     def spec(self) -> Union[Type[Struct]]: | ||||
|         # NOTE: defined and applied inside `mk_codec()` | ||||
|         return self._dec.type | ||||
| 
 | ||||
|     # no difference, as compared to a `MsgCodec` which defines the | ||||
|     # `MsgType.pld: PayloadT` part of its spec separately | ||||
|     pld_spec = spec | ||||
| 
 | ||||
|     # TODO: would get moved into `FieldSpec.__str__()` right? | ||||
|     @property | ||||
|     def spec_str(self) -> str: | ||||
|         return pformat_msgspec( | ||||
|             codec=self, | ||||
|             join_char='|', | ||||
|         ) | ||||
| 
 | ||||
|     pld_spec_str = spec_str | ||||
| 
 | ||||
|     def decode( | ||||
|         self, | ||||
|         raw: Raw|bytes, | ||||
|     ) -> Any: | ||||
|         return self._dec.decode(raw) | ||||
| 
 | ||||
|     @property | ||||
|     def hook(self) -> Callable|None: | ||||
|         return self._dec.dec_hook | ||||
| 
 | ||||
| 
 | ||||
| def mk_dec( | ||||
|     spec: Union[Type[Struct]]|Any = Any, | ||||
|     dec_hook: Callable|None = None, | ||||
| 
 | ||||
| ) -> MsgDec: | ||||
|     ''' | ||||
|     Create an IPC msg decoder, normally used as the | ||||
|     `PayloadMsg.pld: PayloadT` field decoder inside a `PldRx`. | ||||
| 
 | ||||
|     ''' | ||||
|     return MsgDec( | ||||
|         _dec=msgpack.Decoder( | ||||
|             type=spec,  # like `MsgType[Any]` | ||||
|             dec_hook=dec_hook, | ||||
|         ) | ||||
|     ) | ||||
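 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff): | ||||
| # build a payload decoder limited to `int|str` and use it on raw | ||||
| # `msgpack` wire-bytes. | ||||
| def _demo_mk_dec() -> None: | ||||
|     dec: MsgDec = mk_dec(spec=int|str) | ||||
|     assert dec.decode(msgpack.encode(42)) == 42 | ||||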
| 
 | ||||
| 
 | ||||
| def mk_msgspec_table( | ||||
|     dec: msgpack.Decoder, | ||||
|     msg: MsgType|None = None, | ||||
| 
 | ||||
| ) -> dict[MsgType, str]: | ||||
|     ''' | ||||
|     Fill out a `dict` mapping each `MsgType` to its `str` name | ||||
|     for a given input `msgspec.msgpack.Decoder` | ||||
|     as defined by its `.type: Union[Type]` setting. | ||||
| 
 | ||||
|     If `msg` is provided, only deliver a `dict` with a single | ||||
|     entry for that type. | ||||
| 
 | ||||
|     ''' | ||||
|     msgspec: Union[Type]|Type = dec.type | ||||
| 
 | ||||
|     if not (msgtypes := getattr(msgspec, '__args__', False)): | ||||
|         msgtypes = [msgspec] | ||||
| 
 | ||||
|     msgt_table: dict[MsgType, str] = { | ||||
|         msgt: str(msgt.__name__) | ||||
|         for msgt in msgtypes | ||||
|     } | ||||
|     if msg: | ||||
|         msgt: MsgType = type(msg) | ||||
|         str_repr: str = msgt_table[msgt] | ||||
|         return {msgt: str_repr} | ||||
| 
 | ||||
|     return msgt_table | ||||
| 
 | ||||
| 
 | ||||
| def pformat_msgspec( | ||||
|     codec: MsgCodec|MsgDec, | ||||
|     msg: MsgType|None = None, | ||||
|     join_char: str = '\n', | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Pretty `str` format the `msgspec.msgpack.Decoder.type` attribute | ||||
|     for display in (console) log messages as a nice (maybe multiline) | ||||
|     presentation of all supported `Struct`s (subtypes) available for | ||||
|     typed decoding. | ||||
| 
 | ||||
|     ''' | ||||
|     dec: msgpack.Decoder = getattr(codec, 'dec', codec) | ||||
|     return join_char.join( | ||||
|         mk_msgspec_table( | ||||
|             dec=dec, | ||||
|             msg=msg, | ||||
|         ).values() | ||||
|     ) | ||||
| 
 | ||||
| # TODO: overall IPC msg-spec features (i.e. in this mod)! | ||||
| # | ||||
| # -[ ] API changes towards being interchange lib agnostic! | ||||
| #   -[ ] capnproto has pre-compiled schema for eg.. | ||||
| #    * https://capnproto.org/language.html | ||||
| #    * http://capnproto.github.io/pycapnp/quickstart.html | ||||
| #     * https://github.com/capnproto/pycapnp/blob/master/examples/addressbook.capnp | ||||
| # | ||||
| # -[ ] struct aware messaging coders as per: | ||||
| #   -[x] https://github.com/goodboy/tractor/issues/36 | ||||
| #   -[ ] https://github.com/goodboy/tractor/issues/196 | ||||
| #   -[ ] https://github.com/goodboy/tractor/issues/365 | ||||
| # | ||||
| class MsgCodec(Struct): | ||||
|     ''' | ||||
|     An IPC msg interchange format lib's encoder + decoder pair. | ||||
| 
 | ||||
|     Pretty much nothing more than delegation to the underlying | ||||
|     `msgspec.<interchange-protocol>.Encoder/Decoder`s for now. | ||||
| 
 | ||||
|     ''' | ||||
|     _enc: msgpack.Encoder | ||||
|     _dec: msgpack.Decoder | ||||
|     _pld_spec: Type[Struct]|Raw|Any | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         speclines: str = textwrap.indent( | ||||
|             pformat_msgspec(codec=self), | ||||
|             prefix=' '*3, | ||||
|         ) | ||||
|         body: str = textwrap.indent( | ||||
|             f'|_lib = {self.lib.__name__!r}\n' | ||||
|             f'|_enc_hook: {self.enc.enc_hook}\n' | ||||
|             f'|_dec_hook: {self.dec.dec_hook}\n' | ||||
|             f'|_pld_spec: {self.pld_spec_str}\n' | ||||
|             # f'|\n' | ||||
|             f'|__msg_spec__:\n' | ||||
|             f'{speclines}\n', | ||||
|             prefix=' '*2, | ||||
|         ) | ||||
|         return ( | ||||
|             f'<{type(self).__name__}(\n' | ||||
|             f'{body}' | ||||
|             ')>' | ||||
|         ) | ||||
| 
 | ||||
|     @property | ||||
|     def pld_spec(self) -> Type[Struct]|Raw|Any: | ||||
|         return self._pld_spec | ||||
| 
 | ||||
|     @property | ||||
|     def pld_spec_str(self) -> str: | ||||
| 
 | ||||
|         # TODO: could also use match: instead? | ||||
|         spec: Union[Type]|Type = self.pld_spec | ||||
| 
 | ||||
|         # `typing.Union` case | ||||
|         if getattr(spec, '__args__', False): | ||||
|             return str(spec) | ||||
| 
 | ||||
|         # just a single type | ||||
|         else: | ||||
|             return spec.__name__ | ||||
| 
 | ||||
|     # struct type unions | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     @property | ||||
|     def msg_spec(self) -> Union[Type[Struct]]: | ||||
|         # NOTE: defined and applied inside `mk_codec()` | ||||
|         return self._dec.type | ||||
| 
 | ||||
|     # TODO: some way to make `pretty_struct.Struct` use this | ||||
|     # wrapped field over the `.msg_spec` one? | ||||
|     @property | ||||
|     def msg_spec_str(self) -> str: | ||||
|         return pformat_msgspec(codec=self)  # pass the codec, not the raw spec | ||||
| 
 | ||||
|     lib: ModuleType = msgspec | ||||
| 
 | ||||
|     # TODO: use `functools.cached_property` for these ? | ||||
|     # https://docs.python.org/3/library/functools.html#functools.cached_property | ||||
|     @property | ||||
|     def enc(self) -> msgpack.Encoder: | ||||
|         return self._enc | ||||
| 
 | ||||
|     # TODO: reusing encode buffer for perf? | ||||
|     # https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer | ||||
|     _buf: bytearray = bytearray() | ||||
| 
 | ||||
|     def encode( | ||||
|         self, | ||||
|         py_obj: Any, | ||||
| 
 | ||||
|         use_buf: bool = False, | ||||
|         # ^-XXX-^ uhh why am i getting this? | ||||
|         # |_BufferError: Existing exports of data: object cannot be re-sized | ||||
| 
 | ||||
|     ) -> bytes: | ||||
|         ''' | ||||
|         Encode input python objects to `msgpack` bytes for | ||||
|         transfer on a transport protocol connection. | ||||
| 
 | ||||
|         When `use_buf == True` use the output buffer optimization: | ||||
|         https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer | ||||
| 
 | ||||
|         ''' | ||||
|         if use_buf: | ||||
|             self._enc.encode_into(py_obj, self._buf) | ||||
|             return self._buf | ||||
|         else: | ||||
|             return self._enc.encode(py_obj) | ||||
| 
 | ||||
|     @property | ||||
|     def dec(self) -> msgpack.Decoder: | ||||
|         return self._dec | ||||
| 
 | ||||
|     def decode( | ||||
|         self, | ||||
|         msg: bytes, | ||||
|     ) -> Any: | ||||
|         ''' | ||||
|         Decode received `msgpack` bytes into a local python object | ||||
|         with special `msgspec.Struct` (or other type) handling | ||||
|         determined by the decoder's `.type` msg-spec setting. | ||||
| 
 | ||||
|         ''' | ||||
|         # https://jcristharif.com/msgspec/usage.html#typed-decoding | ||||
|         return self._dec.decode(msg) | ||||
| 
 | ||||
| 
 | ||||
| # [x] TODO: a sub-decoder system as well? => No! | ||||
| # | ||||
| # -[x] do we still want to try and support the sub-decoder with | ||||
| # `.Raw` technique in the case that the `Generic` approach gives | ||||
| # future grief? | ||||
| # => NO, since we went with the `PldRx` approach instead B) | ||||
| # | ||||
| # IF however you want to see the code that was staged for this | ||||
| # from wayyy back, see the pure removal commit. | ||||
| 
 | ||||
| 
 | ||||
| def mk_codec( | ||||
|     # struct type unions set for `Decoder` | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     ipc_pld_spec: Union[Type[Struct]]|Any = Any, | ||||
| 
 | ||||
|     # TODO: offering a per-msg(-field) type-spec such that | ||||
|     # the fields can be dynamically NOT decoded and left as `Raw` | ||||
|     # values which are later loaded by a sub-decoder specified | ||||
|     # by `tag_field: str` value key? | ||||
|     # payload_msg_specs: dict[ | ||||
|     #     str,  # tag_field value as sub-decoder key | ||||
|     #     Union[Type[Struct]]  # `MsgType.pld` type spec | ||||
|     # ]|None = None, | ||||
| 
 | ||||
|     libname: str = 'msgspec', | ||||
| 
 | ||||
|     # proxy as `Struct(**kwargs)` for ad-hoc type extensions | ||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|     # ------ - ------ | ||||
|     dec_hook: Callable|None = None, | ||||
|     enc_hook: Callable|None = None, | ||||
|     # ------ - ------ | ||||
|     # | ||||
|     # Encoder: | ||||
|     # write_buffer_size=write_buffer_size, | ||||
|     # | ||||
|     # Decoder: | ||||
|     # ext_hook: ext_hook_sig | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     Convenience factory for creating codecs eventually meant | ||||
|     to be interchange lib agnostic (i.e. once we support more than just | ||||
|     `msgspec` ;). | ||||
| 
 | ||||
|     ''' | ||||
|     # (manually) generate a msg-payload-spec for all relevant | ||||
|     # god-boxing-msg subtypes, parameterizing the `PayloadMsg.pld: PayloadT` | ||||
|     # for the decoder such that all sub-type msgs in our SCIPP | ||||
|     # will automatically decode to a type-"limited" payload (`Struct`) | ||||
|     # object (set). | ||||
|     ( | ||||
|         ipc_msg_spec, | ||||
|         msg_types, | ||||
|     ) = mk_msg_spec( | ||||
|         payload_type_union=ipc_pld_spec, | ||||
|     ) | ||||
|     assert len(ipc_msg_spec.__args__) == len(msg_types) | ||||
|     assert ipc_msg_spec | ||||
| 
 | ||||
|     # TODO: use this shim instead? | ||||
|     # bc.. unification, err somethin? | ||||
|     # dec: MsgDec = mk_dec( | ||||
|     #     spec=ipc_msg_spec, | ||||
|     #     dec_hook=dec_hook, | ||||
|     # ) | ||||
| 
 | ||||
|     dec = msgpack.Decoder( | ||||
|         type=ipc_msg_spec, | ||||
|         dec_hook=dec_hook, | ||||
|     ) | ||||
|     enc = msgpack.Encoder( | ||||
|        enc_hook=enc_hook, | ||||
|     ) | ||||
| 
 | ||||
|     codec = MsgCodec( | ||||
|         _enc=enc, | ||||
|         _dec=dec, | ||||
|         _pld_spec=ipc_pld_spec, | ||||
|     ) | ||||
| 
 | ||||
|     # sanity on expected backend support | ||||
|     assert codec.lib.__name__ == libname | ||||
| 
 | ||||
|     return codec | ||||
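 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff); | ||||
| # the `Started` msg-field sig is assumed from `.types` and the | ||||
| # default pld-spec accepts any `msgspec`-supported value. | ||||
| def _demo_codec_roundtrip() -> None: | ||||
|     from tractor.msg.types import Started | ||||
|     codec: MsgCodec = mk_codec() | ||||
|     wire: bytes = codec.encode(Started(cid='1', pld='hi')) | ||||
|     msg = codec.decode(wire) | ||||
|     assert isinstance(msg, Started) and msg.pld == 'hi' | ||||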
| 
 | ||||
| 
 | ||||
| # instance of the default `msgspec.msgpack` codec settings, i.e. | ||||
| # no custom structs, hooks or other special types. | ||||
| _def_msgspec_codec: MsgCodec = mk_codec(ipc_pld_spec=Any) | ||||
| 
 | ||||
| # The built-in IPC `Msg` spec. | ||||
| # Our composing "shuttle" protocol which allows `tractor`-app code | ||||
| # to use any `msgspec` supported type as the `PayloadMsg.pld` payload, | ||||
| # https://jcristharif.com/msgspec/supported-types.html | ||||
| # | ||||
| _def_tractor_codec: MsgCodec = mk_codec( | ||||
|     # TODO: use this for debug mode locking prot? | ||||
|     # ipc_pld_spec=Any, | ||||
|     ipc_pld_spec=Raw, | ||||
| ) | ||||
| # TODO: IDEALLY provides for per-`trio.Task` specificity of the | ||||
| # IPC msging codec used by the transport layer when doing | ||||
| # `Channel.send()/.recv()` of wire data. | ||||
| 
 | ||||
| # ContextVar-TODO: DIDN'T WORK, kept resetting in every new task to default!? | ||||
| # _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar( | ||||
| 
 | ||||
| # TreeVar-TODO: DIDN'T WORK, kept resetting in every new embedded nursery | ||||
| # even though it's supposed to inherit from a parent context ??? | ||||
| # | ||||
| # _ctxvar_MsgCodec: TreeVar[MsgCodec] = TreeVar( | ||||
| # | ||||
| # ^-NOTE-^: for this to work see the mods by @mikenerone from `trio` gitter: | ||||
| # | ||||
| # 22:02:54 <mikenerone> even for regular contextvars, all you have to do is: | ||||
| #    `task: Task = trio.lowlevel.current_task()` | ||||
| #    `task.parent_nursery.parent_task.context.run(my_ctx_var.set, new_value)` | ||||
| # | ||||
| # From a comment in his prop code he couldn't share outright: | ||||
| # 1. For every TreeVar set in the current task (which covers what | ||||
| #    we need from SynchronizerFacade), walk up the tree until the | ||||
| #    root or finding one where the TreeVar is already set, setting | ||||
| #    it in all of the contexts along the way. | ||||
| # 2. For each of those, we also forcibly set the values that are | ||||
| #    pending for child nurseries that have not yet accessed the | ||||
| #    TreeVar. | ||||
| # 3. We similarly set the pending values for the child nurseries | ||||
| #    of the *current* task. | ||||
| # | ||||
| _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar( | ||||
|     'msgspec_codec', | ||||
|     default=_def_tractor_codec, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def apply_codec( | ||||
|     codec: MsgCodec, | ||||
| 
 | ||||
|     ctx: Context|None = None, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     Dynamically apply a `MsgCodec` to the current task's runtime | ||||
|     context such that all (of a certain class of payload | ||||
|     containing i.e. `MsgType.pld: PayloadT`) IPC msgs are | ||||
|     processed with it for that task. | ||||
| 
 | ||||
|     Uses a `contextvars.ContextVar` to ensure the scope of any | ||||
|     codec setting matches the current `Context` or | ||||
|     `._rpc.process_messages()` feeder task's prior setting without | ||||
|     mutating any surrounding scope. | ||||
| 
 | ||||
|     When a `ctx` is supplied, only mod its `Context.pld_codec`. | ||||

 | ||||
|     (Note: a `tricycle.TreeVar` based approach would instead scope the | ||||
|     setting such that it matches the `@cm` block and DOES NOT revert to | ||||
|     the original (default) value in new tasks, as it does for | ||||
|     `ContextVar`; see the notes further below.) | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = True | ||||
| 
 | ||||
|     if ctx is not None: | ||||
|         var: ContextVar = ctx._var_pld_codec | ||||
|     else: | ||||
|         # use IPC channel-connection "global" codec | ||||
|         var: ContextVar = _ctxvar_MsgCodec | ||||
| 
 | ||||
|     orig: MsgCodec = var.get() | ||||
| 
 | ||||
|     assert orig is not codec | ||||
|     if codec.pld_spec is None: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     log.info( | ||||
|         'Applying new msg-spec codec\n\n' | ||||
|         f'{codec}\n' | ||||
|     ) | ||||
|     token: Token = var.set(codec) | ||||
| 
 | ||||
|     # ?TODO? for TreeVar approach which copies from the | ||||
|     # cancel-scope of the prior value, NOT the prior task | ||||
|     # See the docs: | ||||
|     # - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables | ||||
|     # - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py | ||||
|     #   ^- see docs for @cm `.being()` API | ||||
|     # with _ctxvar_MsgCodec.being(codec): | ||||
|     #     new = _ctxvar_MsgCodec.get() | ||||
|     #     assert new is codec | ||||
|     #     yield codec | ||||
| 
 | ||||
|     try: | ||||
|         yield var.get() | ||||
|     finally: | ||||
|         var.reset(token) | ||||
|         log.info( | ||||
|             'Reverted to last msg-spec codec\n\n' | ||||
|             f'{orig}\n' | ||||
|         ) | ||||
|         assert var.get() is orig | ||||
| 
 | ||||
| 
 | ||||
| def current_codec() -> MsgCodec: | ||||
|     ''' | ||||
|     Return the current `trio.Task.context`'s value | ||||
|     for `msgspec_codec` used by `Channel.send/.recv()` | ||||
|     for wire serialization. | ||||
| 
 | ||||
|     ''' | ||||
|     return _ctxvar_MsgCodec.get() | ||||
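 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff): | ||||
| # apply a custom codec for this task's scope, auto-reverted on exit; | ||||
| # assumes no other codec was applied beforehand. | ||||
| def _demo_apply_codec() -> None: | ||||
|     my_codec: MsgCodec = mk_codec(ipc_pld_spec=int|bytes) | ||||
|     with apply_codec(my_codec) as codec: | ||||
|         assert current_codec() is codec is my_codec | ||||
|     assert current_codec() is _def_tractor_codec | ||||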
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def limit_msg_spec( | ||||
|     payload_spec: Union[Type[Struct]], | ||||
| 
 | ||||
|     # TODO: don't need this approach right? | ||||
|     # -> related to the `MsgCodec._payload_decs` stuff above.. | ||||
|     # tagged_structs: list[Struct]|None = None, | ||||
| 
 | ||||
|     **codec_kwargs, | ||||
| 
 | ||||
| ) -> MsgCodec: | ||||
|     ''' | ||||
|     Apply a `MsgCodec` that will natively decode the SC-msg set's | ||||
|     `PayloadMsg.pld: Union[Type[Struct]]` payload fields using | ||||
|     tagged-unions of `msgspec.Struct`s from the given `payload_spec` | ||||
|     for all IPC contexts in use by the current `trio.Task`. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = True | ||||
|     curr_codec: MsgCodec = current_codec() | ||||
|     msgspec_codec: MsgCodec = mk_codec( | ||||
|         ipc_pld_spec=payload_spec, | ||||
|         **codec_kwargs, | ||||
|     ) | ||||
|     with apply_codec(msgspec_codec) as applied_codec: | ||||
|         assert applied_codec is msgspec_codec | ||||
|         yield msgspec_codec | ||||
| 
 | ||||
|     assert curr_codec is current_codec() | ||||
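 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff): | ||||
| # constrain this task's payload-spec to a single custom struct type. | ||||
| def _demo_limit_msg_spec() -> None: | ||||
|     class Point(Struct): | ||||
|         x: int | ||||
|         y: int | ||||
 | ||||
|     with limit_msg_spec(payload_spec=Point) as codec: | ||||
|         assert codec.pld_spec_str == 'Point' | ||||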
| 
 | ||||
| 
 | ||||
| # XXX: msgspec won't allow this with non-struct custom types | ||||
| # like `NamespacePath`!@! | ||||
| # @cm | ||||
| # def extend_msg_spec( | ||||
| #     payload_spec: Union[Type[Struct]], | ||||
| 
 | ||||
| # ) -> MsgCodec: | ||||
| #     ''' | ||||
| #     Extend the current `MsgCodec.pld_spec` (type set) by extending | ||||
| #     the payload spec to **include** the types specified by | ||||
| #     `payload_spec`. | ||||
| 
 | ||||
| #     ''' | ||||
| #     codec: MsgCodec = current_codec() | ||||
| #     pld_spec: Union[Type] = codec.pld_spec | ||||
| #     extended_spec: Union[Type] = pld_spec|payload_spec | ||||
| 
 | ||||
| #     with limit_msg_spec(payload_types=extended_spec) as ext_codec: | ||||
| #         # import pdbp; pdbp.set_trace() | ||||
| #         assert ext_codec.pld_spec == extended_spec | ||||
| #         yield ext_codec | ||||
| # | ||||
| # ^-TODO-^ is it impossible to make something like this orr!? | ||||
| 
 | ||||
| # TODO: make an auto-custom hook generator from a set of input custom | ||||
| # types? | ||||
| # -[ ] below is a proto design using a `TypeCodec` idea? | ||||
| # | ||||
| # type var for the expected interchange-lib's | ||||
| # IPC-transport type when not available as a built-in | ||||
| # serialization output. | ||||
| WireT = TypeVar('WireT') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: some kinda (decorator) API for built-in subtypes | ||||
| # that builds this implicitly by inspecting the `mro()`? | ||||
| class TypeCodec(Protocol): | ||||
|     ''' | ||||
|     A per-custom-type wire-transport serialization translator | ||||
|     description type. | ||||
| 
 | ||||
|     ''' | ||||
|     src_type: Type | ||||
|     wire_type: WireT | ||||
| 
 | ||||
|     def encode(self, obj: Type) -> WireT: | ||||
|         ... | ||||
| 
 | ||||
|     def decode( | ||||
|         self, | ||||
|         obj_type: Type[WireT], | ||||
|         obj: WireT, | ||||
|     ) -> Type: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| class MsgpackTypeCodec(TypeCodec): | ||||
|     ... | ||||
| 
 | ||||
| 
 | ||||
| def mk_codec_hooks( | ||||
|     type_codecs: list[TypeCodec], | ||||
| 
 | ||||
| ) -> tuple[Callable, Callable]: | ||||
|     ''' | ||||
|     Deliver an `enc_hook()`/`dec_hook()` pair which handles | ||||
|     manual conversion from an input `Type` set such that whenever | ||||
|     the `TypeCodec.filter()` predicate matches, | ||||
|     `TypeCodec.decode()` is called on the input native object by | ||||
|     the `dec_hook()`, and whenever an | ||||
|     `isinstance(obj, TypeCodec.type)` check matches inside | ||||
|     `enc_hook(obj=obj)`, the return value is taken from the | ||||
|     `TypeCodec.encode(obj)` callback. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
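 | ||||
| # An editorial sketch (hypothetical hooks, not part of the diff) of | ||||
| # the kind of pair `mk_codec_hooks()` might generate, hand-written | ||||
| # here for one custom type using `msgspec`'s documented hook sigs. | ||||
| def _demo_hook_pair() -> tuple[Callable, Callable]: | ||||
|     from decimal import Decimal | ||||
 | ||||
|     def enc_hook(obj: Any) -> str: | ||||
|         # native -> wire-compatible type | ||||
|         if isinstance(obj, Decimal): | ||||
|             return str(obj) | ||||
|         raise NotImplementedError(f'No encoder for {type(obj)}') | ||||
 | ||||
|     def dec_hook(obj_type: Type, obj: Any) -> Any: | ||||
|         # wire-compatible -> native type | ||||
|         if obj_type is Decimal: | ||||
|             return Decimal(obj) | ||||
|         raise NotImplementedError(f'No decoder for {obj_type}') | ||||
 | ||||
|     return enc_hook, dec_hook | ||||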
|  | @ -0,0 +1,842 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Near-application abstractions for `MsgType.pld: PayloadT|Raw` | ||||
| delivery, filtering and type checking as well as generic | ||||
| operational helpers for processing transaction flows. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
|     Union, | ||||
| ) | ||||
| # ------ - ------ | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Raw, | ||||
|     Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import trio | ||||
| # ------ - ------ | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import ( | ||||
|     MessagingError, | ||||
|     InternalError, | ||||
|     _raise_from_unexpected_msg, | ||||
|     MsgTypeError, | ||||
|     _mk_recv_mte, | ||||
|     pack_error, | ||||
| ) | ||||
| from tractor._state import current_ipc_ctx | ||||
| from ._codec import ( | ||||
|     mk_dec, | ||||
|     MsgDec, | ||||
|     MsgCodec, | ||||
|     current_codec, | ||||
| ) | ||||
| from .types import ( | ||||
|     CancelAck, | ||||
|     Error, | ||||
|     MsgType, | ||||
|     PayloadT, | ||||
|     Return, | ||||
|     Started, | ||||
|     Stop, | ||||
|     Yield, | ||||
|     pretty_struct, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._context import Context | ||||
|     from tractor._streaming import MsgStream | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _def_any_pldec: MsgDec[Any] = mk_dec() | ||||
| 
 | ||||
| 
 | ||||
| class PldRx(Struct): | ||||
|     ''' | ||||
|     A "msg payload receiver". | ||||
| 
 | ||||
|     The pairing of a "feeder" `trio.abc.ReceiveChannel` and an | ||||
|     interchange-specific (eg. msgpack) payload field decoder. The | ||||
|     validation/type-filtering rules are runtime mutable and allow | ||||
|     type constraining the set of `MsgType.pld: Raw|PayloadT` | ||||
|     values at runtime, per IPC task-context. | ||||
| 
 | ||||
|     This abstraction, being just below "user application code", | ||||
|     allows for the equivalent of our `MsgCodec` (used for | ||||
|     type-filtering IPC dialog protocol msgs against a msg-spec) | ||||
|     but with granular control around payload delivery (i.e. the | ||||
|     data-values user code actually sees and uses (the blobs that | ||||
|     are "shuttled" by the wrapping dialog prot) such that invalid | ||||
|     `.pld: Raw` can be decoded and handled by IPC-primitive user | ||||
|     code (i.e. that operates on `Context` and `MsgStream` APIs) | ||||
|     without knowledge of the lower level `Channel`/`MsgTransport` | ||||
|     primitives nor the `MsgCodec` in use. Further, lazily decoding | ||||
|     payload blobs allows for topical (and maybe intentionally | ||||
|     "partial") encryption of msg field subsets. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: better to bind it here? | ||||
|     # _rx_mc: trio.MemoryReceiveChannel | ||||
|     _pld_dec: MsgDec | ||||
|     _ctx: Context|None = None | ||||
|     _ipc: Context|MsgStream|None = None | ||||
| 
 | ||||
|     @property | ||||
|     def pld_dec(self) -> MsgDec: | ||||
|         return self._pld_dec | ||||
| 
 | ||||
|     # TODO: a better name? | ||||
|     # -[ ] when would this be used as it avoids needing to pass the | ||||
|     #   ipc prim to every method | ||||
|     @cm | ||||
|     def wraps_ipc( | ||||
|         self, | ||||
|         ipc_prim: Context|MsgStream, | ||||
| 
 | ||||
|     ) -> PldRx: | ||||
|         ''' | ||||
|         Apply this payload receiver to an IPC primitive type, one | ||||
|         of `Context` or `MsgStream`. | ||||
| 
 | ||||
|         ''' | ||||
|         self._ipc = ipc_prim | ||||
|         try: | ||||
|             yield self | ||||
|         finally: | ||||
|             self._ipc = None | ||||
| 
 | ||||
|     @cm | ||||
|     def limit_plds( | ||||
|         self, | ||||
|         spec: Union[Type[Struct]], | ||||
|         **dec_kwargs, | ||||
| 
 | ||||
|     ) -> MsgDec: | ||||
|         ''' | ||||
|         Type-limit the loadable msg payloads via an applied | ||||
|         `MsgDec` given an input spec, revert to prior decoder on | ||||
|         exit. | ||||
| 
 | ||||
|         ''' | ||||
|         orig_dec: MsgDec = self._pld_dec | ||||
|         limit_dec: MsgDec = mk_dec( | ||||
|             spec=spec, | ||||
|             **dec_kwargs, | ||||
|         ) | ||||
|         try: | ||||
|             self._pld_dec = limit_dec | ||||
|             yield limit_dec | ||||
|         finally: | ||||
|             self._pld_dec = orig_dec | ||||
| 
 | ||||
|     @property | ||||
|     def dec(self) -> msgpack.Decoder: | ||||
|         return self._pld_dec.dec | ||||
| 
 | ||||
|     def recv_pld_nowait( | ||||
|         self, | ||||
|         # TODO: make this `MsgStream` compat as well, see above^ | ||||
|         # ipc_prim: Context|MsgStream, | ||||
|         ipc: Context|MsgStream, | ||||
| 
 | ||||
|         ipc_msg: MsgType|None = None, | ||||
|         expect_msg: Type[MsgType]|None = None, | ||||
|         hide_tb: bool = False, | ||||
|         **dec_pld_kwargs, | ||||
| 
 | ||||
|     ) -> Any|Raw: | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         msg: MsgType = ( | ||||
|             ipc_msg | ||||
|             or | ||||
| 
 | ||||
|             # sync-rx msg from underlying IPC feeder (mem-)chan | ||||
|             ipc._rx_chan.receive_nowait() | ||||
|         ) | ||||
|         return self.decode_pld( | ||||
|             msg, | ||||
|             ipc=ipc, | ||||
|             expect_msg=expect_msg, | ||||
|             hide_tb=hide_tb, | ||||
|             **dec_pld_kwargs, | ||||
|         ) | ||||
| 
 | ||||
|     async def recv_pld( | ||||
|         self, | ||||
|         ipc: Context|MsgStream, | ||||
|         ipc_msg: MsgType|None = None, | ||||
|         expect_msg: Type[MsgType]|None = None, | ||||
|         hide_tb: bool = True, | ||||
| 
 | ||||
|         **dec_pld_kwargs, | ||||
| 
 | ||||
|     ) -> Any|Raw: | ||||
|         ''' | ||||
|         Receive a `MsgType`, then decode and return its `.pld` field. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         msg: MsgType = ( | ||||
|             ipc_msg | ||||
|             or | ||||
|             # async-rx msg from underlying IPC feeder (mem-)chan | ||||
|             await ipc._rx_chan.receive() | ||||
|         ) | ||||
|         return self.decode_pld( | ||||
|             msg=msg, | ||||
|             ipc=ipc, | ||||
|             expect_msg=expect_msg, | ||||
|             **dec_pld_kwargs, | ||||
|         ) | ||||
| 
 | ||||
|     def decode_pld( | ||||
|         self, | ||||
|         msg: MsgType, | ||||
|         ipc: Context|MsgStream, | ||||
|         expect_msg: Type[MsgType]|None, | ||||
| 
 | ||||
|         raise_error: bool = True, | ||||
|         hide_tb: bool = True, | ||||
| 
 | ||||
|         # XXX for special (default?) case of send side call with | ||||
|         # `Context.started(validate_pld_spec=True)` | ||||
|         is_started_send_side: bool = False, | ||||
| 
 | ||||
|     ) -> PayloadT|Raw: | ||||
|         ''' | ||||
|         Decode a msg's payload field: `MsgType.pld: PayloadT|Raw` and | ||||
|         return the value or raise an appropriate error. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         src_err: BaseException|None = None | ||||
|         match msg: | ||||
|             # payload-data shuttle msg; deliver the `.pld` value | ||||
|             # directly to IPC (primitive) client-consumer code. | ||||
|             case ( | ||||
|                 Started(pld=pld)  # sync phase | ||||
|                 |Yield(pld=pld)  # streaming phase | ||||
|                 |Return(pld=pld)  # termination phase | ||||
|             ): | ||||
|                 try: | ||||
|                     pld: PayloadT = self._pld_dec.decode(pld) | ||||
|                     log.runtime( | ||||
|                         'Decoded msg payload\n\n' | ||||
|                         f'{msg}\n' | ||||
|                         f'where payload decoded as\n' | ||||
|                         f'|_pld={pld!r}\n' | ||||
|                     ) | ||||
|                     return pld | ||||
| 
 | ||||
|                 # XXX pld-value type failure | ||||
|                 except ValidationError as valerr: | ||||
|                     # pack mgterr into error-msg for | ||||
|                     # reraise below; ensure remote-actor-err | ||||
|                     # info is displayed nicely? | ||||
|                     mte: MsgTypeError = _mk_recv_mte( | ||||
|                         msg=msg, | ||||
|                         codec=self.pld_dec, | ||||
|                         src_validation_error=valerr, | ||||
|                         is_invalid_payload=True, | ||||
|                         expected_msg=expect_msg, | ||||
|                     ) | ||||
|                     # NOTE: just raise the MTE inline instead of all | ||||
|                     # the pack-unpack-repack non-sense when this is | ||||
|                     # a "send side" validation error. | ||||
|                     if is_started_send_side: | ||||
|                         raise mte | ||||
| 
 | ||||
|                     # NOTE: the `.message` is automatically | ||||
|                     # transferred into the message as long as we | ||||
|                     # define it as a `Error.message` field. | ||||
|                     err_msg: Error = pack_error( | ||||
|                         exc=mte, | ||||
|                         cid=msg.cid, | ||||
|                         src_uid=( | ||||
|                             ipc.chan.uid | ||||
|                             if not is_started_send_side | ||||
|                             else ipc._actor.uid | ||||
|                         ), | ||||
|                     ) | ||||
|                     mte._ipc_msg = err_msg | ||||
| 
 | ||||
|                     # XXX override the `msg` passed to | ||||
|                     # `_raise_from_unexpected_msg()` (below) so | ||||
|                     # that we're effectively able to use that same | ||||
|                     # func to unpack and raise an "emulated remote | ||||
|                     # `Error`" of this local MTE. | ||||
|                     msg = err_msg | ||||
|                     # XXX NOTE: so when the `_raise_from_unexpected_msg()` | ||||
|                     # raises the boxed `err_msg` from above it raises | ||||
|                     # it from the above caught interchange-lib | ||||
|                     # validation error. | ||||
|                     src_err = valerr | ||||
| 
 | ||||
|             # a runtime-internal RPC endpoint response. | ||||
|             # always passthrough since (internal) runtime | ||||
|             # responses are generally never exposed to consumer | ||||
|             # code. | ||||
|             case CancelAck( | ||||
|                 pld=bool(cancelled) | ||||
|             ): | ||||
|                 return cancelled | ||||
| 
 | ||||
|             case Error(): | ||||
|                 src_err = MessagingError( | ||||
|                     'IPC ctx dialog terminated without `Return`-ing a result\n' | ||||
|                     f'Instead it raised {msg.boxed_type_str!r}!' | ||||
|                 ) | ||||
|                 # XXX NOTE XXX another super subtle runtime-y thing.. | ||||
|                 # | ||||
|                 # - when user code (transitively) calls into this | ||||
|                 #   func (usually via a `Context/MsgStream` API) we | ||||
|                 #   generally want errors to propagate immediately | ||||
|                 #   and directly so that the user can define how it | ||||
|                 #   wants to handle them. | ||||
|                 # | ||||
|                 #  HOWEVER, | ||||
|                 # | ||||
|                 # - for certain runtime calling cases, we don't want to | ||||
|                 #   directly raise since the calling code might have | ||||
|                 #   special logic around whether to raise the error | ||||
|                 #   or suppress it silently (eg. a `ContextCancelled` | ||||
|                 #   received from the far end which was requested by | ||||
|                 #   this side, aka a self-cancel). | ||||
|                 # | ||||
|                 # SO, we offer a flag to control this. | ||||
|                 if not raise_error: | ||||
|                     return src_err | ||||
| 
 | ||||
|             case Stop(cid=cid): | ||||
|                 ctx: Context = getattr(ipc, 'ctx', ipc) | ||||
|                 message: str = ( | ||||
|                     f'{ctx.side!r}-side of ctx received stream-`Stop` from ' | ||||
|                     f'{ctx.peer_side!r} peer ?\n' | ||||
|                     f'|_cid: {cid}\n\n' | ||||
| 
 | ||||
|                     f'{pretty_struct.pformat(msg)}\n' | ||||
|                 ) | ||||
|                 if ctx._stream is None: | ||||
|                     explain: str = ( | ||||
|                         f'BUT, no `MsgStream` (was) open(ed) on this ' | ||||
|                         f'{ctx.side!r}-side of the IPC ctx?\n' | ||||
|                         f'Maybe check your code for streaming phase race conditions?\n' | ||||
|                     ) | ||||
|                     log.warning( | ||||
|                         message | ||||
|                         + | ||||
|                         explain | ||||
|                     ) | ||||
|                     # let caller decide what to do when only one | ||||
|                     # side opened a stream, don't raise. | ||||
|                     return msg | ||||
| 
 | ||||
|                 else: | ||||
|                     explain: str = ( | ||||
|                         'Received a `Stop` when it should NEVER be possible!?!?\n' | ||||
|                     ) | ||||
|                     # TODO: this is constructed inside | ||||
|                     # `_raise_from_unexpected_msg()` but maybe we | ||||
|                     # should pass it in? | ||||
|                     # src_err = trio.EndOfChannel(explain) | ||||
|                     src_err = None | ||||
| 
 | ||||
|             case _: | ||||
|                 src_err = InternalError( | ||||
|                     'Invalid IPC msg ??\n\n' | ||||
|                     f'{msg}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         # TODO: maybe use the new `.add_note()` from 3.11? | ||||
|         # |_https://docs.python.org/3.11/library/exceptions.html#BaseException.add_note | ||||
|         # | ||||
|         # fallthrough and raise from `src_err` | ||||
|         try: | ||||
|             _raise_from_unexpected_msg( | ||||
|                 ctx=getattr(ipc, 'ctx', ipc), | ||||
|                 msg=msg, | ||||
|                 src_err=src_err, | ||||
|                 log=log, | ||||
|                 expect_msg=expect_msg, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|         except UnboundLocalError: | ||||
|             # XXX if there's an internal lookup error in the above | ||||
|             # code (prolly on `src_err`) we want to show this frame | ||||
|             # in the tb! | ||||
|             __tracebackhide__: bool = False | ||||
|             raise | ||||
| 
 | ||||
|     dec_msg = decode_pld | ||||
| 
 | ||||
|     async def recv_msg_w_pld( | ||||
|         self, | ||||
|         ipc: Context|MsgStream, | ||||
|         expect_msg: MsgType, | ||||
| 
 | ||||
|         # NOTE: generally speaking only for handling `Stop`-msgs that | ||||
|         # arrive during a call to `drain_to_final_msg()` above! | ||||
|         passthrough_non_pld_msgs: bool = True, | ||||
|         hide_tb: bool = True, | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> tuple[MsgType, PayloadT]: | ||||
|         ''' | ||||
|         Retrieve the next avail IPC msg, decode its payload, and return | ||||
|         the pair of refs. | ||||
| 
 | ||||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         msg: MsgType = await ipc._rx_chan.receive() | ||||
| 
 | ||||
|         if passthrough_non_pld_msgs: | ||||
|             match msg: | ||||
|                 case Stop(): | ||||
|                     return msg, None | ||||
| 
 | ||||
|         # TODO: is there some way we can inject the decoded | ||||
|         # payload into an existing output buffer for the original | ||||
|         # msg instance? | ||||
|         pld: PayloadT = self.decode_pld( | ||||
|             msg, | ||||
|             ipc=ipc, | ||||
|             expect_msg=expect_msg, | ||||
|             hide_tb=hide_tb, | ||||
|             **kwargs, | ||||
|         ) | ||||
|         return msg, pld | ||||
| 
 | ||||
| 
 | ||||
| @cm | ||||
| def limit_plds( | ||||
|     spec: Union[Type[Struct]], | ||||
|     **dec_kwargs, | ||||
| 
 | ||||
| ) -> MsgDec: | ||||
|     ''' | ||||
|     Apply a `MsgDec` that will natively decode the SC-msg set's | ||||
|     `PayloadMsg.pld: Union[Type[Struct]]` payload fields using | ||||
|     tagged-unions of `msgspec.Struct`s from the given `spec` | ||||
|     for all IPC contexts in use by the current `trio.Task`. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = True | ||||
|     try: | ||||
|         curr_ctx: Context = current_ipc_ctx() | ||||
|         rx: PldRx = curr_ctx._pld_rx | ||||
|         orig_pldec: MsgDec = rx.pld_dec | ||||
| 
 | ||||
|         with rx.limit_plds( | ||||
|             spec=spec, | ||||
|             **dec_kwargs, | ||||
|         ) as pldec: | ||||
|             log.runtime( | ||||
|                 'Applying payload-decoder\n\n' | ||||
|                 f'{pldec}\n' | ||||
|             ) | ||||
|             yield pldec | ||||
|     finally: | ||||
|         log.runtime( | ||||
|             'Reverted to previous payload-decoder\n\n' | ||||
|             f'{orig_pldec}\n' | ||||
|         ) | ||||
|         # sanity on orig settings | ||||
|         assert rx.pld_dec is orig_pldec | ||||
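 | ||||
| # An editorial sketch (hypothetical endpoint, not part of the diff); | ||||
| # normally decorated with `@tractor.context` by user code. | ||||
| async def _demo_limit_plds_ep(ctx: Context) -> None: | ||||
|     await ctx.started() | ||||
|     with limit_plds(spec=int|str): | ||||
|         async with ctx.open_stream() as stream: | ||||
|             async for pld in stream: | ||||
|                 ...  # each pld decodes as `int|str` or raises an MTE | ||||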
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_limit_plds( | ||||
|     ctx: Context, | ||||
|     spec: Union[Type[Struct]]|None = None, | ||||
|     dec_hook: Callable|None = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> MsgDec|None: | ||||
|     ''' | ||||
|     Async compat maybe-payload type limiter. | ||||
| 
 | ||||
|     Mostly for use inside other internal `@acm`s such that a separate | ||||
|     indent block isn't needed when an async one is already being | ||||
|     used. | ||||
| 
 | ||||
|     ''' | ||||
|     if ( | ||||
|         spec is None | ||||
|         and | ||||
|         dec_hook is None | ||||
|     ): | ||||
|         yield None | ||||
|         return | ||||
| 
 | ||||
|     # sanity check on IPC scoping | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
| 
 | ||||
|     with ctx._pld_rx.limit_plds( | ||||
|         spec=spec, | ||||
|         dec_hook=dec_hook, | ||||
|         **kwargs, | ||||
|     ) as msgdec: | ||||
|         yield msgdec | ||||
| 
 | ||||
|     # when the applied spec is unwound/removed, the same IPC-ctx | ||||
|     # should still be in scope. | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
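 | ||||
| # An editorial sketch (hypothetical helper, not part of the diff): | ||||
| # no-op passthrough when neither a spec nor hook is provided. | ||||
| async def _demo_maybe_limit(ctx: Context) -> None: | ||||
|     async with maybe_limit_plds(ctx, spec=None) as maybe_dec: | ||||
|         assert maybe_dec is None | ||||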
| 
 | ||||
| 
 | ||||
| async def drain_to_final_msg( | ||||
|     ctx: Context, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
|     msg_limit: int = 6, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     Return|None, | ||||
|     list[MsgType] | ||||
| ]: | ||||
|     ''' | ||||
|     Drain IPC msgs delivered to the underlying IPC context's | ||||
|     rx-mem-chan (i.e. from `Context._rx_chan`) in search for a final | ||||
|     `Return` or `Error` msg. | ||||
| 
 | ||||
|     Deliver the `Return` + preceding drained msgs (`list[MsgType]`) | ||||
|     as a pair unless an `Error` is found, in which case unpack and raise | ||||
|     it. | ||||
| 
 | ||||
|     The motivation here is to always capture any remote error relayed | ||||
|     by the remote peer task during a ctxc condition. | ||||
| 
 | ||||
|     For eg. a ctxc-request may be sent to the peer as part of the | ||||
|     local task's (request for) cancellation but then that same task | ||||
|     **also errors** before executing the teardown in the | ||||
|     `Portal.open_context().__aexit__()` block. In such error-on-exit | ||||
|     cases we want to always capture and raise any delivered remote | ||||
|     error (like an expected ctxc-ACK) as part of the final | ||||
|     `ctx.wait_for_result()` teardown sequence such that the | ||||
|     `Context.outcome` related state always reflect what transpired | ||||
|     even after ctx closure and the `.open_context()` block exit. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     raise_overrun: bool = not ctx._allow_overruns | ||||
| 
 | ||||
|     # wait for a final context result by collecting (but | ||||
|     # basically ignoring) any bi-dir-stream msgs still in transit | ||||
|     # from the far end. | ||||
|     pre_result_drained: list[MsgType] = [] | ||||
|     result_msg: Return|Error|None = None | ||||
|     while not ( | ||||
|         ctx.maybe_error | ||||
|         and not ctx._final_result_is_set() | ||||
|     ): | ||||
|         try: | ||||
|             # receive all msgs, scanning for either a final result | ||||
|             # or error; the underlying call should never raise any | ||||
|             # remote error directly! | ||||
|             msg, pld = await ctx._pld_rx.recv_msg_w_pld( | ||||
|                 ipc=ctx, | ||||
|                 expect_msg=Return, | ||||
|                 raise_error=False, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|             # ^-TODO-^ some bad ideas? | ||||
|             # -[ ] wrap final outcome .receive() in a scope so | ||||
|             #     it can be cancelled out of band if needed? | ||||
|             # |_with trio.CancelScope() as res_cs: | ||||
|             #       ctx._res_scope = res_cs | ||||
|             #       msg: dict = await ctx._rx_chan.receive() | ||||
|             #   if res_cs.cancelled_caught: | ||||
|             # | ||||
|             # -[ ] make sure pause points work here for REPLing | ||||
|             #   the runtime itself; i.e. ensure there's no hangs! | ||||
|             # |_from tractor.devx._debug import pause | ||||
|             #   await pause() | ||||
| 
 | ||||
|         # NOTE: we get here if the far end was | ||||
|         # `ContextCancelled` in 2 cases: | ||||
|         # 1. we requested the cancellation and thus | ||||
|         #    SHOULD NOT raise that far end error, | ||||
|         # 2. WE DID NOT REQUEST that cancel and thus | ||||
|         #    SHOULD RAISE HERE! | ||||
|         except trio.Cancelled as _taskc: | ||||
|             taskc: trio.Cancelled = _taskc | ||||
| 
 | ||||
|             # report when the cancellation wasn't (ostensibly) due to | ||||
|             # the RPC operation itself, but instead some surrounding | ||||
|             # parent cancel-scope. | ||||
|             if not ctx._scope.cancel_called: | ||||
|                 task: trio.lowlevel.Task = trio.lowlevel.current_task() | ||||
|                 rent_n: trio.Nursery = task.parent_nursery | ||||
|                 if ( | ||||
|                     (local_cs := rent_n.cancel_scope).cancel_called | ||||
|                 ): | ||||
|                     log.cancel( | ||||
|                         'RPC-ctx cancelled by local-parent scope during drain!\n\n' | ||||
|                         f'c}}>\n' | ||||
|                         f' |_{rent_n}\n' | ||||
|                         f'   |_.cancel_scope = {local_cs}\n' | ||||
|                         f'   |_>c}}\n' | ||||
|                         f'      |_{ctx.pformat(indent=" "*9)}' | ||||
|                         # ^TODO, some (other) simpler repr here? | ||||
|                     ) | ||||
|                     __tracebackhide__: bool = False | ||||
| 
 | ||||
|             # CASE 2: mask the local cancelled-error(s) | ||||
|             # only when we are sure the remote error is | ||||
|             # the source cause of this local task's | ||||
|             # cancellation. | ||||
|             ctx.maybe_raise( | ||||
|                 hide_tb=hide_tb, | ||||
|                 from_src_exc=taskc, | ||||
|                 # ?TODO? when *should* we use this? | ||||
|             ) | ||||
| 
 | ||||
|             # CASE 1: we DID request the cancel we simply | ||||
|             # continue to bubble up as normal. | ||||
|             raise taskc | ||||
| 
 | ||||
|         match msg: | ||||
| 
 | ||||
|             # final result arrived! | ||||
|             case Return(): | ||||
|                 log.runtime( | ||||
|                     'Context delivered final draining msg:\n' | ||||
|                     f'{pretty_struct.pformat(msg)}' | ||||
|                 ) | ||||
|                 ctx._result: Any = pld | ||||
|                 result_msg = msg | ||||
|                 break | ||||
| 
 | ||||
|             # far end task is still streaming to us so discard | ||||
|             # and report depending on local ctx state. | ||||
|             case Yield(): | ||||
|                 pre_result_drained.append(msg) | ||||
|                 if ( | ||||
|                     (ctx._stream.closed | ||||
|                      and (reason := 'stream was already closed') | ||||
|                     ) | ||||
|                     or (ctx.cancel_acked | ||||
|                         and (reason := 'ctx cancelled other side') | ||||
|                     ) | ||||
|                     or (ctx._cancel_called | ||||
|                         and (reason := 'ctx called `.cancel()`') | ||||
|                     ) | ||||
|                     or (len(pre_result_drained) > msg_limit | ||||
|                         and (reason := f'"yield" limit={msg_limit}') | ||||
|                     ) | ||||
|                 ): | ||||
|                     log.cancel( | ||||
|                         'Cancelling `MsgStream` drain since ' | ||||
|                         f'{reason}\n\n' | ||||
|                         f'<= {ctx.chan.uid}\n' | ||||
|                         f'  |_{ctx._nsf}()\n\n' | ||||
|                         f'=> {ctx._task}\n' | ||||
|                         f'  |_{ctx._stream}\n\n' | ||||
| 
 | ||||
|                         f'{pretty_struct.pformat(msg)}\n' | ||||
|                     ) | ||||
|                     break | ||||
| 
 | ||||
|                 # drain up to the `msg_limit` hoping to get | ||||
|                 # a final result or error/ctxc. | ||||
|                 else: | ||||
|                     log.warning( | ||||
|                         'Ignoring "yield" msg during `ctx.result()` drain..\n' | ||||
|                         f'<= {ctx.chan.uid}\n' | ||||
|                         f'  |_{ctx._nsf}()\n\n' | ||||
|                         f'=> {ctx._task}\n' | ||||
|                         f'  |_{ctx._stream}\n\n' | ||||
| 
 | ||||
|                         f'{pretty_struct.pformat(msg)}\n' | ||||
|                     ) | ||||
|                     continue | ||||
| 
 | ||||
|             # stream terminated, but no result yet.. | ||||
|             # | ||||
|             # TODO: work out edge cases here where | ||||
|             # a stream is open but the task also calls | ||||
|             # this? | ||||
|             # -[ ] should be a runtime error if a stream is open right? | ||||
|             # Stop() | ||||
|             case Stop(): | ||||
|                 pre_result_drained.append(msg) | ||||
|                 log.runtime(  # normal/expected shutdown transaction | ||||
|                     'Remote stream terminated due to "stop" msg:\n\n' | ||||
|                     f'{pretty_struct.pformat(msg)}\n' | ||||
|                 ) | ||||
|                 continue | ||||
| 
 | ||||
|             # remote error msg, likely already handled inside | ||||
|             # `Context._deliver_msg()` | ||||
|             case Error(): | ||||
|                 # TODO: can we replace this with `ctx.maybe_raise()`? | ||||
|                 # -[ ]  would this be handier for this case maybe? | ||||
|                 # |_async with maybe_raise_on_exit() as raises: | ||||
|                 #       if raises: | ||||
|                 #           log.error('some msg about raising..') | ||||
|                 # | ||||
|                 re: Exception|None = ctx._remote_error | ||||
|                 if re: | ||||
|                     assert msg is ctx._cancel_msg | ||||
|                     # NOTE: this solved a super duper edge case XD | ||||
|                     # this was THE super duper edge case of: | ||||
|                     # - local task opens a remote task, | ||||
|                     # - requests remote cancellation of far end | ||||
|                     #   ctx/tasks, | ||||
|                     # - needs to wait for the cancel ack msg | ||||
|                     #   (ctxc) or some result in the race case | ||||
|                     #   where the other side's task returns | ||||
|                     #   before the cancel request msg is ever | ||||
|                     #   rxed and processed, | ||||
|                     # - here this surrounding drain loop (which | ||||
|                     #   iterates all ipc msgs until the ack or | ||||
|                     #   an early result arrives) was NOT exiting | ||||
|                     #   since we are the edge case: local task | ||||
|                     #   does not re-raise any ctxc it receives | ||||
|                     #   IFF **it** was the cancellation | ||||
|                     #   requester.. | ||||
|                     # | ||||
|                     # XXX will raise if necessary but otherwise break | ||||
|                     # from loop presuming any suppressed error | ||||
|                     # (ctxc) should terminate the context! | ||||
|                     ctx._maybe_raise_remote_err( | ||||
|                         re, | ||||
|                         # NOTE: obvi we don't care if we | ||||
|                         # overran the far end if we're already | ||||
|                         # waiting on a final result (msg). | ||||
|                         # raise_overrun_from_self=False, | ||||
|                         raise_overrun_from_self=raise_overrun, | ||||
|                     ) | ||||
|                     result_msg = msg | ||||
|                     break  # OOOOOF, yeah obvi we need this.. | ||||
| 
 | ||||
|                 else: | ||||
|                     # bubble the original src key error | ||||
|                     raise | ||||
| 
 | ||||
|             # XXX should pretty much never get here unless someone | ||||
|             # overrides the default `MsgType` spec. | ||||
|             case _: | ||||
|                 pre_result_drained.append(msg) | ||||
|                 # It's definitely an internal error if any other | ||||
|                 # msg type without a `'cid'` field arrives here! | ||||
|                 report: str = ( | ||||
|                     f'Invalid or unknown msg type {type(msg)!r}!?\n' | ||||
|                 ) | ||||
|                 if not msg.cid: | ||||
|                     report += ( | ||||
|                         '\nWhich also has no `.cid` field?\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 raise MessagingError( | ||||
|                     report | ||||
|                     + | ||||
|                     f'\n{msg}\n' | ||||
|                 ) | ||||
| 
 | ||||
|     else: | ||||
|         log.cancel( | ||||
|             'Skipping `MsgStream` drain since final outcome is set\n\n' | ||||
|             f'{ctx.outcome}\n' | ||||
|         ) | ||||
| 
 | ||||
|     return ( | ||||
|         result_msg, | ||||
|         pre_result_drained, | ||||
|     ) | ||||
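| # Usage sketch: assuming the enclosing drain-loop helper is the | ||||
| # upstream `drain_to_final_msg()` (its signature is not shown | ||||
| # above), a caller awaiting a ctx's final outcome might do, | ||||
| # | ||||
| # result_msg, discarded_msgs = await drain_to_final_msg(ctx=ctx) | ||||
| # # `result_msg` is the final `Return`/`Error` (or `None`) and | ||||
| # # `discarded_msgs` holds any `Yield`/`Stop` drained en route. | ||||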
| 
 | ||||
| 
 | ||||
| def validate_payload_msg( | ||||
|     pld_msg: Started|Yield|Return, | ||||
|     pld_value: PayloadT, | ||||
|     ipc: Context|MsgStream, | ||||
| 
 | ||||
|     raise_mte: bool = True, | ||||
|     strict_pld_parity: bool = False, | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
| ) -> MsgTypeError|None: | ||||
|     ''' | ||||
|     Validate a `PayloadMsg.pld` value with the current | ||||
|     IPC ctx's `PldRx` and raise an appropriate `MsgTypeError` | ||||
|     on failure. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     codec: MsgCodec = current_codec() | ||||
|     msg_bytes: bytes = codec.encode(pld_msg) | ||||
|     try: | ||||
|         roundtripped: Started = codec.decode(msg_bytes) | ||||
|         ctx: Context = getattr(ipc, 'ctx', ipc) | ||||
|         pld: PayloadT = ctx.pld_rx.decode_pld( | ||||
|             msg=roundtripped, | ||||
|             ipc=ipc, | ||||
|             expect_msg=Started, | ||||
|             hide_tb=hide_tb, | ||||
|             is_started_send_side=True, | ||||
|         ) | ||||
|         if ( | ||||
|             strict_pld_parity | ||||
|             and | ||||
|             pld != pld_value | ||||
|         ): | ||||
|             # TODO: make that one a mod func too.. | ||||
|             diff = pretty_struct.Struct.__sub__( | ||||
|                 roundtripped, | ||||
|                 pld_msg, | ||||
|             ) | ||||
|             complaint: str = ( | ||||
|                 'Started value does not match after roundtrip?\n\n' | ||||
|                 f'{diff}' | ||||
|             ) | ||||
|             raise ValidationError(complaint) | ||||
| 
 | ||||
|     # raise any msg type error NO MATTER WHAT! | ||||
|     except ValidationError as verr: | ||||
|         try: | ||||
|             mte: MsgTypeError = _mk_recv_mte( | ||||
|                 msg=roundtripped, | ||||
|                 codec=codec, | ||||
|                 src_validation_error=verr, | ||||
|                 verb_header='Trying to send ', | ||||
|                 is_invalid_payload=True, | ||||
|             ) | ||||
|         except BaseException: | ||||
|             __tracebackhide__: bool = False | ||||
|             raise | ||||
| 
 | ||||
|         if not raise_mte: | ||||
|             return mte | ||||
| 
 | ||||
|         raise mte from verr | ||||
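| # Usage sketch: send-side validation of a `Started` payload where | ||||
| # the caller wants the error returned instead of raised; `ctx` and | ||||
| # `value` below are illustrative placeholders, | ||||
| # | ||||
| # mte: MsgTypeError|None = validate_payload_msg( | ||||
| #     pld_msg=Started(cid=ctx.cid, pld=value), | ||||
| #     pld_value=value, | ||||
| #     ipc=ctx, | ||||
| #     raise_mte=False, | ||||
| # ) | ||||
| # if mte is not None: | ||||
| #     log.error(f'Bad `.pld` for this ctx!\n{mte}') | ||||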
|  | @ -0,0 +1,342 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Prettified version of `msgspec.Struct` for easier console grokin. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections import UserList | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Iterator, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Struct as _Struct, | ||||
|     structs, | ||||
| ) | ||||
| # from pprint import ( | ||||
| #     saferepr, | ||||
| # ) | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| log = get_logger() | ||||
| # TODO: auto-gen type sig for input func both for | ||||
| # type-msgs and logging of RPC tasks? | ||||
| # taken and modified from: | ||||
| # https://stackoverflow.com/a/57110117 | ||||
| # import inspect | ||||
| # from typing import List | ||||
| 
 | ||||
| # def my_function(input_1: str, input_2: int) -> list[int]: | ||||
| #     pass | ||||
| 
 | ||||
| # def types_of(func): | ||||
| #     specs = inspect.getfullargspec(func) | ||||
| #     return_type = specs.annotations['return'] | ||||
| #     input_types = [t.__name__ for s, t in specs.annotations.items() if s != 'return'] | ||||
| #     return f'{func.__name__}({": ".join(input_types)}) -> {return_type}' | ||||
| 
 | ||||
| # types_of(my_function) | ||||
| 
 | ||||
| 
 | ||||
| class DiffDump(UserList): | ||||
|     ''' | ||||
|     Very simple list delegator that repr() dumps (presumed) tuple | ||||
|     elements of the form `tuple[str, Any, Any]` in a nice | ||||
|     multi-line readable form for analyzing `Struct` diffs. | ||||
| 
 | ||||
|     ''' | ||||
|     def __repr__(self) -> str: | ||||
|         if not len(self): | ||||
|             return super().__repr__() | ||||
| 
 | ||||
|         # format by displaying each item pair's ``repr()`` on | ||||
|         # multiple, indented lines such that they are more easily | ||||
|         # visually comparable when printed to console. | ||||
|         repstr: str = '[\n' | ||||
|         for k, left, right in self: | ||||
|             repstr += ( | ||||
|                 f'({k},\n' | ||||
|                 f' |_{repr(left)},\n' | ||||
|                 f' |_{repr(right)},\n' | ||||
|                 ')\n' | ||||
|             ) | ||||
|         repstr += ']\n' | ||||
|         return repstr | ||||
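|     # Example (sketch): a single-entry diff renders as, | ||||
|     # | ||||
|     # >>> DiffDump([('name', 'alice', 'bob')]) | ||||
|     # [ | ||||
|     # (name, | ||||
|     #  |_'alice', | ||||
|     #  |_'bob', | ||||
|     # ) | ||||
|     # ] | ||||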
| 
 | ||||
| 
 | ||||
| def iter_fields(struct: Struct) -> Iterator[ | ||||
|     tuple[ | ||||
|         structs.FieldInfo, | ||||
|         str, | ||||
|         Any, | ||||
|     ] | ||||
| ]: | ||||
|     ''' | ||||
|     Iterate over all non-@property fields of this struct. | ||||
| 
 | ||||
|     ''' | ||||
|     fi: structs.FieldInfo | ||||
|     for fi in structs.fields(struct): | ||||
|         key: str = fi.name | ||||
|         val: Any = getattr(struct, key) | ||||
|         yield ( | ||||
|             fi, | ||||
|             key, | ||||
|             val, | ||||
|         ) | ||||
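| # Example (sketch): field iteration over a (pretty) `Struct` | ||||
| # subtype as defined further below; `@property` attrs are skipped | ||||
| # since they aren't declared `msgspec` fields, | ||||
| # | ||||
| # class Point(Struct): | ||||
| #     x: int | ||||
| #     y: int | ||||
| # | ||||
| # for fi, key, val in iter_fields(Point(x=1, y=2)): | ||||
| #     print(f'{key}: {fi.type.__name__} = {val}') | ||||
| # | ||||
| # # x: int = 1 | ||||
| # # y: int = 2 | ||||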
| 
 | ||||
| 
 | ||||
| def pformat( | ||||
|     struct: Struct, | ||||
|     field_indent: int = 2, | ||||
|     indent: int = 0, | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Recursion-safe `pprint.pformat()` style formatting of | ||||
|     a `msgspec.Struct` for sane reading by a human using a REPL. | ||||
| 
 | ||||
|     ''' | ||||
|     # global whitespace indent | ||||
|     ws: str = ' '*indent | ||||
| 
 | ||||
|     # field whitespace indent | ||||
|     field_ws: str = ' '*(field_indent + indent) | ||||
| 
 | ||||
|     # qtn: str = ws + struct.__class__.__qualname__ | ||||
|     qtn: str = struct.__class__.__qualname__ | ||||
| 
 | ||||
|     obj_str: str = ''  # accumulator | ||||
|     fi: structs.FieldInfo | ||||
|     k: str | ||||
|     v: Any | ||||
|     for fi, k, v in iter_fields(struct): | ||||
| 
 | ||||
|         # TODO: how can we prefer `Literal['option1', 'option2', | ||||
|         # ..]` over .__name__ == `Literal` but still get only the | ||||
|         # latter for simple types like `str | int | None` etc..? | ||||
|         ft: type = fi.type | ||||
|         typ_name: str = getattr(ft, '__name__', str(ft)) | ||||
| 
 | ||||
|         # recurse to get sub-struct's `.pformat()` output Bo | ||||
|         if isinstance(v, Struct): | ||||
|             val_str: str = v.pformat( | ||||
|                 indent=field_indent + indent, | ||||
|                 field_indent=indent + field_indent, | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             val_str: str = repr(v) | ||||
| 
 | ||||
|             # XXX LOL, below just seems to be f#$%in causing | ||||
|             # recursion errs.. | ||||
|             # | ||||
|             # the `pprint` recursion-safe format: | ||||
|             # https://docs.python.org/3.11/library/pprint.html#pprint.saferepr | ||||
|             # try: | ||||
|             #     val_str: str = saferepr(v) | ||||
|             # except Exception: | ||||
|             #     log.exception( | ||||
|             #         'Failed to `saferepr({type(struct)})` !?\n' | ||||
|             #     ) | ||||
|                 # raise | ||||
|                 # return _Struct.__repr__(struct) | ||||
| 
 | ||||
|         # TODO: LOLOL use `textwrap.indent()` instead dawwwwwg! | ||||
|         obj_str += (field_ws + f'{k}: {typ_name} = {val_str},\n') | ||||
| 
 | ||||
|     return ( | ||||
|         f'{qtn}(\n' | ||||
|         f'{obj_str}' | ||||
|         f'{ws})' | ||||
|     ) | ||||
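| # Example (sketch): expected render for a nested struct (using the | ||||
| # pretty `Struct` subtype defined below), | ||||
| # | ||||
| # class Inner(Struct): | ||||
| #     a: int | ||||
| # | ||||
| # class Outer(Struct): | ||||
| #     inner: Inner | ||||
| #     tag: str | ||||
| # | ||||
| # print(pformat(Outer(inner=Inner(a=1), tag='x'))) | ||||
| # | ||||
| # # Outer( | ||||
| # #   inner: Inner = Inner( | ||||
| # #     a: int = 1, | ||||
| # #   ), | ||||
| # #   tag: str = 'x', | ||||
| # # ) | ||||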
| 
 | ||||
| 
 | ||||
| class Struct( | ||||
|     _Struct, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag='pikerstruct', | ||||
|     # tag=True, | ||||
| ): | ||||
|     ''' | ||||
|     A "human friendlier" (aka repl buddy) struct subtype. | ||||
| 
 | ||||
|     ''' | ||||
|     def to_dict( | ||||
|         self, | ||||
|         include_non_members: bool = True, | ||||
| 
 | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Like it sounds.. direct delegation to: | ||||
|         https://jcristharif.com/msgspec/api.html#msgspec.structs.asdict | ||||
| 
 | ||||
|         BUT, by default we pop all non-member (aka not defined | ||||
|         as struct fields) fields. | ||||
| 
 | ||||
|         ''' | ||||
|         asdict: dict = structs.asdict(self) | ||||
|         if include_non_members: | ||||
|             return asdict | ||||
| 
 | ||||
|         # only return a dict of the struct members | ||||
|         # which were provided as input, NOT anything | ||||
|         # added as type-defined `@property` methods! | ||||
|         sin_props: dict = {} | ||||
|         fi: structs.FieldInfo | ||||
|         for fi, k, v in iter_fields(self): | ||||
|             sin_props[k] = asdict[k] | ||||
| 
 | ||||
|         return sin_props | ||||
| 
 | ||||
|     pformat = pformat | ||||
| 
 | ||||
|     def __repr__(self) -> str: | ||||
|         try: | ||||
|             return pformat(self) | ||||
|         except Exception: | ||||
|             log.exception( | ||||
|                 f'Failed to `pformat({type(self)})` !?\n' | ||||
|             ) | ||||
|             return _Struct.__repr__(self) | ||||
| 
 | ||||
|     # __repr__ = pformat | ||||
|     # __str__ = __repr__ = pformat | ||||
|     # TODO: use a pprint.PrettyPrinter instance around ONLY rendering | ||||
|     # inside a known tty? | ||||
|     # def __repr__(self) -> str: | ||||
|     #     ... | ||||
| 
 | ||||
|     def copy( | ||||
|         self, | ||||
|         update: dict | None = None, | ||||
| 
 | ||||
|     ) -> Struct: | ||||
|         ''' | ||||
|         Validate-typecast all self-defined fields and return | ||||
|         a copy of us with all such fields. | ||||
| 
 | ||||
|         NOTE: This is kinda like the default behaviour in | ||||
|         `pydantic.BaseModel` except a copy of the object is | ||||
|         returned making it compat with `frozen=True`. | ||||
| 
 | ||||
|         ''' | ||||
|         if update: | ||||
|             for k, v in update.items(): | ||||
|                 setattr(self, k, v) | ||||
| 
 | ||||
|         # NOTE: roundtrip serialize to validate | ||||
|         # - encode to msgpack binary format, | ||||
|         # - decode that back to a struct. | ||||
|         return msgpack.Decoder(type=type(self)).decode( | ||||
|             msgpack.Encoder().encode(self) | ||||
|         ) | ||||
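|     # Example (sketch): the msgpack round-trip re-validates, so | ||||
|     # a type-mismatched `update` fails loudly, | ||||
|     # | ||||
|     # class Msg(Struct): | ||||
|     #     count: int | ||||
|     # | ||||
|     # Msg(count=1).copy(update={'count': '2'}) | ||||
|     # # -> raises `msgspec.ValidationError` ('2' is not an `int`) | ||||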
| 
 | ||||
|     def typecast( | ||||
|         self, | ||||
| 
 | ||||
|         # TODO: allow only casting a named subset? | ||||
|         # fields: set[str] | None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cast all fields using their declared type annotations | ||||
|         (kinda like what `pydantic` does by default). | ||||
| 
 | ||||
|         NOTE: this of course won't work on frozen types, use | ||||
|         ``.copy()`` above in such cases. | ||||
| 
 | ||||
|         ''' | ||||
|         # https://jcristharif.com/msgspec/api.html#msgspec.structs.fields | ||||
|         fi: structs.FieldInfo | ||||
|         for fi in structs.fields(self): | ||||
|             setattr( | ||||
|                 self, | ||||
|                 fi.name, | ||||
|                 fi.type(getattr(self, fi.name)), | ||||
|             ) | ||||
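|     # Example (sketch): coercing a str-loaded field in place since | ||||
|     # `msgspec` doesn't validate on `__init__()`, | ||||
|     # | ||||
|     # class Rec(Struct): | ||||
|     #     port: int | ||||
|     # | ||||
|     # rec = Rec(port='8000')  # no validation here! | ||||
|     # rec.typecast() | ||||
|     # assert rec.port == 8000 | ||||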
| 
 | ||||
|     # TODO: make a mod func instead and just point to it here for | ||||
|     # method impl? | ||||
|     def __sub__( | ||||
|         self, | ||||
|         other: Struct, | ||||
| 
 | ||||
|     ) -> DiffDump[tuple[str, Any, Any]]: | ||||
|         ''' | ||||
|         Compare fields/items key-wise and return a `DiffDump` | ||||
|         for easy visual REPL comparison B) | ||||
| 
 | ||||
|         ''' | ||||
|         diffs: DiffDump[tuple[str, Any, Any]] = DiffDump() | ||||
|         for fi in structs.fields(self): | ||||
|             attr_name: str = fi.name | ||||
|             ours: Any = getattr(self, attr_name) | ||||
|             theirs: Any = getattr(other, attr_name) | ||||
|             if ours != theirs: | ||||
|                 diffs.append(( | ||||
|                     attr_name, | ||||
|                     ours, | ||||
|                     theirs, | ||||
|                 )) | ||||
| 
 | ||||
|         return diffs | ||||
| 
 | ||||
|     @classmethod | ||||
|     def fields_diff( | ||||
|         cls, | ||||
|         other: dict|Struct, | ||||
| 
 | ||||
|     ) -> DiffDump[tuple[str, Any, Any]]: | ||||
|         ''' | ||||
|         Very similar to `Struct.__sub__()` above except this | ||||
|         accepts an input `other: dict` (presumably one that would | ||||
|         normally be passed as `Struct(**other)`) and returns | ||||
|         a `DiffDump` of the struct's fields vs. the `dict`'s keys. | ||||
| 
 | ||||
|         ''' | ||||
|         nullish = object() | ||||
|         consumed: dict = other.copy() | ||||
|         diffs: DiffDump[tuple[str, Any, Any]] = DiffDump() | ||||
|         for fi in structs.fields(cls): | ||||
|             field_name: str = fi.name | ||||
|             # ours: Any = getattr(self, field_name) | ||||
|             theirs: Any = consumed.pop(field_name, nullish) | ||||
|             if theirs is nullish: | ||||
|                 diffs.append(( | ||||
|                     field_name, | ||||
|                     f'{fi.type!r}', | ||||
|                     'NOT-DEFINED in `other: dict`', | ||||
|                 )) | ||||
| 
 | ||||
|         # when there are lingering fields in `other` that this struct | ||||
|         # DOES NOT define we also append those. | ||||
|         if consumed: | ||||
|             for k, v in consumed.items(): | ||||
|                 diffs.append(( | ||||
|                     k, | ||||
|                     f'NOT-DEFINED for `{cls.__name__}`', | ||||
|                     f'`other: dict` has value = {v!r}', | ||||
|                 )) | ||||
| 
 | ||||
|         return diffs | ||||
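|     # Example (sketch): instance-vs-instance and class-vs-dict | ||||
|     # diffs, | ||||
|     # | ||||
|     # class Pair(Struct): | ||||
|     #     a: int | ||||
|     #     b: int | ||||
|     # | ||||
|     # Pair(a=1, b=2) - Pair(a=1, b=3) | ||||
|     # # -> DiffDump with one entry: ('b', 2, 3) | ||||
|     # | ||||
|     # Pair.fields_diff({'a': 1, 'c': 9}) | ||||
|     # # -> entries for the missing 'b' and the unknown 'c' | ||||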
|  | @ -0,0 +1,139 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| IPC-compat cross-mem-boundary object pointer. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| # TODO: integration with our ``enable_modules: list[str]`` caps sys. | ||||
| 
 | ||||
| # ``pkgutil.resolve_name()`` internally uses | ||||
| # ``importlib.import_module()`` which can be filtered by inserting | ||||
| # a ``MetaPathFinder`` into ``sys.meta_path`` (which we could do before | ||||
| # entering the ``_runtime.process_messages()`` loop). | ||||
| # - https://github.com/python/cpython/blob/main/Lib/pkgutil.py#L645 | ||||
| # - https://stackoverflow.com/questions/1350466/preventing-python-code-from-importing-certain-modules | ||||
| #   - https://stackoverflow.com/a/63320902 | ||||
| #   - https://docs.python.org/3/library/sys.html#sys.meta_path | ||||
| 
 | ||||
| # the new "Implicit Namespace Packages" might be relevant? | ||||
| # - https://www.python.org/dev/peps/pep-0420/ | ||||
| 
 | ||||
| # add implicit serialized message type support so that paths can be | ||||
| # handed directly to IPC primitives such as streams and `Portal.run()` | ||||
| # calls: | ||||
| # - via ``msgspec``: | ||||
| #   - https://jcristharif.com/msgspec/api.html#struct | ||||
| #   - https://jcristharif.com/msgspec/extending.html | ||||
| # via ``msgpack-python``: | ||||
| # - https://github.com/msgpack/msgpack-python#packingunpacking-of-custom-data-type | ||||
| 
 | ||||
| from __future__ import annotations | ||||
| from inspect import ( | ||||
|     isfunction, | ||||
|     ismethod, | ||||
| ) | ||||
| from pkgutil import resolve_name | ||||
| 
 | ||||
| 
 | ||||
| class NamespacePath(str): | ||||
|     ''' | ||||
|     A serializable `str`-subtype implementing a "namespace | ||||
|     pointer" to any Python object reference (like a function) | ||||
|     using the same format as the built-in `pkgutil.resolve_name()` | ||||
|     system. | ||||
| 
 | ||||
|     A value describes a target's module-path and namespace-key | ||||
|     separated by a ':' and thus can be easily used as | ||||
|     an IPC-message-native reference-type allowing memory-isolated | ||||
|     actors to point-and-load objects via a minimal `str` value. | ||||
| 
 | ||||
|     ''' | ||||
|     _ref: object | type | None = None | ||||
| 
 | ||||
|     # TODO: support providing the ns instance in | ||||
|     # order to support 'self.<meth>` style to make | ||||
|     # `Portal.run_from_ns()` work! | ||||
|     # _ns: ModuleType|type|None = None | ||||
| 
 | ||||
|     def load_ref(self) -> object | type: | ||||
|         if self._ref is None: | ||||
|             self._ref = resolve_name(self) | ||||
|         return self._ref | ||||
| 
 | ||||
|     @staticmethod | ||||
|     def _mk_fqnp( | ||||
|         ref: type|object, | ||||
|     ) -> tuple[str, str]: | ||||
|         ''' | ||||
|         Generate a minimal `str` pair which describes a python | ||||
|         object's namespace path and object/type name. | ||||
| 
 | ||||
|         In more precise terms something like: | ||||
|           - 'py.namespace.path:object_name', | ||||
|           - e.g. 'tractor.msg:NamespacePath' will be the ``str`` form | ||||
|             of THIS type XD | ||||
| 
 | ||||
|         ''' | ||||
|         if isfunction(ref): | ||||
|             name: str = getattr(ref, '__name__') | ||||
|             mod_name: str = ref.__module__ | ||||
| 
 | ||||
|         elif ismethod(ref): | ||||
|             # build out the path manually i guess..? | ||||
|             # TODO: better way? | ||||
|             name: str = '.'.join([ | ||||
|                 type(ref.__self__).__name__, | ||||
|                 ref.__func__.__name__, | ||||
|             ]) | ||||
|             mod_name: str = ref.__self__.__module__ | ||||
| 
 | ||||
|         else:  # object or other? | ||||
|             # isinstance(ref, object) | ||||
|             # and not isfunction(ref) | ||||
|             name: str = type(ref).__name__ | ||||
|             mod_name: str = ref.__module__ | ||||
| 
 | ||||
|         # TODO: return static value directly? | ||||
|         # | ||||
|         # fully qualified namespace path, tuple. | ||||
|         fqnp: tuple[str, str] = ( | ||||
|             mod_name, | ||||
|             name, | ||||
|         ) | ||||
|         return fqnp | ||||
| 
 | ||||
|     @classmethod | ||||
|     def from_ref( | ||||
|         cls, | ||||
|         ref: type|object, | ||||
| 
 | ||||
|     ) -> NamespacePath: | ||||
| 
 | ||||
|         fqnp: tuple[str, str] = cls._mk_fqnp(ref) | ||||
|         return cls(':'.join(fqnp)) | ||||
| 
 | ||||
|     def to_tuple( | ||||
|         self, | ||||
| 
 | ||||
|         # TODO: could this work re `self:<meth>` case from above? | ||||
|         # load_ref: bool = True, | ||||
| 
 | ||||
|     ) -> tuple[str, str]: | ||||
|         return self._mk_fqnp( | ||||
|             self.load_ref() | ||||
|         ) | ||||
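| # Example (sketch): a stdlib func ref round-trips through its | ||||
| # `str` form, | ||||
| # | ||||
| # >>> import json | ||||
| # >>> nsp = NamespacePath.from_ref(json.dumps) | ||||
| # >>> str(nsp) | ||||
| # 'json:dumps' | ||||
| # >>> nsp.load_ref() is json.dumps | ||||
| # True | ||||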
|  | @ -0,0 +1,730 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Define our strictly typed IPC message spec for the SCIPP: | ||||
| 
 | ||||
| that is, | ||||
| 
 | ||||
| the "Structurred-Concurrency-Inter-Process-(dialog)-(un)Protocol". | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import types | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Generic, | ||||
|     Literal, | ||||
|     Type, | ||||
|     TypeVar, | ||||
|     TypeAlias, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     defstruct, | ||||
|     # field, | ||||
|     Raw, | ||||
|     Struct, | ||||
|     # UNSET, | ||||
|     # UnsetType, | ||||
| ) | ||||
| 
 | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger('tractor.msgspec') | ||||
| 
 | ||||
| # type variable for the boxed payload field `.pld` | ||||
| PayloadT = TypeVar('PayloadT') | ||||
| 
 | ||||
| 
 | ||||
| class PayloadMsg( | ||||
|     Struct, | ||||
|     Generic[PayloadT], | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#field-ordering | ||||
|     # kw_only=True, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#equality-and-order | ||||
|     # order=True, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#encoding-decoding-as-arrays | ||||
|     # as_array=True, | ||||
| ): | ||||
|     ''' | ||||
|     An abstract payload boxing/shuttling IPC msg type. | ||||
| 
 | ||||
|     Boxes data-values passed to/from user code | ||||
| 
 | ||||
|     (i.e. any values passed by `tractor` application code using any of | ||||
| 
 | ||||
|       |_ `._streaming.MsgStream.send/receive()` | ||||
|       |_ `._context.Context.started/result()` | ||||
|       |_ `._ipc.Channel.send/recv()` | ||||
| 
 | ||||
|      aka our "IPC primitive APIs") | ||||
| 
 | ||||
|     as message "payloads" set to the `.pld` field and uses | ||||
|     `msgspec`'s "tagged unions" feature to support a subset of our | ||||
|     "SC-transitive shuttle protocol" specification with | ||||
|     a `msgspec.Struct` inheritance tree. | ||||
| 
 | ||||
|     ''' | ||||
|     cid: str  # call/context-id | ||||
|     # ^-TODO-^: more explicit type? | ||||
|     # -[ ] use UNSET here? | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#unset | ||||
|     # | ||||
|     # -[ ] `uuid.UUID` which has multi-protocol support | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#uuid | ||||
| 
 | ||||
|     # The msg's "payload" (spelled without vowels): | ||||
|     # https://en.wikipedia.org/wiki/Payload_(computing) | ||||
|     pld: Raw | ||||
| 
 | ||||
|     # ^-NOTE-^ inherited from any `PayloadMsg` (and maybe type | ||||
|     # overridden via the `._ops.limit_plds()` API), but by default | ||||
|     # is parameterized to be `Any`. | ||||
|     # | ||||
|     # XXX this `Union` must strictly NOT contain `Any` if | ||||
|     # a limited msg-type-spec is intended, such that when | ||||
|     # creating and applying a new `MsgCodec` its  | ||||
|     # `.decoder: Decoder` is configured with a `Union[Type[Struct]]` which | ||||
|     # restricts the allowed payload content (this `.pld` field)  | ||||
|     # by type system defined loading constraints B) | ||||
|     # | ||||
|     # TODO: could also be set to `msgspec.Raw` if the sub-decoders | ||||
|     # approach is preferred over the generic parameterization | ||||
|     # approach as taken by `mk_msg_spec()` below. | ||||
| 
 | ||||
| 
 | ||||
| # TODO: complete rename | ||||
| Msg = PayloadMsg | ||||
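| # Example (sketch): the boxed `.pld` type can be narrowed via | ||||
| # standard generic indexing, which is exactly what | ||||
| # `mk_msg_spec()` below relies on, | ||||
| # | ||||
| # MsgOfInt = PayloadMsg[int]  # decode-time `.pld: int` limit | ||||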
| 
 | ||||
| 
 | ||||
| class Aid( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Actor-identity msg. | ||||
| 
 | ||||
|     Initial contact exchange enabling an actor "mailbox handshake" | ||||
|     delivering the peer identity (and maybe eventually contact) | ||||
|     info. | ||||
| 
 | ||||
|     Used by discovery protocol to register actors as well as | ||||
|     conduct the initial comms (capability) filtering. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str | ||||
|     uuid: str | ||||
|     # TODO: use built-in support for UUIDs? | ||||
|     # -[ ] `uuid.UUID` which has multi-protocol support | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#uuid | ||||
| 
 | ||||
| 
 | ||||
| class SpawnSpec( | ||||
|     pretty_struct.Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Initial runtime spec handed down from a spawning parent to its | ||||
|     child subactor immediately following first contact via an | ||||
|     `Aid` msg. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: similar to the `Start` kwargs spec needed below, we need | ||||
|     # a hard `Struct` def for all of these fields! | ||||
|     _parent_main_data: dict | ||||
|     _runtime_vars: dict[str, Any] | ||||
| 
 | ||||
|     # module import capability | ||||
|     enable_modules: dict[str, str] | ||||
| 
 | ||||
|     # TODO: not just sockaddr pairs? | ||||
|     # -[ ] abstract into a `TransportAddr` type? | ||||
|     reg_addrs: list[tuple[str, int]] | ||||
|     bind_addrs: list[tuple[str, int]] | ||||
| 
 | ||||
| 
 | ||||
| # TODO: caps based RPC support in the payload? | ||||
| # | ||||
| # -[ ] integration with our ``enable_modules: list[str]`` caps sys. | ||||
| #   ``pkgutil.resolve_name()`` internally uses | ||||
| #   ``importlib.import_module()`` which can be filtered by | ||||
| #   inserting a ``MetaPathFinder`` into ``sys.meta_path`` (which | ||||
| #   we could do before entering the ``Actor._process_messages()`` | ||||
| #   loop)? | ||||
| #   - https://github.com/python/cpython/blob/main/Lib/pkgutil.py#L645 | ||||
| #   - https://stackoverflow.com/questions/1350466/preventing-python-code-from-importing-certain-modules | ||||
| #   - https://stackoverflow.com/a/63320902 | ||||
| #   - https://docs.python.org/3/library/sys.html#sys.meta_path | ||||
| # | ||||
| # -[ ] can we combine .ns + .func into a native `NamespacePath` field? | ||||
| # | ||||
| # -[ ] better name, like `Call/TaskInput`? | ||||
| # | ||||
| # -[ ] XXX a debugger lock msg transaction with payloads like, | ||||
| #   child -> `.pld: DebugLock` -> root | ||||
| #   child <- `.pld: DebugLocked` <- root | ||||
| #   child -> `.pld: DebugRelease` -> root | ||||
| # | ||||
| #   WHY => when a pld spec is provided it might not allow for | ||||
| #   debug mode msgs as they currently are (using plain old `pld. | ||||
| #   str` payloads) so we only when debug_mode=True we need to | ||||
| #   union in this debugger payload set? | ||||
| # | ||||
| #   mk_msg_spec( | ||||
| #       MyPldSpec, | ||||
| #       debug_mode=True, | ||||
| #   ) -> ( | ||||
| #       Union[MyPldSpec] | ||||
| #      | Union[DebugLock, DebugLocked, DebugRelease] | ||||
| #   ) | ||||
| 
 | ||||
| # class Params( | ||||
| #     Struct, | ||||
| #     Generic[PayloadT], | ||||
| # ): | ||||
| #     spec: PayloadT|ParamSpec | ||||
| #     inputs: InputsT|dict[str, Any] | ||||
| 
 | ||||
|     # TODO: for eg. we could stringently check the target | ||||
|     # task-func's type sig and enforce it? | ||||
|     # as an example for an IPTC, | ||||
|     # @tractor.context | ||||
|     # async def send_back_nsp( | ||||
|     #     ctx: Context, | ||||
|     #     expect_debug: bool, | ||||
|     #     pld_spec_str: str, | ||||
|     #     add_hooks: bool, | ||||
|     #     started_msg_dict: dict, | ||||
|     # ) -> <WhatHere!>: | ||||
| 
 | ||||
|     # TODO: figure out which of the `typing` feats we want to | ||||
|     # support: | ||||
|     # - plain ol `ParamSpec`: | ||||
|     #   https://docs.python.org/3/library/typing.html#typing.ParamSpec | ||||
|     # - new in 3.12 type parameter lists Bo | ||||
|     # |_ https://docs.python.org/3/reference/compound_stmts.html#type-params | ||||
|     # |_ historical pep 695: https://peps.python.org/pep-0695/ | ||||
|     # |_ full lang spec: https://typing.readthedocs.io/en/latest/spec/ | ||||
|     # |_ on annotation scopes: | ||||
|     #    https://docs.python.org/3/reference/executionmodel.html#annotation-scopes | ||||
|     # spec: ParamSpec[ | ||||
|     #     expect_debug: bool, | ||||
|     #     pld_spec_str: str, | ||||
|     #     add_hooks: bool, | ||||
|     #     started_msg_dict: dict, | ||||
|     # ] | ||||
| 
 | ||||
| 
 | ||||
| # TODO: possibly sub-type for runtime method requests? | ||||
| # -[ ] `Runtime(Start)` with a `.ns: str = 'self' or | ||||
| #     we can just enforce any such method as having a strict | ||||
| #     ns for calling funcs, namely the `Actor` instance? | ||||
| class Start( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Initial request to remotely schedule an RPC `trio.Task` via | ||||
|     `Actor.start_remote_task()`. | ||||
| 
 | ||||
|     It is called by all the following public APIs: | ||||
| 
 | ||||
|     - `ActorNursery.run_in_actor()` | ||||
| 
 | ||||
|     - `Portal.run()` | ||||
|           `|_.run_from_ns()` | ||||
|           `|_.open_stream_from()` | ||||
|           `|_._submit_for_result()` | ||||
| 
 | ||||
|     - `Context.open_context()` | ||||
| 
 | ||||
|     ''' | ||||
|     cid: str | ||||
| 
 | ||||
|     ns: str | ||||
|     func: str | ||||
| 
 | ||||
|     # TODO: make this a sub-struct which can be further | ||||
|     # type-limited, maybe `Inputs`? | ||||
|     # => SEE ABOVE <= | ||||
|     kwargs: dict[str, Any] | ||||
|     uid: tuple[str, str]  # (calling) actor-id | ||||
| 
 | ||||
|     # TODO: enforcing a msg-spec in terms `Msg.pld` | ||||
|     # parameterizable msgs to be used in the appls IPC dialog. | ||||
|     # => SEE `._codec.MsgDec` for more <= | ||||
|     pld_spec: str = str(Any) | ||||
| 
 | ||||
| 
 | ||||
| class StartAck( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Init response to a `Start` request indicating the far | ||||
|     end's RPC spec, namely its callable "type". | ||||
| 
 | ||||
|     ''' | ||||
|     cid: str | ||||
|     # TODO: maybe better names for all these? | ||||
|     # -[ ] obvi ^ would need sync with `._rpc` | ||||
|     functype: Literal[ | ||||
|         'asyncfunc', | ||||
|         'asyncgen', | ||||
|         'context',  # TODO: the only one eventually? | ||||
|     ] | ||||
| 
 | ||||
|     # import typing | ||||
|     # eval(str(Any), {}, {'typing': typing}) | ||||
|     # started_spec: str = str(Any) | ||||
|     # return_spec | ||||
| 
 | ||||
| 
 | ||||
| class Started( | ||||
|     PayloadMsg, | ||||
|     Generic[PayloadT], | ||||
| ): | ||||
|     ''' | ||||
|     Packet to shuttle the "first value" delivered by | ||||
|     `Context.started(value: Any)` from a `@tractor.context` | ||||
|     decorated IPC endpoint. | ||||
| 
 | ||||
|     ''' | ||||
|     pld: PayloadT|Raw | ||||
| 
 | ||||
| 
 | ||||
| # TODO: cancel request dedicated msg? | ||||
| # -[ ] instead of using our existing `Start`? | ||||
| # | ||||
| # class Cancel: | ||||
| #     cid: str | ||||
| 
 | ||||
| 
 | ||||
| class Yield( | ||||
|     PayloadMsg, | ||||
|     Generic[PayloadT], | ||||
| ): | ||||
|     ''' | ||||
|     Per IPC transmission of a value from `await MsgStream.send(<value>)`. | ||||
| 
 | ||||
|     ''' | ||||
|     pld: PayloadT|Raw | ||||
| 
 | ||||
| 
 | ||||
| class Stop( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Stream termination signal much like an IPC version  | ||||
|     of `StopAsyncIteration`. | ||||
| 
 | ||||
|     ''' | ||||
|     cid: str | ||||
|     # TODO: do we want to support a payload on stop? | ||||
|     # pld: UnsetType = UNSET | ||||
| 
 | ||||
| 
 | ||||
| # TODO: is `Result` or `Out[come]` a better name? | ||||
| class Return( | ||||
|     PayloadMsg, | ||||
|     Generic[PayloadT], | ||||
| ): | ||||
|     ''' | ||||
|     Final `return <value>` from a remotely scheduled | ||||
|     func-as-`trio.Task`. | ||||
| 
 | ||||
|     ''' | ||||
|     pld: PayloadT|Raw | ||||
| 
 | ||||
| 
 | ||||
| class CancelAck( | ||||
|     PayloadMsg, | ||||
|     Generic[PayloadT], | ||||
| ): | ||||
|     ''' | ||||
|     Deliver the `bool` return-value from a cancellation `Actor` | ||||
|     method scheduled via a prior RPC request: | ||||
| 
 | ||||
|     - `Actor.cancel()` | ||||
|        `|_.cancel_soon()` | ||||
|        `|_.cancel_rpc_tasks()` | ||||
|        `|_._cancel_task()` | ||||
|        `|_.cancel_server()` | ||||
| 
 | ||||
|     RPCs to these methods must **always** be able to deliver a result | ||||
|     despite the currently configured IPC msg spec such that graceful | ||||
|     cancellation is always functional in the runtime. | ||||
| 
 | ||||
|     ''' | ||||
|     pld: bool | ||||
| 
 | ||||
| 
 | ||||
| # TODO: unify this with `._exceptions.RemoteActorError` | ||||
| # such that we can have a msg which is both raisable and | ||||
| # IPC-wire ready? | ||||
| # B~o | ||||
| class Error( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| 
 | ||||
|     # TODO may omit defaults? | ||||
|     # https://jcristharif.com/msgspec/structs.html#omitting-default-values | ||||
|     # omit_defaults=True, | ||||
| ): | ||||
|     ''' | ||||
|     A pkt that wraps `RemoteActorError`s for relay and raising. | ||||
| 
 | ||||
|     Fields are 1-to-1 meta-data as needed originally by | ||||
|     `RemoteActorError.msgdata: dict` but now are defined here. | ||||
| 
 | ||||
|     Note: this msg shuttles `ContextCancelled` and `StreamOverrun` | ||||
|     and is also used to rewrap any `MsgTypeError` for relay-response | ||||
|     to bad `Yield.pld` senders during an IPC ctx's streaming dialog | ||||
|     phase. | ||||
| 
 | ||||
|     ''' | ||||
|     src_uid: tuple[str, str] | ||||
|     src_type_str: str | ||||
|     boxed_type_str: str | ||||
|     relay_path: list[tuple[str, str]] | ||||
| 
 | ||||
|     # normally either both are provided or just | ||||
|     # a message for certain special cases where | ||||
|     # we pack a message for a locally raised | ||||
|     # mte or ctxc. | ||||
|     message: str|None = None | ||||
|     tb_str: str = '' | ||||
| 
 | ||||
|     # TODO: only optionally include sub-type specific fields? | ||||
|     # -[ ] use UNSET or don't include them via `omit_defaults` (see | ||||
|     #      inheritance-line options above) | ||||
|     # | ||||
|     # `ContextCancelled` reports the src cancelling `Actor.uid` | ||||
|     canceller: tuple[str, str]|None = None | ||||
| 
 | ||||
|     # `StreamOverrun`-specific src `Actor.uid` | ||||
|     sender: tuple[str, str]|None = None | ||||
| 
 | ||||
|     # `MsgTypeError` meta-data | ||||
|     cid: str|None = None | ||||
|     # when the receiver side fails to decode a delivered | ||||
|     # `PayloadMsg`-subtype; one and/or both the msg-struct instance | ||||
|     # and `Any`-decoded to `dict` of the msg are set and relayed | ||||
|     # (back to the sender) for introspection. | ||||
|     _bad_msg: Started|Yield|Return|None = None | ||||
|     _bad_msg_as_dict: dict|None = None | ||||
| 
 | ||||
| 
 | ||||
| def from_dict_msg( | ||||
|     dict_msg: dict, | ||||
| 
 | ||||
|     msgT: MsgType|None = None, | ||||
|     tag_field: str = 'msg_type', | ||||
|     use_pretty: bool = False, | ||||
| 
 | ||||
| ) -> MsgType: | ||||
|     ''' | ||||
|     Helper to build a specific `MsgType` struct from a "vanilla" | ||||
|     decoded `dict`-ified equivalent of the msg: i.e. if the | ||||
|     `msgpack.Decoder.type == Any`, the default when using | ||||
|     `msgspec.msgpack` and not "typed decoding" using | ||||
|     `msgspec.Struct`. | ||||
| 
 | ||||
|     ''' | ||||
|     msg_type_tag_field: str = ( | ||||
|         msgT.__struct_config__.tag_field | ||||
|         if msgT is not None | ||||
|         else tag_field | ||||
|     ) | ||||
|     # XXX ensure tag field is removed | ||||
|     msgT_name: str = dict_msg.pop(msg_type_tag_field) | ||||
|     msgT: MsgType = _msg_table[msgT_name] | ||||
|     if use_pretty: | ||||
|         msgT = defstruct( | ||||
|             name=msgT_name, | ||||
|             fields=[ | ||||
|                 (key, fi.type) | ||||
|                 for fi, key, _ | ||||
|                 in pretty_struct.iter_fields(msgT) | ||||
|             ], | ||||
|             bases=( | ||||
|                 pretty_struct.Struct, | ||||
|                 msgT, | ||||
|             ), | ||||
|         ) | ||||
|     return msgT(**dict_msg) | ||||
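| # Example (sketch): re-hydrating a typed msg from an un-typed | ||||
| # (`Any`-decoded) wire `dict`, | ||||
| # | ||||
| # dict_msg = {'msg_type': 'Stop', 'cid': '123'} | ||||
| # msg = from_dict_msg(dict_msg) | ||||
| # assert type(msg) is Stop and msg.cid == '123' | ||||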
| 
 | ||||
| # TODO: should we make a set of cancel msgs? | ||||
| # -[ ] a version of `ContextCancelled`? | ||||
| #     |_ and/or with a scope field? | ||||
| # -[ ] or, a full `ActorCancelled`? | ||||
| # | ||||
| # class Cancelled(MsgType): | ||||
| #     cid: str | ||||
| # | ||||
| # -[ ] what about overruns? | ||||
| # | ||||
| # class Overrun(MsgType): | ||||
| #     cid: str | ||||
| 
 | ||||
| _runtime_msgs: list[Struct] = [ | ||||
| 
 | ||||
|     # identity handshake on first IPC `Channel` contact. | ||||
|     Aid, | ||||
| 
 | ||||
|     # parent-to-child spawn specification passed as 2nd msg after | ||||
|     # handshake ONLY after child connects back to parent. | ||||
|     SpawnSpec, | ||||
| 
 | ||||
|     # inter-actor RPC initiation | ||||
|     Start,  # schedule remote task-as-func | ||||
|     StartAck,  # ack the schedule request | ||||
| 
 | ||||
|     # emission from `MsgStream.aclose()` | ||||
|     Stop, | ||||
| 
 | ||||
|     # `Return` sub-type that we always accept from | ||||
|     # runtime-internal cancel endpoints | ||||
|     CancelAck, | ||||
| 
 | ||||
|     # box remote errors, normally subtypes | ||||
|     # of `RemoteActorError`. | ||||
|     Error, | ||||
| ] | ||||
| 
 | ||||
| # the no-outcome-yet IAC (inter-actor-communication) sub-set which | ||||
| # can be `PayloadMsg.pld` payload field type-limited by application code | ||||
| # using `apply_codec()` and `limit_msg_spec()`. | ||||
| _payload_msgs: list[PayloadMsg] = [ | ||||
|     # first <value> from `Context.started(<value>)` | ||||
|     Started, | ||||
| 
 | ||||
|     # any <value> sent via `MsgStream.send(<value>)` | ||||
|     Yield, | ||||
| 
 | ||||
|     # the final value returned from a `@context` decorated | ||||
|     # IPC endpoint. | ||||
|     Return, | ||||
| ] | ||||
| 
 | ||||
| # built-in SC shuttle protocol msg type set in | ||||
| # approx order of the IPC txn-state spaces. | ||||
| __msg_types__: list[MsgType] = ( | ||||
|     _runtime_msgs | ||||
|     + | ||||
|     _payload_msgs | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| _msg_table: dict[str, MsgType] = { | ||||
|     msgT.__name__: msgT | ||||
|     for msgT in __msg_types__ | ||||
| } | ||||
| 
 | ||||
| # TODO: use new type declaration syntax for msg-type-spec | ||||
| # https://docs.python.org/3/library/typing.html#type-aliases | ||||
| # https://docs.python.org/3/reference/simple_stmts.html#type | ||||
| MsgType: TypeAlias = Union[*__msg_types__] | ||||
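| # Example (sketch): a wire-level codec over the full builtin spec, | ||||
| # | ||||
| # import msgspec | ||||
| # enc = msgspec.msgpack.Encoder() | ||||
| # dec = msgspec.msgpack.Decoder(type=MsgType) | ||||
| # msg = dec.decode(enc.encode(Stop(cid='77'))) | ||||
| # assert isinstance(msg, Stop) | ||||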
| 
 | ||||
| 
 | ||||
| def mk_msg_spec( | ||||
|     payload_type_union: Union[Type] = Any, | ||||
| 
 | ||||
|     spec_build_method: Literal[ | ||||
|         'indexed_generics',  # works | ||||
|         'defstruct', | ||||
|         'types_new_class', | ||||
| 
 | ||||
|     ] = 'indexed_generics', | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     Union[MsgType], | ||||
|     list[MsgType], | ||||
| ]: | ||||
|     ''' | ||||
|     Create a payload-(data-)type-parameterized IPC message specification. | ||||
| 
 | ||||
|     Allows generating IPC msg types from the above builtin set | ||||
|     with a payload (field) restricted data-type, the `Msg.pld: PayloadT`. | ||||
| 
 | ||||
|     This allows runtime-task contexts to use the python type system | ||||
|     to limit/filter payload values as determined by the input | ||||
|     `payload_type_union: Union[Type]`. | ||||
| 
 | ||||
|     Notes: originally multiple approaches for constructing the | ||||
|     type-union passed to `msgspec` were attempted as selected via the | ||||
|     `spec_build_method`, but it turns out only the default method | ||||
|     'indexed_generics' seems to work reliably in all use cases. As | ||||
|     such, the others will likely be removed in the near future. | ||||
| 
 | ||||
|     ''' | ||||
|     submsg_types: list[MsgType] = Msg.__subclasses__() | ||||
|     bases: tuple = ( | ||||
|         # XXX NOTE XXX the below generic-parameterization seems to | ||||
|         # be THE ONLY way to get this to work correctly in terms | ||||
|         # of getting ValidationError on a roundtrip? | ||||
|         Msg[payload_type_union], | ||||
|         Generic[PayloadT], | ||||
|     ) | ||||
|     defstruct_bases: tuple = ( | ||||
|         Msg, # [payload_type_union], | ||||
|         # Generic[PayloadT], | ||||
|         # ^-XXX-^: not allowed? lul.. | ||||
|     ) | ||||
|     ipc_msg_types: list[Msg] = [] | ||||
| 
 | ||||
|     idx_msg_types: list[Msg] = [] | ||||
|     defs_msg_types: list[Msg] = [] | ||||
|     nc_msg_types: list[Msg] = [] | ||||
| 
 | ||||
|     for msgtype in __msg_types__: | ||||
| 
 | ||||
|         # for the NON-payload (user api) type specify-able | ||||
|         # msgs types, we simply aggregate the def as is | ||||
|         # for inclusion in the output type `Union`. | ||||
|         if msgtype not in _payload_msgs: | ||||
|             ipc_msg_types.append(msgtype) | ||||
|             continue | ||||
| 
 | ||||
|         # check inheritance sanity | ||||
|         assert msgtype in submsg_types | ||||
| 
 | ||||
|         # TODO: wait why do we need the dynamic version here? | ||||
|         # XXX ANSWER XXX -> BC INHERITANCE.. don't work w generics.. | ||||
|         # | ||||
|         # NOTE previously bc msgtypes WERE NOT inheriting | ||||
|         # directly the `Generic[PayloadT]` type, the manual method | ||||
|         # of generic-paraming with `.__class_getitem__()` wasn't | ||||
|         # working.. | ||||
|         # | ||||
|         # XXX but bc i changed that to make every subtype inherit | ||||
|         # it, this manual "indexed parameterization" method seems | ||||
|         # to work? | ||||
|         # | ||||
|         # -[x] paraming the `PayloadT` values via `Generic[T]` | ||||
|         #   does work it seems but WITHOUT inheritance of generics | ||||
|         # | ||||
|         # -[-] is there a way to get it to work at module level | ||||
|         #   just using inheritance or maybe a metaclass? | ||||
|         #  => thot that `defstruct` might work, but NOPE, see | ||||
|         #   below.. | ||||
|         # | ||||
|         idxed_msg_type: Msg = msgtype[payload_type_union] | ||||
|         idx_msg_types.append(idxed_msg_type) | ||||
| 
 | ||||
|         # TODO: WHY do we need to dynamically generate the | ||||
|         # subtype-msgs here to ensure the `.pld` parameterization | ||||
|         # propagates as well as works at all in terms of the | ||||
|         # `msgpack.Decoder()`..? | ||||
|         # | ||||
|         # dynamically create the payload type-spec-limited msg set. | ||||
|         newclass_msgtype: Type = types.new_class( | ||||
|             name=msgtype.__name__, | ||||
|             bases=bases, | ||||
|             kwds={}, | ||||
|         ) | ||||
|         nc_msg_types.append( | ||||
|             newclass_msgtype[payload_type_union] | ||||
|         ) | ||||
| 
 | ||||
|         # with `msgspec.structs.defstruct` | ||||
|         # XXX ALSO DOESN'T WORK | ||||
|         defstruct_msgtype = defstruct( | ||||
|             name=msgtype.__name__, | ||||
|             fields=[ | ||||
|                 ('cid', str), | ||||
| 
 | ||||
|                 # XXX doesn't seem to work.. | ||||
|                 # ('pld', PayloadT), | ||||
| 
 | ||||
|                 ('pld', payload_type_union), | ||||
|             ], | ||||
|             bases=defstruct_bases, | ||||
|         ) | ||||
|         defs_msg_types.append(defstruct_msgtype) | ||||
| 
 | ||||
|         # assert index_paramed_msg_type == manual_paramed_msg_subtype | ||||
| 
 | ||||
|         # paramed_msg_type = manual_paramed_msg_subtype | ||||
| 
 | ||||
|         # ipc_payload_msgs_type_union |= index_paramed_msg_type | ||||
| 
 | ||||
|     idx_spec: Union[Type[Msg]] = Union[*idx_msg_types] | ||||
|     def_spec: Union[Type[Msg]] = Union[*defs_msg_types] | ||||
|     nc_spec: Union[Type[Msg]] = Union[*nc_msg_types] | ||||
| 
 | ||||
|     specs: dict[str, Union[Type[Msg]]] = { | ||||
|         'indexed_generics': idx_spec, | ||||
|         'defstruct': def_spec, | ||||
|         'types_new_class': nc_spec, | ||||
|     } | ||||
|     msgtypes_table: dict[str, list[Msg]] = { | ||||
|         'indexed_generics': idx_msg_types, | ||||
|         'defstruct': defs_msg_types, | ||||
|         'types_new_class': nc_msg_types, | ||||
|     } | ||||
| 
 | ||||
|     # XXX lol apparently type unions can't ever | ||||
|     # be equal eh? | ||||
|     # TODO: grok the diff here better.. | ||||
|     # | ||||
|     # assert ( | ||||
|     #     idx_spec | ||||
|     #     == | ||||
|     #     nc_spec | ||||
|     #     == | ||||
|     #     def_spec | ||||
|     # ) | ||||
|     # breakpoint() | ||||
| 
 | ||||
|     pld_spec: Union[Type] = specs[spec_build_method] | ||||
|     runtime_spec: Union[Type] = Union[*ipc_msg_types] | ||||
|     ipc_spec = pld_spec | runtime_spec | ||||
|     log.runtime( | ||||
|         'Generating new IPC msg-spec\n' | ||||
|         f'{ipc_spec}\n' | ||||
|     ) | ||||
|     assert ( | ||||
|         ipc_spec | ||||
|         and | ||||
|         ipc_spec is not Any | ||||
|     ) | ||||
|     return ( | ||||
|         ipc_spec, | ||||
|         msgtypes_table[spec_build_method] | ||||
|         + | ||||
|         ipc_msg_types, | ||||
|     ) | ||||
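| # Example (sketch): a spec where only `str` payloads are allowed | ||||
| # in the `Started`/`Yield`/`Return` msgs, | ||||
| # | ||||
| # import msgspec | ||||
| # ipc_spec, msg_types = mk_msg_spec(payload_type_union=str) | ||||
| # dec = msgspec.msgpack.Decoder(type=ipc_spec) | ||||
| # # decoding a `Yield` whose `.pld` isn't a `str` now raises | ||||
| # # `msgspec.ValidationError`. | ||||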
										
											
File diff suppressed because it is too large
							|  | @ -19,22 +19,13 @@ Sugary patterns for trio + tractor designs. | |||
| 
 | ||||
| ''' | ||||
| from ._mngrs import ( | ||||
|     gather_contexts, | ||||
|     maybe_open_context, | ||||
|     maybe_open_nursery, | ||||
|     gather_contexts as gather_contexts, | ||||
|     maybe_open_context as maybe_open_context, | ||||
|     maybe_open_nursery as maybe_open_nursery, | ||||
| ) | ||||
| from ._broadcast import ( | ||||
|     broadcast_receiver, | ||||
|     BroadcastReceiver, | ||||
|     Lagged, | ||||
|     AsyncReceiver as AsyncReceiver, | ||||
|     broadcast_receiver as broadcast_receiver, | ||||
|     BroadcastReceiver as BroadcastReceiver, | ||||
|     Lagged as Lagged, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| __all__ = [ | ||||
|     'gather_contexts', | ||||
|     'broadcast_receiver', | ||||
|     'BroadcastReceiver', | ||||
|     'Lagged', | ||||
|     'maybe_open_context', | ||||
|     'maybe_open_nursery', | ||||
| ] | ||||
|  |  | |||
|  | @ -26,7 +26,6 @@ from contextlib import asynccontextmanager | |||
| from functools import partial | ||||
| from operator import ne | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     Callable, | ||||
|     Awaitable, | ||||
|     Any, | ||||
|  | @ -45,6 +44,11 @@ from tractor.log import get_logger | |||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| # TODO: use new type-vars syntax from 3.12 | ||||
| # https://realpython.com/python312-new-features/#dedicated-type-variable-syntax | ||||
| # https://docs.python.org/3/whatsnew/3.12.html#whatsnew312-pep695 | ||||
| # https://docs.python.org/3/reference/simple_stmts.html#type | ||||
| # | ||||
| # A regular invariant generic type | ||||
| T = TypeVar("T") | ||||
| 
 | ||||
|  | @ -110,7 +114,7 @@ class BroadcastState(Struct): | |||
| 
 | ||||
|     # broadcast event to wake up all sleeping consumer tasks | ||||
|     # on a newly produced value from the sender. | ||||
|     recv_ready: Optional[tuple[int, trio.Event]] = None | ||||
|     recv_ready: tuple[int, trio.Event]|None = None | ||||
| 
 | ||||
|     # if a ``trio.EndOfChannel`` is received on any | ||||
|     # consumer all consumers should be placed in this state | ||||
|  | @ -152,11 +156,12 @@ class BroadcastState(Struct): | |||
| 
 | ||||
| class BroadcastReceiver(ReceiveChannel): | ||||
|     ''' | ||||
|     A memory receive channel broadcaster which is non-lossy for the | ||||
|     fastest consumer. | ||||
|     A memory receive channel broadcaster which is non-lossy for | ||||
|     the fastest consumer. | ||||
| 
 | ||||
|     Additional consumer tasks can receive all produced values by registering | ||||
|     with ``.subscribe()`` and receiving from the new instance it delivers. | ||||
|     Additional consumer tasks can receive all produced values by | ||||
|     registering with ``.subscribe()`` and receiving from the new | ||||
|     instance it delivers. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|  | @ -164,7 +169,7 @@ class BroadcastReceiver(ReceiveChannel): | |||
| 
 | ||||
|         rx_chan: AsyncReceiver, | ||||
|         state: BroadcastState, | ||||
|         receive_afunc: Optional[Callable[[], Awaitable[Any]]] = None, | ||||
|         receive_afunc: Callable[[], Awaitable[Any]]|None = None, | ||||
|         raise_on_lag: bool = True, | ||||
| 
 | ||||
|     ) -> None: | ||||
|  | @ -377,7 +382,7 @@ class BroadcastReceiver(ReceiveChannel): | |||
|                         # likely it makes sense to unwind back to the | ||||
|                         # underlying? | ||||
|                         # import tractor | ||||
|                         # await tractor.breakpoint() | ||||
|                         # await tractor.pause() | ||||
|                         log.warning( | ||||
|                             f'Only one sub left for {self}?\n' | ||||
|                             'We can probably unwind from breceiver?' | ||||
|  | @ -452,7 +457,7 @@ def broadcast_receiver( | |||
| 
 | ||||
|     recv_chan: AsyncReceiver, | ||||
|     max_buffer_size: int, | ||||
|     receive_afunc: Optional[Callable[[], Awaitable[Any]]] = None, | ||||
|     receive_afunc: Callable[[], Awaitable[Any]]|None = None, | ||||
|     raise_on_lag: bool = True, | ||||
| 
 | ||||
| ) -> BroadcastReceiver: | ||||
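A minimal usage sketch of the factory signature above, wiring one end of a `trio` memory channel so several tasks each see every produced value; the channel sizes and task bodies are illustrative only:

import trio
from tractor.trionics import broadcast_receiver, Lagged

async def consumer(brx, name: str):
    # each subscriber gets its own non-lossy view of the stream
    async with brx.subscribe() as sub:
        try:
            async for value in sub:
                print(f'{name} got {value}')
        except Lagged:
            # only raised (with `raise_on_lag=True`) when this sub
            # falls further behind the fastest consumer than the
            # buffer allows
            print(f'{name} lagged!')

async def main():
    tx, rx = trio.open_memory_channel(1)
    brx = broadcast_receiver(rx, max_buffer_size=8)
    async with brx, trio.open_nursery() as n:
        n.start_soon(consumer, brx, 'a')
        n.start_soon(consumer, brx, 'b')
        for i in range(3):
            await tx.send(i)
        await tx.aclose()  # subscribers see end-of-channel and exit

trio.run(main)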
|  |  | |||
|  | @ -18,8 +18,12 @@ | |||
| Async context manager primitives with hard ``trio``-aware semantics | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| import inspect | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncContextManager, | ||||
|  | @ -30,13 +34,15 @@ from typing import ( | |||
|     Optional, | ||||
|     Sequence, | ||||
|     TypeVar, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from tractor._state import current_actor | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| from .._state import current_actor | ||||
| from ..log import get_logger | ||||
| if TYPE_CHECKING: | ||||
|     from tractor import ActorNursery | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -47,8 +53,10 @@ T = TypeVar("T") | |||
| 
 | ||||
| @acm | ||||
| async def maybe_open_nursery( | ||||
|     nursery: trio.Nursery | None = None, | ||||
|     nursery: trio.Nursery|ActorNursery|None = None, | ||||
|     shield: bool = False, | ||||
|     lib: ModuleType = trio, | ||||
| 
 | ||||
| ) -> AsyncGenerator[trio.Nursery, Any]: | ||||
|     ''' | ||||
|     Create a new nursery if None provided. | ||||
|  | @ -59,17 +67,17 @@ async def maybe_open_nursery( | |||
|     if nursery is not None: | ||||
|         yield nursery | ||||
|     else: | ||||
|         async with trio.open_nursery() as nursery: | ||||
|         async with lib.open_nursery() as nursery: | ||||
|             nursery.cancel_scope.shield = shield | ||||
|             yield nursery | ||||
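# NOTE: the new `lib` param above generalizes the factory over
# nursery-providing modules: the default `lib=trio` opens a plain
# task nursery while eg. `lib=tractor` can open an `ActorNursery`,
# since both expose a compatible `open_nursery()` acm; passing an
# existing `nursery` instance still short-circuits to a no-op wrap.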
| 
 | ||||
| 
 | ||||
| async def _enter_and_wait( | ||||
| 
 | ||||
|     mngr: AsyncContextManager[T], | ||||
|     unwrapped: dict[int, T], | ||||
|     all_entered: trio.Event, | ||||
|     parent_exit: trio.Event, | ||||
|     seed: int, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|  | @ -80,7 +88,10 @@ async def _enter_and_wait( | |||
|     async with mngr as value: | ||||
|         unwrapped[id(mngr)] = value | ||||
| 
 | ||||
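        # NOTE: `seed` is a sentinel meaning "not entered yet":
        # comparing each slot against it, rather than relying on
        # truthiness as before, means an acm that legitimately
        # yields a falsy value (`None`, `0`, `''`, ...) can no
        # longer stall the `all_entered` event.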
|         if all(unwrapped.values()): | ||||
|         if all( | ||||
|             val != seed | ||||
|             for val in unwrapped.values() | ||||
|         ): | ||||
|             all_entered.set() | ||||
| 
 | ||||
|         await parent_exit.wait() | ||||
|  | @ -88,23 +99,34 @@ async def _enter_and_wait( | |||
| 
 | ||||
| @acm | ||||
| async def gather_contexts( | ||||
| 
 | ||||
|     mngrs: Sequence[AsyncContextManager[T]], | ||||
| 
 | ||||
| ) -> AsyncGenerator[tuple[Optional[T], ...], None]: | ||||
| ) -> AsyncGenerator[ | ||||
|     tuple[ | ||||
|         T | None, | ||||
|         ... | ||||
|     ], | ||||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Concurrently enter a sequence of async context managers, each in | ||||
|     a separate ``trio`` task and deliver the unwrapped values in the | ||||
|     same order once all managers have entered. On exit all contexts are | ||||
|     subsequently and concurrently exited. | ||||
|     Concurrently enter a sequence of async context managers (acms), | ||||
|     each from a separate `trio` task and deliver the unwrapped | ||||
|     `yield`-ed values in the same order once all managers have entered. | ||||
| 
 | ||||
|     This function is somewhat similar to common usage of | ||||
|     ``contextlib.AsyncExitStack.enter_async_context()`` (in a loop) in | ||||
|     combo with ``asyncio.gather()`` except the managers are concurrently | ||||
|     entered and exited, and cancellation just works. | ||||
|     On exit, all acms are subsequently and concurrently exited. | ||||
| 
 | ||||
|     This function is somewhat similar to a batch of non-blocking | ||||
|     calls to `contextlib.AsyncExitStack.enter_async_context()` | ||||
|     (inside a loop) *in combo with* an `asyncio.gather()` to get the | ||||
|     `.__aenter__()`-ed values, except the managers are both | ||||
|     concurrently entered and exited and *cancellation just works*(R). | ||||
| 
 | ||||
|     ''' | ||||
|     unwrapped: dict[int, Optional[T]] = {}.fromkeys(id(mngr) for mngr in mngrs) | ||||
|     seed: int = id(mngrs) | ||||
|     unwrapped: dict[int, T | None] = {}.fromkeys( | ||||
|         (id(mngr) for mngr in mngrs), | ||||
|         seed, | ||||
|     ) | ||||
| 
 | ||||
|     all_entered = trio.Event() | ||||
|     parent_exit = trio.Event() | ||||
|  | @ -116,8 +138,9 @@ async def gather_contexts( | |||
| 
 | ||||
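    # NOTE: `mngrs` is iterated more than once (above to build
    # `unwrapped` and below to spawn the entering tasks), so a lazy
    # generator would be exhausted after the first pass; hence the
    # requirement for a materialized sequence type.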
|     if not mngrs: | ||||
|         raise ValueError( | ||||
|             'input mngrs is empty?\n' | ||||
|             'Did try to use inline generator syntax?' | ||||
|             '`.trionics.gather_contexts()` input mngrs is empty?\n' | ||||
|             'Did you try to use inline generator syntax?\n' | ||||
|             'Use a non-lazy iterator or sequence type instead!' | ||||
|         ) | ||||
| 
 | ||||
|     async with trio.open_nursery() as n: | ||||
|  | @ -128,6 +151,7 @@ async def gather_contexts( | |||
|                 unwrapped, | ||||
|                 all_entered, | ||||
|                 parent_exit, | ||||
|                 seed, | ||||
|             ) | ||||
| 
 | ||||
|         # deliver control once all managers have started up | ||||
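A minimal usage sketch of `gather_contexts()` as (re)documented above; the `open_conn` acm and its addresses are hypothetical stand-ins:

from contextlib import asynccontextmanager as acm

import trio
from tractor.trionics import gather_contexts

@acm
async def open_conn(addr: str):
    # stand-in for real setup/teardown around a connection
    await trio.sleep(0.1)  # pretend handshake
    yield f'conn-to-{addr}'

async def main():
    addrs = ['a', 'b', 'c']
    # NB: pass a materialized sequence, not a lazy generator!
    async with gather_contexts(
        [open_conn(a) for a in addrs],
    ) as conns:
        # values are delivered in input order once all have entered
        assert conns == ('conn-to-a', 'conn-to-b', 'conn-to-c')

trio.run(main)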
|  | @ -168,7 +192,7 @@ class _Cache: | |||
|         cls, | ||||
|         mng, | ||||
|         ctx_key: tuple, | ||||
|         task_status: TaskStatus[T] = trio.TASK_STATUS_IGNORED, | ||||
|         task_status: trio.TaskStatus[T] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         async with mng as value: | ||||
|  | @ -195,9 +219,10 @@ async def maybe_open_context( | |||
| 
 | ||||
| ) -> AsyncIterator[tuple[bool, T]]: | ||||
|     ''' | ||||
|     Maybe open a context manager if there is not already a _Cached | ||||
|     version for the provided ``key`` for *this* actor. Return the | ||||
|     _Cached instance on a _Cache hit. | ||||
|     Maybe open an async-context-manager (acm) if there is not already | ||||
|     a `_Cached` version for the provided (input) `key` for *this* actor. | ||||
| 
 | ||||
|     Return the `_Cached` instance on a `_Cache` hit. | ||||
| 
 | ||||
|     ''' | ||||
|     fid = id(acm_func) | ||||
|  | @ -209,6 +234,7 @@ async def maybe_open_context( | |||
| 
 | ||||
|     # yielded output | ||||
|     yielded: Any = None | ||||
|     lock_registered: bool = False | ||||
| 
 | ||||
|     # Lock resource acquisition around task racing / ``trio``'s | ||||
|     # scheduler protocol. | ||||
|  | @ -216,6 +242,7 @@ async def maybe_open_context( | |||
|     # to allow re-entrant use cases where one `maybe_open_context()` | ||||
|     # wrapped factory may want to call into another. | ||||
|     lock = _Cache.locks.setdefault(fid, trio.Lock()) | ||||
|     lock_registered = True | ||||
|     await lock.acquire() | ||||
| 
 | ||||
|     # XXX: one singleton nursery per actor and we want to | ||||
|  | @ -254,8 +281,16 @@ async def maybe_open_context( | |||
|         yield False, yielded | ||||
| 
 | ||||
|     else: | ||||
|         log.info(f'Reusing _Cached resource for {ctx_key}') | ||||
|         _Cache.users += 1 | ||||
|         log.runtime( | ||||
|             f'Re-using cached resource for user {_Cache.users}\n\n' | ||||
|             f'{ctx_key!r} -> {type(yielded)}\n' | ||||
| 
 | ||||
|             # TODO: make this work with values but without | ||||
|             # `msgspec.Struct` causing frickin crashes on field-type | ||||
|             # lookups.. | ||||
|             # f'{ctx_key!r} -> {yielded!r}\n' | ||||
|         ) | ||||
|         lock.release() | ||||
|         yield True, yielded | ||||
| 
 | ||||
|  | @ -275,4 +310,9 @@ async def maybe_open_context( | |||
|                     _, no_more_users = entry | ||||
|                     no_more_users.set() | ||||
| 
 | ||||
|                 _Cache.locks.pop(fid) | ||||
|                 if lock_registered: | ||||
|                     maybe_lock = _Cache.locks.pop(fid, None) | ||||
|                     if maybe_lock is None: | ||||
|                         log.error( | ||||
|                             f'Resource lock for {fid} ALREADY POPPED?' | ||||
|                         ) | ||||
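Finally, a minimal sketch of the caching acm this last hunk hardens; the `open_client` factory and its kwargs are hypothetical, and a running (root) actor is assumed since the cache keeps per-actor state (see the singleton-nursery comment above):

from contextlib import asynccontextmanager as acm

import trio
import tractor
from tractor.trionics import maybe_open_context

@acm
async def open_client(host: str):
    yield f'client-for-{host}'  # stand-in for a real resource

async def task(name: str):
    async with maybe_open_context(
        acm_func=open_client,
        kwargs={'host': 'example.org'},
    ) as (cache_hit, client):
        print(f'{name}: hit={cache_hit} -> {client}')
        await trio.sleep(0.1)  # hold the resource a bit

async def main():
    async with (
        tractor.open_root_actor(),
        trio.open_nursery() as n,
    ):
        n.start_soon(task, 'first')
        n.start_soon(task, 'second')  # should report a cache hit

trio.run(main)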
|  |  | |||