Compare commits
	
		
			251 Commits 
		
	
	
		
			15f99c313e
			...
			e646ce5c0d
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | e646ce5c0d | |
|  | b6d800954a | |
|  | beb7097ab4 | |
|  | 724c22d266 | |
|  | ecd61226d8 | |
|  | 69fd46e1ce | |
|  | af660c1019 | |
|  | 34e9e529d2 | |
|  | 816b82f9fe | |
|  | e8111e40f9 | |
|  | b7aa72465d | |
|  | 1ff79f86b7 | |
|  | f26d487000 | |
|  | 1075ea3687 | |
|  | 2bd4cc9727 | |
|  | a60837550e | |
|  | 72035a20d7 | |
|  | 32e760284f | |
|  | 14fb56329b | |
|  | 46f644e748 | |
|  | cdd0c5384a | |
|  | 1afef149d4 | |
|  | 11d4c83aed | |
|  | 72fc6fce24 | |
|  | 4a195eef4c | |
|  | a5b8e009fd | |
|  | ddf6222eb6 | |
|  | 9412745aaf | |
|  | 4a5ab155e2 | |
|  | 526187d1a0 | |
|  | c738f8b540 | |
|  | 962941c56c | |
|  | b692979dda | |
|  | 5fcb46bbb9 | |
|  | ec6b2e8738 | |
|  | e1575051f0 | |
|  | 5f8ec63b0c | |
|  | a356233b47 | |
|  | 9af6271e99 | |
|  | 36021d1f2b | |
|  | 7443e387b5 | |
|  | d9662d9b34 | |
|  | 84dbf53817 | |
|  | e898a41e22 | |
|  | 46c9ee2551 | |
|  | e7adeee549 | |
|  | e10616fa4d | |
|  | f24e6f6e48 | |
|  | aac013ae5c | |
|  | ccbd35f273 | |
|  | 346e009730 | |
|  | 4ada92d2f7 | |
|  | 5cdd012417 | |
|  | 701dd135eb | |
|  | 060ee1457e | |
|  | 32e12c8b03 | |
|  | 50ba23e602 | |
|  | ddbda17338 | |
|  | 199247309e | |
|  | 10558b0986 | |
|  | eaa5d23543 | |
|  | 904d8ce8ff | |
|  | f14fb53958 | |
|  | 49cd00232e | |
|  | ae16368949 | |
|  | aa7448793a | |
|  | 2df7ffd702 | |
|  | dba2d87baf | |
|  | 276f88fd0c | |
|  | b2087404e3 | |
|  | 9bc7be30bf | |
|  | 1d9e60626c | |
|  | ef7f34ca1c | |
|  | 417f4f7255 | |
|  | 8de79372b7 | |
|  | d105da0fcf | |
|  | 3eef9aeac5 | |
|  | 521a2e353d | |
|  | 6927767d39 | |
|  | bd66450a79 | |
|  | 9811db9ac5 | |
|  | 6af320273b | |
|  | 74048b06a7 | |
|  | 5b9a2642f6 | |
|  | 778710efbb | |
|  | 4792ffcc04 | |
|  | 3c1f56f8d9 | |
|  | 682cf884c4 | |
|  | 8dcc49fce2 | |
|  | b517dacf0a | |
|  | d3680bfe6a | |
|  | e863159c7f | |
|  | ed42aa7e65 | |
|  | e8fee54534 | |
|  | aee1bf8456 | |
|  | 69fb7beff8 | |
|  | f5b1d0179e | |
|  | dee312cae1 | |
|  | 85fd312c22 | |
|  | 6754a80186 | |
|  | d3f7b83ea0 | |
|  | d8dd0c0a81 | |
|  | 0c8bb88cc5 | |
|  | 0687dac97a | |
|  | 4589ff307c | |
|  | c39427dc15 | |
|  | dc5d622e70 | |
|  | 319dda77b4 | |
|  | 59a3449455 | |
|  | 1ef1ebfa99 | |
|  | a95b84e4fb | |
|  | 54d397b726 | |
|  | 33e646fd6a | |
|  | f120ee72f5 | |
|  | 08dc32fbb7 | |
|  | fd0c14df80 | |
|  | a1779a8fa9 | |
|  | d154afd678 | |
|  | f05abbcfee | |
|  | 9330a75255 | |
|  | 235db17c9c | |
|  | f227ce6080 | |
|  | aa17635c4b | |
|  | b673d10e1b | |
|  | 46a1a54aeb | |
|  | d7ca1dfd94 | |
|  | deb61423c4 | |
|  | ea5eeba0a0 | |
|  | 3ea4617120 | |
|  | 6819ec01d0 | |
|  | 71518ea94a | |
|  | 4520183cdc | |
|  | 5b14baaf58 | |
|  | 18de9c1693 | |
|  | eb88511a8c | |
|  | 66048da832 | |
|  | 6c992a2fea | |
|  | d530002d66 | |
|  | 904c6895f7 | |
|  | f0912c9859 | |
|  | 3b5970f12b | |
|  | 5668328c8f | |
|  | e133911a44 | |
|  | 09948d71c6 | |
|  | 452094df27 | |
|  | e0dc1d73b2 | |
|  | 8881219eae | |
|  | 26d3ba7cc7 | |
|  | 6734dbb3cd | |
|  | 29a001c4ef | |
|  | 2ddfe11d71 | |
|  | 316afdec55 | |
|  | bc660a533c | |
|  | 61183f6a97 | |
|  | 8d5b40507c | |
|  | 194bb8f7fb | |
|  | c1747a290a | |
|  | 0c57e1a808 | |
|  | 17cf3d45ba | |
|  | 04bd53ff10 | |
|  | 332ce97650 | |
|  | d3e13658ab | |
|  | d680e31e4f | |
|  | 048c60f112 | |
|  | 219d5c1745 | |
|  | 467764d45e | |
|  | 998c0f0bd5 | |
|  | ceaafc064e | |
|  | 7b6881cf0a | |
|  | 2cdd5b5b8f | |
|  | 1f4c780b98 | |
|  | f9de439b87 | |
|  | 49443d3a7e | |
|  | b78732781f | |
|  | bf08066031 | |
|  | 3b38fa8673 | |
|  | 7910e1297b | |
|  | 0efc4c1b87 | |
|  | 83e3a75c10 | |
|  | 3fb99f2ba5 | |
|  | 94d8bef2d6 | |
|  | e46046a746 | |
|  | 875081e7a2 | |
|  | 6819cf908a | |
|  | 9e5bdd26d7 | |
|  | 5d4681df4b | |
|  | baee808654 | |
|  | 2ed43373c5 | |
|  | d982daa886 | |
|  | 97fc2a6628 | |
|  | 5bf27aca2c | |
|  | 85c9a8e628 | |
|  | 69b509d09e | |
|  | 41499c6d9e | |
|  | be0ded2a22 | |
|  | 7d71fce558 | |
|  | cbb9bbcbca | |
|  | ef3a7fbaa8 | |
|  | 14583307ee | |
|  | 59966e5650 | |
|  | ca43f15aa0 | |
|  | 36bf58887d | |
|  | 7ca746e96e | |
|  | 956ff11863 | |
|  | 515d5faa0a | |
|  | 2995a6afb7 | |
|  | 9381d21281 | |
|  | 9ea5aa1cde | |
|  | 304590abaa | |
|  | 797f7f6d63 | |
|  | d4d1dca812 | |
|  | 213e7dbb67 | |
|  | 162feec6e9 | |
|  | 7bb6a53581 | |
|  | 6628fa00d9 | |
|  | 7a050e5edb | |
|  | 6e72f2ef13 | |
|  | 28a8d15071 | |
|  | c9d2993338 | |
|  | a13160d920 | |
|  | e9f1d8e8be | |
|  | 6c672a67e2 | |
|  | 344d8ebc0c | |
|  | 78b08e2a91 | |
|  | 4e769e45e4 | |
|  | dbb5e7dc78 | |
|  | abc9e68f33 | |
|  | 1544849bbf | |
|  | fc6419251b | |
|  | f1dd6474bf | |
|  | 5a79a17dbb | |
|  | 13ecb151db | |
|  | 335997966c | |
|  | e72bc5c208 | |
|  | 7908c9575e | |
|  | 8d8a47ef7b | |
|  | afabef166e | |
|  | b5bdd20eb5 | |
|  | 405c2a27e6 | |
|  | 8d716f2113 | |
|  | c79c2d7ffd | |
|  | e0d7ed48e8 | |
|  | 9e16cfe8fd | |
|  | 6cd74a5dba | |
|  | fe9406be9b | |
|  | b589bef1b6 | |
|  | 79c71bfbaf | |
|  | 68f170fde1 | |
|  | 10b52ba98a | |
|  | 65192e80c1 | |
|  | 4e71b57bf5 | 
|  | @ -21,75 +21,12 @@ import trio | |||
| import pytest | ||||
| 
 | ||||
| 
 | ||||
| async def break_ipc( | ||||
|     stream: MsgStream, | ||||
|     method: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| 
 | ||||
|     def_method: str = 'eof', | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     XXX: close the channel right after an error is raised | ||||
|     purposely breaking the IPC transport to make sure the parent | ||||
|     doesn't get stuck in debug or hang on the connection join. | ||||
|     this more or less simulates an infinite msg-receive hang on | ||||
|     the other end. | ||||
| 
 | ||||
|     ''' | ||||
|     # close channel via IPC prot msging before | ||||
|     # any transport breakage | ||||
|     if pre_close: | ||||
|         await stream.aclose() | ||||
| 
 | ||||
|     method: str = method or def_method | ||||
|     print( | ||||
|         '#################################\n' | ||||
|         'Simulating CHILD-side IPC BREAK!\n' | ||||
|         f'method: {method}\n' | ||||
|         f'pre `.aclose()`: {pre_close}\n' | ||||
|         '#################################\n' | ||||
|     ) | ||||
| 
 | ||||
|     match method: | ||||
|         case 'trans_aclose': | ||||
|             await stream._ctx.chan.transport.stream.aclose() | ||||
| 
 | ||||
|         case 'eof': | ||||
|             await stream._ctx.chan.transport.stream.send_eof() | ||||
| 
 | ||||
|         case 'msg': | ||||
|             await stream._ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: the actual real-world simulated cases like | ||||
|         # transport layer hangs and/or lower layer 2-gens type | ||||
|         # scenarios.. | ||||
|         # | ||||
|         # -[ ] already have some issues for this general testing | ||||
|         # area: | ||||
|         #  - https://github.com/goodboy/tractor/issues/97 | ||||
|         #  - https://github.com/goodboy/tractor/issues/124 | ||||
|         #   - PR from @guille: | ||||
|         #     https://github.com/goodboy/tractor/pull/149 | ||||
|         # case 'hang': | ||||
|         # TODO: framework research: | ||||
|         # | ||||
|         # - https://github.com/GuoTengda1993/pynetem | ||||
|         # - https://github.com/shopify/toxiproxy | ||||
|         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html | ||||
| 
 | ||||
|         case _: | ||||
|             raise RuntimeError( | ||||
|                 f'IPC break method unsupported: {method}' | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| async def break_ipc_then_error( | ||||
|     stream: MsgStream, | ||||
|     break_ipc_with: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| ): | ||||
|     await break_ipc( | ||||
|     await _testing.break_ipc( | ||||
|         stream=stream, | ||||
|         method=break_ipc_with, | ||||
|         pre_close=pre_close, | ||||
|  | @ -121,6 +58,7 @@ async def recv_and_spawn_net_killers( | |||
|     Receive stream msgs and spawn some IPC killers mid-stream. | ||||
| 
 | ||||
|     ''' | ||||
|     broke_ipc: bool = False | ||||
|     await ctx.started() | ||||
|     async with ( | ||||
|         ctx.open_stream() as stream, | ||||
|  | @ -128,13 +66,17 @@ async def recv_and_spawn_net_killers( | |||
|     ): | ||||
|         async for i in stream: | ||||
|             print(f'child echoing {i}') | ||||
|             if not broke_ipc: | ||||
|                 await stream.send(i) | ||||
|             else: | ||||
|                 await trio.sleep(0.01) | ||||
| 
 | ||||
|             if ( | ||||
|                 break_ipc_after | ||||
|                 and | ||||
|                 i >= break_ipc_after | ||||
|             ): | ||||
|                 broke_ipc = True | ||||
|                 n.start_soon( | ||||
|                     iter_ipc_stream, | ||||
|                     stream, | ||||
|  | @ -242,14 +184,13 @@ async def main( | |||
|                         # await stream._ctx.chan.send(None) | ||||
|                         # await stream._ctx.chan.transport.stream.send_eof() | ||||
|                         await stream._ctx.chan.transport.stream.aclose() | ||||
| 
 | ||||
|                         ipc_break_sent = True | ||||
| 
 | ||||
|                     # it actually breaks right here in the | ||||
|                     # mp_spawn/forkserver backends and thus the zombie | ||||
|                     # reaper never even kicks in? | ||||
|                     print(f'parent sending {i}') | ||||
|                     # mp_spawn/forkserver backends and thus the | ||||
|                     # zombie reaper never even kicks in? | ||||
|                     try: | ||||
|                         print(f'parent sending {i}') | ||||
|                         await stream.send(i) | ||||
|                     except ContextCancelled as ctxc: | ||||
|                         print( | ||||
|  | @ -262,6 +203,13 @@ async def main( | |||
|                         # TODO: is this needed or no? | ||||
|                         raise | ||||
| 
 | ||||
|                     except trio.ClosedResourceError: | ||||
|                         # NOTE: don't send if we already broke the | ||||
|                         # connection to avoid raising a closed-error | ||||
|                         # such that we drop through to the ctl-c | ||||
|                         # mashing by user. | ||||
|                         await trio.sleep(0.01) | ||||
| 
 | ||||
|                     # timeout: int = 1 | ||||
|                     # with trio.move_on_after(timeout) as cs: | ||||
|                     async with stuff_hangin_ctlc() as timeout: | ||||
|  |  | |||
|  | @ -1,8 +1,16 @@ | |||
| ''' | ||||
| Examples of using the builtin `breakpoint()` from an `asyncio.Task` | ||||
| running in a subactor spawned with `infect_asyncio=True`. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import to_asyncio | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| async def aio_sleep_forever(): | ||||
|  | @ -23,15 +31,16 @@ async def bp_then_error( | |||
|     # NOTE: what happens here inside the hook needs some refinement.. | ||||
|     # => seems like it's still `._debug._set_trace()` but | ||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want | ||||
|     #    some further, at least, meta-data about the task/actoq in debug | ||||
|     #    in terms of making it clear it's asyncio mucking about. | ||||
|     #    some further, at least, meta-data about the task/actor in debug | ||||
|     #    in terms of making it clear it's `asyncio` mucking about. | ||||
|     breakpoint() | ||||
| 
 | ||||
| 
 | ||||
|     # short checkpoint / delay | ||||
|     await asyncio.sleep(0.5) | ||||
|     await asyncio.sleep(0.5)  # asyncio-side | ||||
| 
 | ||||
|     if raise_after_bp: | ||||
|         raise ValueError('blah') | ||||
|         raise ValueError('asyncio side error!') | ||||
| 
 | ||||
|     # TODO: test case with this so that it gets cancelled? | ||||
|     else: | ||||
|  | @ -52,20 +61,19 @@ async def trio_ctx( | |||
| 
 | ||||
|         to_asyncio.open_channel_from( | ||||
|             bp_then_error, | ||||
|             raise_after_bp=not bp_before_started, | ||||
|             # raise_after_bp=not bp_before_started, | ||||
|         ) as (first, chan), | ||||
| 
 | ||||
|         trio.open_nursery() as n, | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
| 
 | ||||
|         assert first == 'start' | ||||
| 
 | ||||
|         if bp_before_started: | ||||
|             await tractor.breakpoint() | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|         await ctx.started(first) | ||||
|         await ctx.started(first)  # trio-side | ||||
| 
 | ||||
|         n.start_soon( | ||||
|         tn.start_soon( | ||||
|             to_asyncio.run_task, | ||||
|             aio_sleep_forever, | ||||
|         ) | ||||
|  | @ -73,39 +81,50 @@ async def trio_ctx( | |||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     bps_all_over: bool = False, | ||||
|     bps_all_over: bool = True, | ||||
| 
 | ||||
|     # TODO, WHICH OF THESE HAZ BUGZ? | ||||
|     cancel_from_root: bool = False, | ||||
|     err_from_root: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         # debug_mode=True, | ||||
|     ) as n: | ||||
| 
 | ||||
|         p = await n.start_actor( | ||||
|         debug_mode=True, | ||||
|         maybe_enable_greenback=True, | ||||
|         # loglevel='devx', | ||||
|     ) as an: | ||||
|         ptl: Portal = await an.start_actor( | ||||
|             'aio_daemon', | ||||
|             enable_modules=[__name__], | ||||
|             infect_asyncio=True, | ||||
|             debug_mode=True, | ||||
|             loglevel='cancel', | ||||
|             # loglevel='cancel', | ||||
|         ) | ||||
| 
 | ||||
|         async with p.open_context( | ||||
|         async with ptl.open_context( | ||||
|             trio_ctx, | ||||
|             bp_before_started=bps_all_over, | ||||
|         ) as (ctx, first): | ||||
| 
 | ||||
|             assert first == 'start' | ||||
| 
 | ||||
|             if bps_all_over: | ||||
|                 await tractor.breakpoint() | ||||
|             # pause in parent to ensure no cross-actor | ||||
|             # locking problems exist! | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|             # await trio.sleep_forever() | ||||
|             if cancel_from_root: | ||||
|                 await ctx.cancel() | ||||
| 
 | ||||
|             if err_from_root: | ||||
|                 assert 0 | ||||
|             else: | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
|         # TODO: case where we cancel from trio-side while asyncio task | ||||
|         # has debugger lock? | ||||
|         # await p.cancel_actor() | ||||
|         # await ptl.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| ''' | ||||
| Fast fail test with a context. | ||||
| Fast fail test with a `Context`. | ||||
| 
 | ||||
| Ensure the partially initialized sub-actor process | ||||
| doesn't cause a hang on error/cancel of the parent | ||||
|  |  | |||
|  | @ -7,7 +7,7 @@ async def breakpoint_forever(): | |||
|     try: | ||||
|         while True: | ||||
|             yield 'yo' | ||||
|             await tractor.breakpoint() | ||||
|             await tractor.pause() | ||||
|     except BaseException: | ||||
|         tractor.log.get_console_log().exception( | ||||
|             'Cancelled while trying to enter pause point!' | ||||
|  | @ -25,7 +25,8 @@ async def main(): | |||
|     """ | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         loglevel='cancel', | ||||
|         # loglevel='cancel', | ||||
|         # loglevel='devx', | ||||
|     ) as n: | ||||
| 
 | ||||
|         p0 = await n.start_actor('bp_forever', enable_modules=[__name__]) | ||||
|  |  | |||
|  | @ -10,7 +10,7 @@ async def name_error(): | |||
| async def breakpoint_forever(): | ||||
|     "Indefinitely re-enter debugger in child actor." | ||||
|     while True: | ||||
|         await tractor.breakpoint() | ||||
|         await tractor.pause() | ||||
| 
 | ||||
|         # NOTE: if the test never sent 'q'/'quit' commands | ||||
|         # on the pdb repl, without this checkpoint line the | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ async def breakpoint_forever(): | |||
|     "Indefinitely re-enter debugger in child actor." | ||||
|     while True: | ||||
|         await trio.sleep(0.1) | ||||
|         await tractor.breakpoint() | ||||
|         await tractor.pause() | ||||
| 
 | ||||
| 
 | ||||
| async def name_error(): | ||||
|  |  | |||
|  | @ -0,0 +1,56 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def name_error( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     ''' | ||||
|     Raise a `NameError`, catch it and enter `.post_mortem()`, then | ||||
|     expect the `._rpc._invoke()` crash handler to also engage. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         getattr(doggypants)  # noqa (on purpose) | ||||
|     except NameError: | ||||
|         await tractor.post_mortem() | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     ''' | ||||
|     Test 3 `PdbREPL` entries: | ||||
|       - one in the child due to manual `.post_mortem()`, | ||||
|       - another in the child due to runtime RPC crash handling. | ||||
|       - final one here in parent from the RAE. | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX NOTE: ideally the REPL arrives at this frame in the parent | ||||
|     # ONE UP FROM the inner ctx block below! | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|         # loglevel='cancel', | ||||
|     ) as an: | ||||
|         p: tractor.Portal = await an.start_actor( | ||||
|             'child', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         # XXX should raise `RemoteActorError[NameError]` | ||||
|         # AND be the active frame when REPL enters! | ||||
|         try: | ||||
|             async with p.open_context(name_error) as (ctx, first): | ||||
|                 assert first | ||||
|         except tractor.RemoteActorError as rae: | ||||
|             assert rae.boxed_type is NameError | ||||
| 
 | ||||
|             # manually handle in root's parent task | ||||
|             await tractor.post_mortem() | ||||
|             raise | ||||
|         else: | ||||
|             raise RuntimeError('IPC ctx should have remote errored!?') | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -6,19 +6,46 @@ import tractor | |||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
|     async with tractor.open_nursery(debug_mode=True) as an: | ||||
| 
 | ||||
|         assert os.environ['PYTHONBREAKPOINT'] == 'tractor._debug._set_trace' | ||||
|     # intially unset, no entry. | ||||
|     orig_pybp_var: int = os.environ.get('PYTHONBREAKPOINT') | ||||
|     assert orig_pybp_var in {None, "0"} | ||||
| 
 | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|     ) as an: | ||||
|         assert an | ||||
|         assert ( | ||||
|             (pybp_var := os.environ['PYTHONBREAKPOINT']) | ||||
|             == | ||||
|             'tractor.devx._debug._sync_pause_from_builtin' | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: an assert that verifies the hook has indeed been, hooked | ||||
|         # XD | ||||
|         assert sys.breakpointhook is not tractor._debug._set_trace | ||||
|         assert ( | ||||
|             (pybp_hook := sys.breakpointhook) | ||||
|             is not tractor.devx._debug._set_trace | ||||
|         ) | ||||
| 
 | ||||
|         print( | ||||
|             f'$PYTHONOBREAKPOINT: {pybp_var!r}\n' | ||||
|             f'`sys.breakpointhook`: {pybp_hook!r}\n' | ||||
|         ) | ||||
|         breakpoint() | ||||
|         pass  # first bp, tractor hook set. | ||||
| 
 | ||||
|     # TODO: an assert that verifies the hook is unhooked.. | ||||
|     # XXX AFTER EXIT (of actor-runtime) verify the hook is unset.. | ||||
|     # | ||||
|     # YES, this is weird but it's how stdlib docs say to do it.. | ||||
|     # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|     assert os.environ.get('PYTHONBREAKPOINT') is orig_pybp_var | ||||
|     assert sys.breakpointhook | ||||
| 
 | ||||
|     # now ensure a regular builtin pause still works | ||||
|     breakpoint() | ||||
|     pass  # last bp, stdlib hook restored | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  |  | |||
|  | @ -10,7 +10,7 @@ async def main(): | |||
| 
 | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|         await tractor.breakpoint() | ||||
|         await tractor.pause() | ||||
| 
 | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|  |  | |||
|  | @ -11,7 +11,7 @@ async def main( | |||
|         # loglevel='runtime', | ||||
|     ): | ||||
|         while True: | ||||
|             await tractor.breakpoint() | ||||
|             await tractor.pause() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|  |  | |||
|  | @ -0,0 +1,83 @@ | |||
| ''' | ||||
| Verify we can dump a `stackscope` tree on a hang. | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| import signal | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| @tractor.context | ||||
| async def start_n_shield_hang( | ||||
|     ctx: tractor.Context, | ||||
| ): | ||||
|     # actor: tractor.Actor = tractor.current_actor() | ||||
| 
 | ||||
|     # sync to parent-side task | ||||
|     await ctx.started(os.getpid()) | ||||
| 
 | ||||
|     print('Entering shield sleep..') | ||||
|     with trio.CancelScope(shield=True): | ||||
|         await trio.sleep_forever()  # in subactor | ||||
| 
 | ||||
|     # XXX NOTE ^^^ since this shields, we expect | ||||
|     # the zombie reaper (aka T800) to engage on | ||||
|     # SIGINT from the user and eventually hard-kill | ||||
|     # this subprocess! | ||||
| 
 | ||||
| 
 | ||||
| async def main( | ||||
|     from_test: bool = False, | ||||
| ) -> None: | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|             enable_stack_on_sig=True, | ||||
|             # maybe_enable_greenback=False, | ||||
|             loglevel='devx', | ||||
|         ) as an, | ||||
|     ): | ||||
|         ptl: tractor.Portal  = await an.start_actor( | ||||
|             'hanger', | ||||
|             enable_modules=[__name__], | ||||
|             debug_mode=True, | ||||
|         ) | ||||
|         async with ptl.open_context( | ||||
|             start_n_shield_hang, | ||||
|         ) as (ctx, cpid): | ||||
| 
 | ||||
|             _, proc, _ = an._children[ptl.chan.uid] | ||||
|             assert cpid == proc.pid | ||||
| 
 | ||||
|             print( | ||||
|                 'Yo my child hanging..?\n' | ||||
|                 # "i'm a user who wants to see a `stackscope` tree!\n" | ||||
|             ) | ||||
| 
 | ||||
|             # XXX simulate the wrapping test's "user actions" | ||||
|             # (i.e. if a human didn't run this manually but wants to | ||||
|             # know what they should do to reproduce test behaviour) | ||||
|             if from_test: | ||||
|                 print( | ||||
|                     f'Sending SIGUSR1 to {cpid!r}!\n' | ||||
|                 ) | ||||
|                 os.kill( | ||||
|                     cpid, | ||||
|                     signal.SIGUSR1, | ||||
|                 ) | ||||
| 
 | ||||
|                 # simulate user cancelling program | ||||
|                 await trio.sleep(0.5) | ||||
|                 os.kill( | ||||
|                     os.getpid(), | ||||
|                     signal.SIGINT, | ||||
|                 ) | ||||
|             else: | ||||
|                 # actually let user send the ctl-c | ||||
|                 await trio.sleep_forever()  # in root | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -0,0 +1,88 @@ | |||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| 
 | ||||
| async def cancellable_pause_loop( | ||||
|     task_status: trio.TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED | ||||
| ): | ||||
|     with trio.CancelScope() as cs: | ||||
|         task_status.started(cs) | ||||
|         for _ in range(3): | ||||
|             try: | ||||
|                 # ON first entry, there is no level triggered | ||||
|                 # cancellation yet, so this cp does a parent task | ||||
|                 # ctx-switch so that this scope raises for the NEXT | ||||
|                 # checkpoint we hit. | ||||
|                 await trio.lowlevel.checkpoint() | ||||
|                 await tractor.pause() | ||||
| 
 | ||||
|                 cs.cancel() | ||||
| 
 | ||||
|                 # parent should have called `cs.cancel()` by now | ||||
|                 await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
|             except trio.Cancelled: | ||||
|                 print('INSIDE SHIELDED PAUSE') | ||||
|                 await tractor.pause(shield=True) | ||||
|         else: | ||||
|             # should raise it again, bubbling up to parent | ||||
|             print('BUBBLING trio.Cancelled to parent task-nursery') | ||||
|             await trio.lowlevel.checkpoint() | ||||
| 
 | ||||
| 
 | ||||
| async def pm_on_cancelled(): | ||||
|     async with trio.open_nursery() as tn: | ||||
|         tn.cancel_scope.cancel() | ||||
|         try: | ||||
|             await trio.sleep_forever() | ||||
|         except trio.Cancelled: | ||||
|             # should also raise `Cancelled` since | ||||
|             # we didn't pass `shield=True`. | ||||
|             try: | ||||
|                 await tractor.post_mortem(hide_tb=False) | ||||
|             except trio.Cancelled as taskc: | ||||
| 
 | ||||
|                 # should enter just fine, in fact it should | ||||
|                 # be debugging the internals of the previous | ||||
|                 # sin-shield call above Bo | ||||
|                 await tractor.post_mortem( | ||||
|                     hide_tb=False, | ||||
|                     shield=True, | ||||
|                 ) | ||||
|                 raise taskc | ||||
| 
 | ||||
|         else: | ||||
|             raise RuntimeError('Dint cancel as expected!?') | ||||
| 
 | ||||
| 
 | ||||
| async def cancelled_before_pause( | ||||
| ): | ||||
|     ''' | ||||
|     Verify that using a shielded pause works despite surrounding | ||||
|     cancellation called state in the calling task. | ||||
| 
 | ||||
|     ''' | ||||
|     async with trio.open_nursery() as tn: | ||||
|         cs: trio.CancelScope = await tn.start(cancellable_pause_loop) | ||||
|         await trio.sleep(0.1) | ||||
| 
 | ||||
|     assert cs.cancelled_caught | ||||
| 
 | ||||
|     await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| async def main(): | ||||
|     async with tractor.open_nursery( | ||||
|         debug_mode=True, | ||||
|     ) as n: | ||||
|         portal: tractor.Portal = await n.run_in_actor( | ||||
|             cancelled_before_pause, | ||||
|         ) | ||||
|         await portal.result() | ||||
| 
 | ||||
|         # ensure the same works in the root actor! | ||||
|         await pm_on_cancelled() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     trio.run(main) | ||||
|  | @ -4,9 +4,9 @@ import trio | |||
| 
 | ||||
| async def gen(): | ||||
|     yield 'yo' | ||||
|     await tractor.breakpoint() | ||||
|     await tractor.pause() | ||||
|     yield 'yo' | ||||
|     await tractor.breakpoint() | ||||
|     await tractor.pause() | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -15,7 +15,7 @@ async def just_bp( | |||
| ) -> None: | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     await tractor.breakpoint() | ||||
|     await tractor.pause() | ||||
| 
 | ||||
|     # TODO: bps and errors in this call.. | ||||
|     async for val in gen(): | ||||
|  |  | |||
|  | @ -1,16 +1,37 @@ | |||
| from functools import partial | ||||
| import time | ||||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| 
 | ||||
| # TODO: only import these when not running from test harness? | ||||
| # can we detect `pexpect` usage maybe? | ||||
| # from tractor.devx._debug import ( | ||||
| #     get_lock, | ||||
| #     get_debug_req, | ||||
| # ) | ||||
| 
 | ||||
| 
 | ||||
| def sync_pause( | ||||
|     use_builtin: bool = True, | ||||
|     use_builtin: bool = False, | ||||
|     error: bool = False, | ||||
|     hide_tb: bool = True, | ||||
|     pre_sleep: float|None = None, | ||||
| ): | ||||
|     if pre_sleep: | ||||
|         time.sleep(pre_sleep) | ||||
| 
 | ||||
|     if use_builtin: | ||||
|         breakpoint(hide_tb=False) | ||||
|         breakpoint(hide_tb=hide_tb) | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: maybe for testing some kind of cm style interface | ||||
|         # where the `._set_trace()` call doesn't happen until block | ||||
|         # exit? | ||||
|         # assert get_lock().ctx_in_debug is None | ||||
|         # assert get_debug_req().repl is None | ||||
|         tractor.pause_from_sync() | ||||
|         # assert get_debug_req().repl is None | ||||
| 
 | ||||
|     if error: | ||||
|         raise RuntimeError('yoyo sync code error') | ||||
|  | @ -25,43 +46,116 @@ async def start_n_sync_pause( | |||
|     # sync to parent-side task | ||||
|     await ctx.started() | ||||
| 
 | ||||
|     print(f'entering SYNC PAUSE in {actor.uid}') | ||||
|     print(f'Entering `sync_pause()` in subactor: {actor.uid}\n') | ||||
|     sync_pause() | ||||
|     print(f'back from SYNC PAUSE in {actor.uid}') | ||||
|     print(f'Exited `sync_pause()` in subactor: {actor.uid}\n') | ||||
| 
 | ||||
| 
 | ||||
| async def main() -> None: | ||||
|     async with tractor.open_nursery( | ||||
|         # NOTE: required for pausing from sync funcs | ||||
|         maybe_enable_greenback=True, | ||||
|     async with ( | ||||
|         tractor.open_nursery( | ||||
|             debug_mode=True, | ||||
|     ) as an: | ||||
|             maybe_enable_greenback=True, | ||||
|             enable_stack_on_sig=True, | ||||
|             # loglevel='warning', | ||||
|             # loglevel='devx', | ||||
|         ) as an, | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         # just from root task | ||||
|         sync_pause() | ||||
| 
 | ||||
|         p: tractor.Portal  = await an.start_actor( | ||||
|             'subactor', | ||||
|             enable_modules=[__name__], | ||||
|             # infect_asyncio=True, | ||||
|             debug_mode=True, | ||||
|             loglevel='cancel', | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: 3 sub-actor usage cases: | ||||
|         # -[x] via a `.open_context()` | ||||
|         # -[ ] via a `.run_in_actor()` call | ||||
|         # -[ ] via a `.run()` | ||||
|         # -[ ] via a `.open_context()` | ||||
|         # | ||||
|         # -[ ] via a `.to_thread.run_sync()` in subactor | ||||
|         async with p.open_context( | ||||
|             start_n_sync_pause, | ||||
|         ) as (ctx, first): | ||||
|             assert first is None | ||||
| 
 | ||||
|             await tractor.pause() | ||||
|             sync_pause() | ||||
|             # TODO: handle bg-thread-in-root-actor special cases! | ||||
|             # | ||||
|             # there are a couple very subtle situations possible here | ||||
|             # and they are likely to become more important as cpython | ||||
|             # moves to support no-GIL. | ||||
|             # | ||||
|             # Cases: | ||||
|             # 1. root-actor bg-threads that call `.pause_from_sync()` | ||||
|             #   whilst an in-tree subactor also is using ` .pause()`. | ||||
|             # |_ since the root-actor bg thread can not | ||||
|             #   `Lock._debug_lock.acquire_nowait()` without running | ||||
|             #   a `trio.Task`, AND because the | ||||
|             #   `PdbREPL.set_continue()` is called from that | ||||
|             #   bg-thread, we can not `._debug_lock.release()` | ||||
|             #   either! | ||||
|             #  |_ this results in no actor-tree `Lock` being used | ||||
|             #    on behalf of the bg-thread and thus the subactor's | ||||
|             #    task and the thread trying to to use stdio | ||||
|             #    simultaneously which results in the classic TTY | ||||
|             #    clobbering! | ||||
|             # | ||||
|             # 2. mutiple sync-bg-threads that call | ||||
|             #   `.pause_from_sync()` where one is scheduled via | ||||
|             #   `Nursery.start_soon(to_thread.run_sync)` in a bg | ||||
|             #   task. | ||||
|             # | ||||
|             #   Due to the GIL, the threads never truly try to step | ||||
|             #   through the REPL simultaneously, BUT their `logging` | ||||
|             #   and traceback outputs are interleaved since the GIL | ||||
|             #   (seemingly) on every REPL-input from the user | ||||
|             #   switches threads.. | ||||
|             # | ||||
|             #   Soo, the context switching semantics of the GIL | ||||
|             #   result in a very confusing and messy interaction UX | ||||
|             #   since eval and (tb) print output is NOT synced to | ||||
|             #   each REPL-cycle (like we normally make it via | ||||
|             #   a `.set_continue()` callback triggering the | ||||
|             #   `Lock.release()`). Ideally we can solve this | ||||
|             #   usability issue NOW because this will of course be | ||||
|             #   that much more important when eventually there is no | ||||
|             #   GIL! | ||||
| 
 | ||||
|         # TODO: make this work!! | ||||
|         await trio.to_thread.run_sync( | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 1. above! | ||||
|             tn.start_soon( | ||||
|                 partial( | ||||
|                     trio.to_thread.run_sync, | ||||
|                     partial( | ||||
|                         sync_pause, | ||||
|             abandon_on_cancel=False, | ||||
|                         use_builtin=False, | ||||
|                         # pre_sleep=0.5, | ||||
|                     ), | ||||
|                     abandon_on_cancel=True, | ||||
|                     thread_name='start_soon_root_bg_thread', | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|             # XXX should cause double REPL entry and thus TTY | ||||
|             # clobbering due to case 2. above! | ||||
|             await trio.to_thread.run_sync( | ||||
|                 partial( | ||||
|                     sync_pause, | ||||
|                     # NOTE this already works fine since in the new | ||||
|                     # thread the `breakpoint()` built-in is never | ||||
|                     # overloaded, thus NO locking is used, HOWEVER | ||||
|                     # the case 2. from above still exists! | ||||
|                     use_builtin=True, | ||||
|                 ), | ||||
|                 # TODO: with this `False` we can hang!??! | ||||
|                 # abandon_on_cancel=False, | ||||
|                 abandon_on_cancel=True, | ||||
|                 thread_name='inline_root_bg_thread', | ||||
|             ) | ||||
| 
 | ||||
|         await ctx.cancel() | ||||
|  |  | |||
|  | @ -1,6 +1,11 @@ | |||
| import time | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     ActorNursery, | ||||
|     MsgStream, | ||||
|     Portal, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # this is the first 2 actors, streamer_1 and streamer_2 | ||||
|  | @ -12,14 +17,18 @@ async def stream_data(seed): | |||
| 
 | ||||
| # this is the third actor; the aggregator | ||||
| async def aggregate(seed): | ||||
|     """Ensure that the two streams we receive match but only stream | ||||
|     ''' | ||||
|     Ensure that the two streams we receive match but only stream | ||||
|     a single set of values to the parent. | ||||
|     """ | ||||
|     async with tractor.open_nursery() as nursery: | ||||
|         portals = [] | ||||
| 
 | ||||
|     ''' | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery() as an: | ||||
|         portals: list[Portal] = [] | ||||
|         for i in range(1, 3): | ||||
|             # fork point | ||||
|             portal = await nursery.start_actor( | ||||
| 
 | ||||
|             # fork/spawn call | ||||
|             portal = await an.start_actor( | ||||
|                 name=f'streamer_{i}', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
|  | @ -43,7 +52,11 @@ async def aggregate(seed): | |||
|         async with trio.open_nursery() as n: | ||||
| 
 | ||||
|             for portal in portals: | ||||
|                 n.start_soon(push_to_chan, portal, send_chan.clone()) | ||||
|                 n.start_soon( | ||||
|                     push_to_chan, | ||||
|                     portal, | ||||
|                     send_chan.clone(), | ||||
|                 ) | ||||
| 
 | ||||
|             # close this local task's reference to send side | ||||
|             await send_chan.aclose() | ||||
|  | @ -60,7 +73,7 @@ async def aggregate(seed): | |||
| 
 | ||||
|             print("FINISHED ITERATING in aggregator") | ||||
| 
 | ||||
|         await nursery.cancel() | ||||
|         await an.cancel() | ||||
|         print("WAITING on `ActorNursery` to finish") | ||||
|     print("AGGREGATOR COMPLETE!") | ||||
| 
 | ||||
|  | @ -75,18 +88,21 @@ async def main() -> list[int]: | |||
| 
 | ||||
|     ''' | ||||
|     # yes, a nursery which spawns `trio`-"actors" B) | ||||
|     nursery: tractor.ActorNursery | ||||
|     async with tractor.open_nursery() as nursery: | ||||
|     an: ActorNursery | ||||
|     async with tractor.open_nursery( | ||||
|         loglevel='cancel', | ||||
|         debug_mode=True, | ||||
|     ) as an: | ||||
| 
 | ||||
|         seed = int(1e3) | ||||
|         pre_start = time.time() | ||||
| 
 | ||||
|         portal: tractor.Portal = await nursery.start_actor( | ||||
|         portal: Portal = await an.start_actor( | ||||
|             name='aggregator', | ||||
|             enable_modules=[__name__], | ||||
|         ) | ||||
| 
 | ||||
|         stream: tractor.MsgStream | ||||
|         stream: MsgStream | ||||
|         async with portal.open_stream_from( | ||||
|             aggregate, | ||||
|             seed=seed, | ||||
|  | @ -95,11 +111,12 @@ async def main() -> list[int]: | |||
|             start = time.time() | ||||
|             # the portal call returns exactly what you'd expect | ||||
|             # as if the remote "aggregate" function was called locally | ||||
|             result_stream = [] | ||||
|             result_stream: list[int] = [] | ||||
|             async for value in stream: | ||||
|                 result_stream.append(value) | ||||
| 
 | ||||
|         await portal.cancel_actor() | ||||
|         cancelled: bool = await portal.cancel_actor() | ||||
|         assert cancelled | ||||
| 
 | ||||
|         print(f"STREAM TIME = {time.time() - start}") | ||||
|         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}") | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ async def main(service_name): | |||
|     async with tractor.open_nursery() as an: | ||||
|         await an.start_actor(service_name) | ||||
| 
 | ||||
|         async with tractor.get_arbiter('127.0.0.1', 1616) as portal: | ||||
|         async with tractor.get_registry('127.0.0.1', 1616) as portal: | ||||
|             print(f"Arbiter is listening on {portal.channel}") | ||||
| 
 | ||||
|         async with tractor.wait_for_actor(service_name) as sockaddr: | ||||
|  |  | |||
|  | @ -0,0 +1,18 @@ | |||
| First generate a built disti: | ||||
| 
 | ||||
| ``` | ||||
| python -m pip install --upgrade build | ||||
| python -m build --sdist --outdir dist/alpha5/ | ||||
| ``` | ||||
| 
 | ||||
| Then try a test ``pypi`` upload: | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload --repository testpypi dist/alpha5/* | ||||
| ``` | ||||
| 
 | ||||
| The push to `pypi` for realz. | ||||
| 
 | ||||
| ``` | ||||
| python -m twine upload --repository testpypi dist/alpha5/* | ||||
| ``` | ||||
							
								
								
									
										2
									
								
								setup.py
								
								
								
								
							
							
						
						
									
										2
									
								
								setup.py
								
								
								
								
							|  | @ -61,7 +61,7 @@ setup( | |||
|         'wrapt', | ||||
| 
 | ||||
|         # IPC serialization | ||||
|         'msgspec', | ||||
|         'msgspec>=0.18.5', | ||||
| 
 | ||||
|         # debug mode REPL | ||||
|         'pdbp', | ||||
|  |  | |||
|  | @ -150,6 +150,18 @@ def pytest_generate_tests(metafunc): | |||
|         metafunc.parametrize("start_method", [spawn_backend], scope='module') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: a way to let test scripts (like from `examples/`) | ||||
| # guarantee they won't registry addr collide! | ||||
| # @pytest.fixture | ||||
| # def open_test_runtime( | ||||
| #     reg_addr: tuple, | ||||
| # ) -> AsyncContextManager: | ||||
| #     return partial( | ||||
| #         tractor.open_nursery, | ||||
| #         registry_addrs=[reg_addr], | ||||
| #     ) | ||||
| 
 | ||||
| 
 | ||||
| def sig_prog(proc, sig): | ||||
|     "Kill the actor-process with ``sig``." | ||||
|     proc.send_signal(sig) | ||||
|  |  | |||
|  | @ -0,0 +1,243 @@ | |||
| ''' | ||||
| `tractor.devx.*` tooling sub-pkg test space. | ||||
| 
 | ||||
| ''' | ||||
| import time | ||||
| from typing import ( | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
| ) | ||||
| from pexpect.spawnbase import SpawnBase | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     mk_cmd, | ||||
| ) | ||||
| from tractor.devx._debug import ( | ||||
|     _pause_msg as _pause_msg, | ||||
|     _crash_msg as _crash_msg, | ||||
|     _repl_fail_msg as _repl_fail_msg, | ||||
|     _ctlc_ignore_header as _ctlc_ignore_header, | ||||
| ) | ||||
| from ..conftest import ( | ||||
|     _ci_env, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method, | ||||
|     testdir: pytest.Pytester, | ||||
|     reg_addr: tuple[str, int], | ||||
| 
 | ||||
| ) -> Callable[[str], None]: | ||||
|     ''' | ||||
|     Use the `pexpect` module shipped via `testdir.spawn()` to | ||||
|     run an `./examples/..` script by name. | ||||
| 
 | ||||
|     ''' | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             '`pexpect` based tests only supported on `trio` backend' | ||||
|         ) | ||||
| 
 | ||||
|     def unset_colors(): | ||||
|         ''' | ||||
|         Python 3.13 introduced colored tracebacks that break patt | ||||
|         matching, | ||||
| 
 | ||||
|         https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||
|         https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||
| 
 | ||||
|         ''' | ||||
|         import os | ||||
|         os.environ['PYTHON_COLORS'] = '0' | ||||
| 
 | ||||
|     def _spawn( | ||||
|         cmd: str, | ||||
|         **mkcmd_kwargs, | ||||
|     ): | ||||
|         unset_colors() | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd( | ||||
|                 cmd, | ||||
|                 **mkcmd_kwargs, | ||||
|             ), | ||||
|             expect_timeout=3, | ||||
|             # preexec_fn=unset_colors, | ||||
|             # ^TODO? get `pytest` core to expose underlying | ||||
|             # `pexpect.spawn()` stuff? | ||||
|         ) | ||||
| 
 | ||||
|     # such that test-dep can pass input script name. | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until' | ||||
|                 ' we solve' 'this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|         if mark.name == 'ctlcs_bish': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} prolly uses something from the stdlib (namely `asyncio`..)\n' | ||||
|                 f'The test and/or underlying example script can *sometimes* run fine ' | ||||
|                 f'locally but more then likely until the cpython peeps get their sh#$ together, ' | ||||
|                 f'this test will definitely not behave like `trio` under SIGINT..\n' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # the the added color-char encoding.. | ||||
|         from tractor.devx._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # normally a `pdb` prompt by default | ||||
|     patt: str, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def in_prompt_msg( | ||||
|     child: SpawnBase, | ||||
|     parts: list[str], | ||||
| 
 | ||||
|     pause_on_false: bool = False, | ||||
|     err_on_false: bool = False, | ||||
|     print_prompt_on_false: bool = True, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate check if (the prompt's) std-streams output has all | ||||
|     `str`-parts in it. | ||||
| 
 | ||||
|     Can be used in test asserts for bulk matching expected | ||||
|     log/REPL output for a given `pdb` interact point. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     before: str = str(child.before.decode()) | ||||
|     for part in parts: | ||||
|         if part not in before: | ||||
|             if pause_on_false: | ||||
|                 import pdbp | ||||
|                 pdbp.set_trace() | ||||
| 
 | ||||
|             if print_prompt_on_false: | ||||
|                 print(before) | ||||
| 
 | ||||
|             if err_on_false: | ||||
|                 raise ValueError( | ||||
|                     f'Could not find pattern in `before` output?\n' | ||||
|                     f'part: {part!r}\n' | ||||
|                 ) | ||||
|             return False | ||||
| 
 | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| # TODO: todo support terminal color-chars stripping so we can match | ||||
| # against call stack frame output from the the 'll' command the like! | ||||
| # -[ ] SO answer for stipping ANSI codes: https://stackoverflow.com/a/14693789 | ||||
| def assert_before( | ||||
|     child: SpawnBase, | ||||
|     patts: list[str], | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     __tracebackhide__: bool = False | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child=child, | ||||
|         parts=patts, | ||||
| 
 | ||||
|         # since this is an "assert" helper ;) | ||||
|         err_on_false=True, | ||||
|         **kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: str|None = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> str|None: | ||||
| 
 | ||||
|     before: str|None = None | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
|     # return the console content up to the final prompt | ||||
|     return before | ||||
|  | @ -12,27 +12,26 @@ TODO: | |||
| """ | ||||
| from functools import partial | ||||
| import itertools | ||||
| from typing import Optional | ||||
| import platform | ||||
| import pathlib | ||||
| import time | ||||
| 
 | ||||
| import pytest | ||||
| import pexpect | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
| ) | ||||
| from tractor.devx._debug import ( | ||||
| from .conftest import ( | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _repl_fail_msg, | ||||
| ) | ||||
| from .conftest import ( | ||||
|     _ci_env, | ||||
|     expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
| ) | ||||
| 
 | ||||
| # TODO: The next great debugger audit could be done by you! | ||||
|  | @ -52,15 +51,6 @@ if platform.system() == 'Windows': | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd(ex_name: str) -> str: | ||||
|     ''' | ||||
|     Generate a command suitable to pass to ``pexpect.spawn()``. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = examples_dir() / 'debugging' / f'{ex_name}.py' | ||||
|     return ' '.join(['python', str(script_path)]) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: was trying to this xfail style but some weird bug i see in CI | ||||
| # that's happening at collect time.. pretty soon gonna dump actions i'm | ||||
| # thinkin... | ||||
|  | @ -79,136 +69,6 @@ has_nested_actors = pytest.mark.has_nested_actors | |||
| # ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture | ||||
| def spawn( | ||||
|     start_method, | ||||
|     testdir, | ||||
|     reg_addr, | ||||
| ) -> 'pexpect.spawn': | ||||
| 
 | ||||
|     if start_method != 'trio': | ||||
|         pytest.skip( | ||||
|             "Debugger tests are only supported on the trio backend" | ||||
|         ) | ||||
| 
 | ||||
|     def _spawn(cmd): | ||||
|         return testdir.spawn( | ||||
|             cmd=mk_cmd(cmd), | ||||
|             expect_timeout=3, | ||||
|         ) | ||||
| 
 | ||||
|     return _spawn | ||||
| 
 | ||||
| 
 | ||||
| PROMPT = r"\(Pdb\+\)" | ||||
| 
 | ||||
| 
 | ||||
| def expect( | ||||
|     child, | ||||
| 
 | ||||
|     # prompt by default | ||||
|     patt: str = PROMPT, | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Expect wrapper that prints last seen console | ||||
|     data before failing. | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         child.expect( | ||||
|             patt, | ||||
|             **kwargs, | ||||
|         ) | ||||
|     except TIMEOUT: | ||||
|         before = str(child.before.decode()) | ||||
|         print(before) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| def in_prompt_msg( | ||||
|     prompt: str, | ||||
|     parts: list[str], | ||||
| 
 | ||||
|     pause_on_false: bool = False, | ||||
|     print_prompt_on_false: bool = True, | ||||
| 
 | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Predicate check if (the prompt's) std-streams output has all | ||||
|     `str`-parts in it. | ||||
| 
 | ||||
|     Can be used in test asserts for bulk matching expected | ||||
|     log/REPL output for a given `pdb` interact point. | ||||
| 
 | ||||
|     ''' | ||||
|     for part in parts: | ||||
|         if part not in prompt: | ||||
| 
 | ||||
|             if pause_on_false: | ||||
|                 import pdbp | ||||
|                 pdbp.set_trace() | ||||
| 
 | ||||
|             if print_prompt_on_false: | ||||
|                 print(prompt) | ||||
| 
 | ||||
|             return False | ||||
| 
 | ||||
|     return True | ||||
| 
 | ||||
| def assert_before( | ||||
|     child, | ||||
|     patts: list[str], | ||||
| 
 | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # as in before the prompt end | ||||
|     before: str = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         prompt=before, | ||||
|         parts=patts, | ||||
| 
 | ||||
|         **kwargs | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.fixture( | ||||
|     params=[False, True], | ||||
|     ids='ctl-c={}'.format, | ||||
| ) | ||||
| def ctlc( | ||||
|     request, | ||||
|     ci_env: bool, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     use_ctlc = request.param | ||||
| 
 | ||||
|     node = request.node | ||||
|     markers = node.own_markers | ||||
|     for mark in markers: | ||||
|         if mark.name == 'has_nested_actors': | ||||
|             pytest.skip( | ||||
|                 f'Test {node} has nested actors and fails with Ctrl-C.\n' | ||||
|                 f'The test can sometimes run fine locally but until' | ||||
|                 ' we solve' 'this issue this CI test will be xfail:\n' | ||||
|                 'https://github.com/goodboy/tractor/issues/320' | ||||
|             ) | ||||
| 
 | ||||
|     if use_ctlc: | ||||
|         # XXX: disable pygments highlighting for auto-tests | ||||
|         # since some envs (like actions CI) will struggle | ||||
|         # the the added color-char encoding.. | ||||
|         from tractor.devx._debug import TractorConfig | ||||
|         TractorConfig.use_pygements = False | ||||
| 
 | ||||
|     yield use_ctlc | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'user_in_out', | ||||
|     [ | ||||
|  | @ -217,7 +77,10 @@ def ctlc( | |||
|     ], | ||||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_error(spawn, user_in_out): | ||||
| def test_root_actor_error( | ||||
|     spawn, | ||||
|     user_in_out, | ||||
| ): | ||||
|     ''' | ||||
|     Demonstrate crash handler entering pdb from basic error in root actor. | ||||
| 
 | ||||
|  | @ -229,14 +92,15 @@ def test_root_actor_error(spawn, user_in_out): | |||
|     # scan for the prompt | ||||
|     expect(child, PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     # make sure expected logging and error arrives | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_crash_msg, "('root'"] | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('root'", | ||||
|             'AssertionError', | ||||
|         ] | ||||
|     ) | ||||
|     assert 'AssertionError' in before | ||||
| 
 | ||||
|     # send user command | ||||
|     child.sendline(user_input) | ||||
|  | @ -255,8 +119,10 @@ def test_root_actor_error(spawn, user_in_out): | |||
|     ids=lambda item: f'{item[0]} -> {item[1]}', | ||||
| ) | ||||
| def test_root_actor_bp(spawn, user_in_out): | ||||
|     """Demonstrate breakpoint from in root actor. | ||||
|     """ | ||||
|     ''' | ||||
|     Demonstrate breakpoint from in root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     user_input, expect_err_str = user_in_out | ||||
|     child = spawn('root_actor_breakpoint') | ||||
| 
 | ||||
|  | @ -270,7 +136,7 @@ def test_root_actor_bp(spawn, user_in_out): | |||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     if expect_err_str is None: | ||||
|         assert 'Error' not in str(child.before) | ||||
|  | @ -278,38 +144,6 @@ def test_root_actor_bp(spawn, user_in_out): | |||
|         assert expect_err_str in str(child.before) | ||||
| 
 | ||||
| 
 | ||||
| def do_ctlc( | ||||
|     child, | ||||
|     count: int = 3, | ||||
|     delay: float = 0.1, | ||||
|     patt: Optional[str] = None, | ||||
| 
 | ||||
|     # expect repl UX to reprint the prompt after every | ||||
|     # ctrl-c send. | ||||
|     # XXX: no idea but, in CI this never seems to work even on 3.10 so | ||||
|     # needs some further investigation potentially... | ||||
|     expect_prompt: bool = not _ci_env, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|     # make sure ctl-c sends don't do anything but repeat output | ||||
|     for _ in range(count): | ||||
|         time.sleep(delay) | ||||
|         child.sendcontrol('c') | ||||
| 
 | ||||
|         # TODO: figure out why this makes CI fail.. | ||||
|         # if you run this test manually it works just fine.. | ||||
|         if expect_prompt: | ||||
|             before = str(child.before.decode()) | ||||
|             time.sleep(delay) | ||||
|             child.expect(PROMPT) | ||||
|             time.sleep(delay) | ||||
| 
 | ||||
|             if patt: | ||||
|                 # should see the last line on console | ||||
|                 assert patt in before | ||||
| 
 | ||||
| 
 | ||||
| def test_root_actor_bp_forever( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
|  | @ -349,7 +183,7 @@ def test_root_actor_bp_forever( | |||
| 
 | ||||
|     # quit out of the loop | ||||
|     child.sendline('q') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|  | @ -371,10 +205,12 @@ def test_subactor_error( | |||
|     # scan for the prompt | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_crash_msg, "('name_error'"] | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('name_error'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if do_next: | ||||
|  | @ -393,17 +229,15 @@ def test_subactor_error( | |||
|         child.sendline('continue') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             # root actor gets debugger engaged | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_crash_msg, "('root'"] | ||||
|     ) | ||||
|             "('root'", | ||||
|             # error is a remote error propagated from the subactor | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_crash_msg, "('name_error'"] | ||||
|             "('name_error'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # another round | ||||
|  | @ -414,7 +248,7 @@ def test_subactor_error( | |||
|     child.expect('\r\n') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_subactor_breakpoint( | ||||
|  | @ -424,14 +258,11 @@ def test_subactor_breakpoint( | |||
|     "Single subactor with an infinite breakpoint loop" | ||||
| 
 | ||||
|     child = spawn('subactor_breakpoint') | ||||
| 
 | ||||
|     # scan for the prompt | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_pause_msg, "('breakpoint_forever'"] | ||||
|         child, | ||||
|         [_pause_msg, | ||||
|          "('breakpoint_forever'",] | ||||
|     ) | ||||
| 
 | ||||
|     # do some "next" commands to demonstrate recurrent breakpoint | ||||
|  | @ -447,9 +278,8 @@ def test_subactor_breakpoint( | |||
|     for _ in range(5): | ||||
|         child.sendline('continue') | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         assert in_prompt_msg( | ||||
|             before, | ||||
|             child, | ||||
|             [_pause_msg, "('breakpoint_forever'"] | ||||
|         ) | ||||
| 
 | ||||
|  | @ -462,9 +292,12 @@ def test_subactor_breakpoint( | |||
|     # child process should exit but parent will capture pdb.BdbQuit | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         ['RemoteActorError:', | ||||
|          "('breakpoint_forever'", | ||||
|          'bdb.BdbQuit',] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -473,11 +306,14 @@ def test_subactor_breakpoint( | |||
|     child.sendline('c') | ||||
| 
 | ||||
|     # process should exit | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "RemoteActorError: ('breakpoint_forever'" in before | ||||
|     assert 'bdb.BdbQuit' in before | ||||
|     assert in_prompt_msg( | ||||
|         child, | ||||
|         ['RemoteActorError:', | ||||
|          "('breakpoint_forever'", | ||||
|          'bdb.BdbQuit',] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @has_nested_actors | ||||
|  | @ -497,7 +333,7 @@ def test_multi_subactors( | |||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         child, | ||||
|         [_pause_msg, "('breakpoint_forever'"] | ||||
|     ) | ||||
| 
 | ||||
|  | @ -518,12 +354,14 @@ def test_multi_subactors( | |||
| 
 | ||||
|     # first name_error failure | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         [_crash_msg, "('name_error'"] | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('name_error'", | ||||
|             "NameError", | ||||
|         ] | ||||
|     ) | ||||
|     assert "NameError" in before | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -547,9 +385,8 @@ def test_multi_subactors( | |||
|     # breakpoint loop should re-engage | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         child, | ||||
|         [_pause_msg, "('breakpoint_forever'"] | ||||
|     ) | ||||
| 
 | ||||
|  | @ -612,7 +449,7 @@ def test_multi_subactors( | |||
| 
 | ||||
|     # process should exit | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     # repeat of previous multierror for final output | ||||
|     assert_before(child, [ | ||||
|  | @ -642,25 +479,28 @@ def test_multi_daemon_subactors( | |||
|     # the root's tty lock first so anticipate either crash | ||||
|     # message on the first entry. | ||||
| 
 | ||||
|     bp_forev_parts = [_pause_msg, "('bp_forever'"] | ||||
|     bp_forev_parts = [ | ||||
|         _pause_msg, | ||||
|         "('bp_forever'", | ||||
|     ] | ||||
|     bp_forev_in_msg = partial( | ||||
|         in_prompt_msg, | ||||
|         parts=bp_forev_parts, | ||||
|     ) | ||||
| 
 | ||||
|     name_error_msg = "NameError: name 'doggypants' is not defined" | ||||
|     name_error_parts = [name_error_msg] | ||||
|     name_error_msg: str = "NameError: name 'doggypants' is not defined" | ||||
|     name_error_parts: list[str] = [name_error_msg] | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     if bp_forev_in_msg(prompt=before): | ||||
|     if bp_forev_in_msg(child=child): | ||||
|         next_parts = name_error_parts | ||||
| 
 | ||||
|     elif name_error_msg in before: | ||||
|         next_parts = bp_forev_parts | ||||
| 
 | ||||
|     else: | ||||
|         raise ValueError("Neither log msg was found !?") | ||||
|         raise ValueError('Neither log msg was found !?') | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|  | @ -729,14 +569,12 @@ def test_multi_daemon_subactors( | |||
|     # wait for final error in root | ||||
|     # where it crashs with boxed error | ||||
|     while True: | ||||
|         try: | ||||
|         child.sendline('c') | ||||
|         child.expect(PROMPT) | ||||
|             assert_before( | ||||
|         if not in_prompt_msg( | ||||
|             child, | ||||
|             bp_forev_parts | ||||
|             ) | ||||
|         except AssertionError: | ||||
|         ): | ||||
|             break | ||||
| 
 | ||||
|     assert_before( | ||||
|  | @ -745,13 +583,14 @@ def test_multi_daemon_subactors( | |||
|             # boxed error raised in root task | ||||
|             # "Attaching to pdb in crashed actor: ('root'", | ||||
|             _crash_msg, | ||||
|             "('root'", | ||||
|             "_exceptions.RemoteActorError: ('name_error'", | ||||
|             "('root'",  # should attach in root | ||||
|             "_exceptions.RemoteActorError:",  # with an embedded RAE for.. | ||||
|             "('name_error'",  # the src subactor which raised | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| @has_nested_actors | ||||
|  | @ -827,7 +666,7 @@ def test_multi_subactors_root_errors( | |||
|     ]) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     assert_before(child, [ | ||||
|         # "Attaching to pdb in crashed actor: ('root'", | ||||
|  | @ -847,10 +686,11 @@ def test_multi_nested_subactors_error_through_nurseries( | |||
|     # https://github.com/goodboy/tractor/issues/320 | ||||
|     # ctlc: bool, | ||||
| ): | ||||
|     """Verify deeply nested actors that error trigger debugger entries | ||||
|     ''' | ||||
|     Verify deeply nested actors that error trigger debugger entries | ||||
|     at each actor nurserly (level) all the way up the tree. | ||||
| 
 | ||||
|     """ | ||||
|     ''' | ||||
|     # NOTE: previously, inside this script was a bug where if the | ||||
|     # parent errors before a 2-levels-lower actor has released the lock, | ||||
|     # the parent tries to cancel it but it's stuck in the debugger? | ||||
|  | @ -870,22 +710,31 @@ def test_multi_nested_subactors_error_through_nurseries( | |||
|         except EOF: | ||||
|             break | ||||
| 
 | ||||
|     assert_before(child, [ | ||||
| 
 | ||||
|         # boxed source errors | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ # boxed source errors | ||||
|             "NameError: name 'doggypants' is not defined", | ||||
|         "tractor._exceptions.RemoteActorError: ('name_error'", | ||||
|             "tractor._exceptions.RemoteActorError:", | ||||
|             "('name_error'", | ||||
|             "bdb.BdbQuit", | ||||
| 
 | ||||
|             # first level subtrees | ||||
|         "tractor._exceptions.RemoteActorError: ('spawner0'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawner0'", | ||||
|             "src_uid=('spawner0'", | ||||
| 
 | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawner1'", | ||||
| 
 | ||||
|             # propagation of errors up through nested subtrees | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_0'", | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_1'", | ||||
|         "tractor._exceptions.RemoteActorError: ('spawn_until_2'", | ||||
|     ]) | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_0'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_1'", | ||||
|             # "tractor._exceptions.RemoteActorError: ('spawn_until_2'", | ||||
|             # ^-NOTE-^ old RAE repr, new one is below with a field | ||||
|             # showing the src actor's uid. | ||||
|             "src_uid=('spawn_until_0'", | ||||
|             "relay_uid=('spawn_until_1'", | ||||
|             "src_uid=('spawn_until_2'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.timeout(15) | ||||
|  | @ -906,10 +755,13 @@ def test_root_nursery_cancels_before_child_releases_tty_lock( | |||
|     child = spawn('root_cancelled_but_child_is_in_tty_lock') | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     assert "NameError: name 'doggypants' is not defined" in before | ||||
|     assert "tractor._exceptions.RemoteActorError: ('name_error'" not in before | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "NameError: name 'doggypants' is not defined", | ||||
|             "tractor._exceptions.RemoteActorError: ('name_error'", | ||||
|         ], | ||||
|     ) | ||||
|     time.sleep(0.5) | ||||
| 
 | ||||
|     if ctlc: | ||||
|  | @ -947,7 +799,7 @@ def test_root_nursery_cancels_before_child_releases_tty_lock( | |||
| 
 | ||||
|     for i in range(3): | ||||
|         try: | ||||
|             child.expect(pexpect.EOF, timeout=0.5) | ||||
|             child.expect(EOF, timeout=0.5) | ||||
|             break | ||||
|         except TIMEOUT: | ||||
|             child.sendline('c') | ||||
|  | @ -989,7 +841,7 @@ def test_root_cancels_child_context_during_startup( | |||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_different_debug_mode_per_actor( | ||||
|  | @ -1000,9 +852,8 @@ def test_different_debug_mode_per_actor( | |||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # only one actor should enter the debugger | ||||
|     before = str(child.before.decode()) | ||||
|     assert in_prompt_msg( | ||||
|         before, | ||||
|         child, | ||||
|         [_crash_msg, "('debugged_boi'", "RuntimeError"], | ||||
|     ) | ||||
| 
 | ||||
|  | @ -1010,82 +861,240 @@ def test_different_debug_mode_per_actor( | |||
|         do_ctlc(child) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
| 
 | ||||
|     before = str(child.before.decode()) | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
|     # NOTE: this debugged actor error currently WON'T show up since the | ||||
|     # root will actually cancel and terminate the nursery before the error | ||||
|     # msg reported back from the debug mode actor is processed. | ||||
|     # assert "tractor._exceptions.RemoteActorError: ('debugged_boi'" in before | ||||
| 
 | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
| 
 | ||||
|     # the crash boi should not have made a debugger request but | ||||
|     # instead crashed completely | ||||
|     assert "tractor._exceptions.RemoteActorError: ('crash_boi'" in before | ||||
|     assert "RuntimeError" in before | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             "tractor._exceptions.RemoteActorError:", | ||||
|             "src_uid=('crash_boi'", | ||||
|             "RuntimeError", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def test_pause_from_sync( | ||||
| def test_post_mortem_api( | ||||
|     spawn, | ||||
|     ctlc: bool | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from sync functions AND from | ||||
|     any thread spawned with `trio.to_thread.run_sync()`. | ||||
| 
 | ||||
|     `examples/debugging/sync_bp.py` | ||||
|     Verify the `tractor.post_mortem()` API works in an exception | ||||
|     handler block. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('sync_bp') | ||||
|     child = spawn('pm_in_subactor') | ||||
| 
 | ||||
|     # First entry is via manual `.post_mortem()` | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             '`greenback` portal opened!', | ||||
|             # pre-prompt line | ||||
|             _pause_msg, "('root'", | ||||
|             _crash_msg, | ||||
|             "<Task 'name_error'", | ||||
|             "NameError", | ||||
|             "('child'", | ||||
|             "tractor.post_mortem()", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded again | ||||
|     before = str(child.before.decode()) | ||||
|     assert not in_prompt_msg( | ||||
|         before, | ||||
|         ['`greenback` portal opened!'], | ||||
|     ) | ||||
|     # 2nd is RPC crash handler | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [_pause_msg, "('root'",], | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task 'name_error'", | ||||
|             "NameError", | ||||
|             "('child'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 3rd is via RAE bubbled to root's parent ctx task and | ||||
|     # crash-handled via another manual pm call. | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "NameError", | ||||
|             "tractor.post_mortem()", | ||||
|             "src_uid=('child'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # 4th and FINAL is via RAE bubbled to root's parent ctx task and | ||||
|     # crash-handled via another manual pm call. | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "NameError", | ||||
|             "src_uid=('child'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
| 
 | ||||
| 
 | ||||
|     # TODO: ensure we're stopped and showing the right call stack frame | ||||
|     # -[ ] need a way to strip the terminal color chars in order to | ||||
|     #    pattern match... see TODO around `assert_before()` above! | ||||
|     # child.sendline('w') | ||||
|     # child.expect(PROMPT) | ||||
|     # assert_before( | ||||
|     #     child, | ||||
|     #     [ | ||||
|     #         # error src block annot at ctx open | ||||
|     #         '-> async with p.open_context(name_error) as (ctx, first):', | ||||
|     #     ] | ||||
|     # ) | ||||
| 
 | ||||
|     # # step up a frame to ensure the it's the root's nursery | ||||
|     # child.sendline('u') | ||||
|     # child.expect(PROMPT) | ||||
|     # assert_before( | ||||
|     #     child, | ||||
|     #     [ | ||||
|     #         # handler block annotation | ||||
|     #         '-> async with tractor.open_nursery(', | ||||
|     #     ] | ||||
|     # ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_shield_pause( | ||||
|     spawn, | ||||
| ): | ||||
|     ''' | ||||
|     Verify the `tractor.pause()/.post_mortem()` API works inside an | ||||
|     already cancelled `trio.CancelScope` and that you can step to the | ||||
|     next checkpoint wherein the cancelled will get raised. | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('shielded_pause') | ||||
| 
 | ||||
|     # First entry is via manual `.post_mortem()` | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "cancellable_pause_loop'", | ||||
|             "('cancelled_before_pause'",  # actor name | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     # since 3 tries in ex. shield pause loop | ||||
|     for i in range(3): | ||||
|         child.sendline('c') | ||||
|         child.expect(PROMPT) | ||||
|         assert_before( | ||||
|             child, | ||||
|         [_pause_msg, "('subactor'",], | ||||
|             [ | ||||
|                 _pause_msg, | ||||
|                 "INSIDE SHIELDED PAUSE", | ||||
|                 "('cancelled_before_pause'",  # actor name | ||||
|             ] | ||||
|         ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     # back inside parent task that opened nursery | ||||
|     child.sendline('c') | ||||
|     child.expect(PROMPT) | ||||
|     # non-main thread case | ||||
|     # TODO: should we agument the pre-prompt msg in this case? | ||||
|     assert_before( | ||||
|         child, | ||||
|         [_pause_msg, "('root'",], | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('cancelled_before_pause'",  # actor name | ||||
|             _repl_fail_msg, | ||||
|             "trio.Cancelled", | ||||
|             "raise Cancelled._create()", | ||||
| 
 | ||||
|             # we should be handling a taskc inside | ||||
|             # the first `.port_mortem()` sin-shield! | ||||
|             'await DebugStatus.req_finished.wait()', | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|     # same as above but in the root actor's task | ||||
|     child.sendline('c') | ||||
|     child.expect(pexpect.EOF) | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "('root'",  # actor name | ||||
|             _repl_fail_msg, | ||||
|             "trio.Cancelled", | ||||
|             "raise Cancelled._create()", | ||||
| 
 | ||||
|             # handling a taskc inside the first unshielded | ||||
|             # `.port_mortem()`. | ||||
|             # BUT in this case in the root-proc path ;) | ||||
|             'wait Lock._debug_lock.acquire()', | ||||
|         ] | ||||
|     ) | ||||
|     child.sendline('c') | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: better error for "non-ideal" usage from the root actor. | ||||
| # -[ ] if called from an async scope emit a message that suggests | ||||
| #    using `await tractor.pause()` instead since it's less overhead | ||||
| #    (in terms of `greenback` and/or extra threads) and if it's from | ||||
| #    a sync scope suggest that usage must first call | ||||
| #    `ensure_portal()` in the (eventual parent) async calling scope? | ||||
| def test_sync_pause_from_bg_task_in_root_actor_(): | ||||
|     ''' | ||||
|     When used from the root actor, normally we can only implicitly | ||||
|     support `.pause_from_sync()` from the main-parent-task (that | ||||
|     opens the runtime via `open_root_actor()`) since `greenback` | ||||
|     requires a `.ensure_portal()` call per `trio.Task` where it is | ||||
|     used. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
| 
 | ||||
| # TODO: needs ANSI code stripping tho, see `assert_before()` # above! | ||||
| def test_correct_frames_below_hidden(): | ||||
|     ''' | ||||
|     Ensure that once a `tractor.pause()` enages, when the user | ||||
|     inputs a "next"/"n" command the actual next line steps | ||||
|     and that using a "step"/"s" into the next LOC, particuarly | ||||
|     `tractor` APIs, you can step down into that code. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
| 
 | ||||
| 
 | ||||
| def test_cant_pause_from_paused_task(): | ||||
|     ''' | ||||
|     Pausing from with an already paused task should raise an error. | ||||
| 
 | ||||
|     Normally this should only happen in practise while debugging the call stack of `tractor.pause()` itself, likely | ||||
|     by a `.pause()` line somewhere inside our runtime. | ||||
| 
 | ||||
|     ''' | ||||
|     ... | ||||
|  | @ -0,0 +1,381 @@ | |||
| ''' | ||||
| That "foreign loop/thread" debug REPL support better ALSO WORK! | ||||
| 
 | ||||
| Same as `test_native_pause.py`. | ||||
| All these tests can be understood (somewhat) by running the | ||||
| equivalent `examples/debugging/` scripts manually. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| # from functools import partial | ||||
| # import itertools | ||||
| import time | ||||
| # from typing import ( | ||||
| #     Iterator, | ||||
| # ) | ||||
| 
 | ||||
| import pytest | ||||
| from pexpect.exceptions import ( | ||||
|     TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     # _ci_env, | ||||
|     do_ctlc, | ||||
|     PROMPT, | ||||
|     # expect, | ||||
|     in_prompt_msg, | ||||
|     assert_before, | ||||
|     _pause_msg, | ||||
|     _crash_msg, | ||||
|     _ctlc_ignore_header, | ||||
|     # _repl_fail_msg, | ||||
| ) | ||||
| 
 | ||||
| @cm | ||||
| def maybe_expect_timeout( | ||||
|     ctlc: bool = False, | ||||
| ) -> None: | ||||
|     try: | ||||
|         yield | ||||
|     except TIMEOUT: | ||||
|         # breakpoint() | ||||
|         if ctlc: | ||||
|             pytest.xfail( | ||||
|                 'Some kinda redic threading SIGINT bug i think?\n' | ||||
|                 'See the notes in `examples/debugging/sync_bp.py`..\n' | ||||
|             ) | ||||
|         raise | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_pause_from_sync( | ||||
|     spawn, | ||||
|     ctlc: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from sync functions AND from | ||||
|     any thread spawned with `trio.to_thread.run_sync()`. | ||||
| 
 | ||||
|     `examples/debugging/sync_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('sync_bp') | ||||
| 
 | ||||
|     # first `sync_pause()` after nurseries open | ||||
|     child.expect(PROMPT) | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             # pre-prompt line | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
|     if ctlc: | ||||
|         do_ctlc(child) | ||||
|         # ^NOTE^ subactor not spawned yet; don't need extra delay. | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # first `await tractor.pause()` inside `p.open_context()` body | ||||
|     child.expect(PROMPT) | ||||
| 
 | ||||
|     # XXX shouldn't see gb loaded message with PDB loglevel! | ||||
|     # assert not in_prompt_msg( | ||||
|     #     child, | ||||
|     #     ['`greenback` portal opened!'], | ||||
|     # ) | ||||
|     # should be same root task | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     # XXX, fwiw without a brief sleep here the SIGINT might actually | ||||
|     # trigger "subactor" cancellation by its parent  before the | ||||
|     # shield-handler is engaged. | ||||
|     # | ||||
|     # => similar to the `delay` input to `do_ctlc()` below, setting | ||||
|     # this too low can cause the test to fail since the `subactor` | ||||
|     # suffers a race where the root/parent sends an actor-cancel | ||||
|     # prior to the context task hitting its pause point (and thus | ||||
|     # engaging the `sigint_shield()` handler in time); this value | ||||
|     # seems be good enuf? | ||||
|     time.sleep(0.6) | ||||
| 
 | ||||
|     # one of the bg thread or subactor should have | ||||
|     # `Lock.acquire()`-ed | ||||
|     # (NOT both, which will result in REPL clobbering!) | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
|         'subactor': [ | ||||
|             "'start_n_sync_pause'", | ||||
|             "('subactor'", | ||||
|         ], | ||||
|         'inline_root_bg_thread': [ | ||||
|             "<Thread(inline_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|         'start_soon_root_bg_thread': [ | ||||
|             "<Thread(start_soon_root_bg_thread", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     conts: int = 0  # for debugging below matching logic on failure | ||||
|     while attach_patts: | ||||
|         child.sendline('c') | ||||
|         conts += 1 | ||||
|         child.expect(PROMPT) | ||||
|         before = str(child.before.decode()) | ||||
|         for key in attach_patts: | ||||
|             if key in before: | ||||
|                 attach_key: str = key | ||||
|                 expected_patts: str = attach_patts.pop(key) | ||||
|                 assert_before( | ||||
|                     child, | ||||
|                     [_pause_msg] | ||||
|                     + | ||||
|                     expected_patts | ||||
|                 ) | ||||
|                 break | ||||
|         else: | ||||
|             pytest.fail( | ||||
|                 f'No keys found?\n\n' | ||||
|                 f'{attach_patts.keys()}\n\n' | ||||
|                 f'{before}\n' | ||||
|             ) | ||||
| 
 | ||||
|         # ensure no other task/threads engaged a REPL | ||||
|         # at the same time as the one that was detected above. | ||||
|         for key, other_patts in attach_patts.copy().items(): | ||||
|             assert not in_prompt_msg( | ||||
|                 child, | ||||
|                 other_patts, | ||||
|             ) | ||||
| 
 | ||||
|         if ctlc: | ||||
|             do_ctlc( | ||||
|                 child, | ||||
|                 patt=attach_key, | ||||
|                 # NOTE same as comment above | ||||
|                 delay=0.4, | ||||
|             ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
| 
 | ||||
|     # XXX TODO, weird threading bug it seems despite the | ||||
|     # `abandon_on_cancel: bool` setting to | ||||
|     # `trio.to_thread.run_sync()`.. | ||||
|     with maybe_expect_timeout( | ||||
|         ctlc=ctlc, | ||||
|     ): | ||||
|         child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def expect_any_of( | ||||
|     attach_patts: dict[str, list[str]], | ||||
|     child,   # what type? | ||||
|     ctlc: bool = False, | ||||
|     prompt: str = _ctlc_ignore_header, | ||||
|     ctlc_delay: float = .4, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Receive any of a `list[str]` of patterns provided in | ||||
|     `attach_patts`. | ||||
| 
 | ||||
|     Used to test racing prompts from multiple actors and/or | ||||
|     tasks using a common root process' `pdbp` REPL. | ||||
| 
 | ||||
|     ''' | ||||
|     assert attach_patts | ||||
| 
 | ||||
|     child.expect(PROMPT) | ||||
|     before = str(child.before.decode()) | ||||
| 
 | ||||
|     for attach_key in attach_patts: | ||||
|         if attach_key in before: | ||||
|             expected_patts: str = attach_patts.pop(attach_key) | ||||
|             assert_before( | ||||
|                 child, | ||||
|                 expected_patts | ||||
|             ) | ||||
|             break  # from for | ||||
|     else: | ||||
|         pytest.fail( | ||||
|             f'No keys found?\n\n' | ||||
|             f'{attach_patts.keys()}\n\n' | ||||
|             f'{before}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ensure no other task/threads engaged a REPL | ||||
|     # at the same time as the one that was detected above. | ||||
|     for key, other_patts in attach_patts.copy().items(): | ||||
|         assert not in_prompt_msg( | ||||
|             child, | ||||
|             other_patts, | ||||
|         ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             patt=prompt, | ||||
|             # NOTE same as comment above | ||||
|             delay=ctlc_delay, | ||||
|         ) | ||||
| 
 | ||||
|     return expected_patts | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.ctlcs_bish | ||||
| def test_sync_pause_from_aio_task( | ||||
|     spawn, | ||||
| 
 | ||||
|     ctlc: bool | ||||
|     # ^TODO, fix for `asyncio`!! | ||||
| ): | ||||
|     ''' | ||||
|     Verify we can use the `pdbp` REPL from an `asyncio.Task` spawned using | ||||
|     APIs in `.to_asyncio`. | ||||
| 
 | ||||
|     `examples/debugging/asycio_bp.py` | ||||
| 
 | ||||
|     ''' | ||||
|     child = spawn('asyncio_bp') | ||||
| 
 | ||||
|     # RACE on whether trio/asyncio task bps first | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # first pause in guest-mode (aka "infecting") | ||||
|         # `trio.Task`. | ||||
|         'trio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # `breakpoint()` from `asyncio.Task`. | ||||
|         'asyncio-side': [ | ||||
|             _pause_msg, | ||||
|             "<Task pending name='Task-2' coro=<greenback_shim()", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
|     } | ||||
| 
 | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     # NOW in race order, | ||||
|     # - the asyncio-task will error | ||||
|     # - the root-actor parent task will pause | ||||
|     # | ||||
|     attach_patts: dict[str, list[str]] = { | ||||
| 
 | ||||
|         # error raised in `asyncio.Task` | ||||
|         "raise ValueError('asyncio side error!')": [ | ||||
|             _crash_msg, | ||||
|             "<Task 'trio_ctx'", | ||||
|             "@ ('aio_daemon'", | ||||
|             "ValueError: asyncio side error!", | ||||
| 
 | ||||
|             # XXX, we no longer show this frame by default! | ||||
|             # 'return await chan.receive()',  # `.to_asyncio` impl internals in tb | ||||
|         ], | ||||
| 
 | ||||
|         # parent-side propagation via actor-nursery/portal | ||||
|         # "tractor._exceptions.RemoteActorError: remote task raised a 'ValueError'": [ | ||||
|         "remote task raised a 'ValueError'": [ | ||||
|             _crash_msg, | ||||
|             "src_uid=('aio_daemon'", | ||||
|             "('aio_daemon'", | ||||
|         ], | ||||
| 
 | ||||
|         # a final pause in root-actor | ||||
|         "<Task '__main__.main'": [ | ||||
|             _pause_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|         ], | ||||
|     } | ||||
|     while attach_patts: | ||||
|         expect_any_of( | ||||
|             attach_patts=attach_patts, | ||||
|             child=child, | ||||
|             ctlc=ctlc, | ||||
|         ) | ||||
|         child.sendline('c') | ||||
| 
 | ||||
|     assert not attach_patts | ||||
| 
 | ||||
|     # final boxed error propagates to root | ||||
|     assert_before( | ||||
|         child, | ||||
|         [ | ||||
|             _crash_msg, | ||||
|             "<Task '__main__.main'", | ||||
|             "('root'", | ||||
|             "remote task raised a 'ValueError'", | ||||
|             "ValueError: asyncio side error!", | ||||
|         ] | ||||
|     ) | ||||
| 
 | ||||
|     if ctlc: | ||||
|         do_ctlc( | ||||
|             child, | ||||
|             # NOTE: setting this to 0 (or some other sufficient | ||||
|             # small val) can cause the test to fail since the | ||||
|             # `subactor` suffers a race where the root/parent | ||||
|             # sends an actor-cancel prior to it hitting its pause | ||||
|             # point; by def the value is 0.1 | ||||
|             delay=0.4, | ||||
|         ) | ||||
| 
 | ||||
|     child.sendline('c') | ||||
|     # with maybe_expect_timeout(): | ||||
|     child.expect(EOF) | ||||
| 
 | ||||
| 
 | ||||
| def test_sync_pause_from_non_greenbacked_aio_task(): | ||||
|     ''' | ||||
|     Where the `breakpoint()` caller task is NOT spawned by | ||||
|     `tractor.to_asyncio` and thus never activates | ||||
|     a `greenback.ensure_portal()` beforehand, presumably bc the task | ||||
|     was started by some lib/dep as in often seen in the field. | ||||
| 
 | ||||
|     Ensure sync pausing works when the pause is in, | ||||
| 
 | ||||
|     - the root actor running in infected-mode? | ||||
|       |_ since we don't need any IPC to acquire the debug lock? | ||||
|       |_ is there some way to handle this like the non-main-thread case? | ||||
| 
 | ||||
|     All other cases need to error out appropriately right? | ||||
| 
 | ||||
|     - for any subactor we can't avoid needing the repl lock.. | ||||
|       |_ is there a way to hook into `asyncio.ensure_future(obj)`? | ||||
| 
 | ||||
|     ''' | ||||
|     pass | ||||
|  | @ -0,0 +1,172 @@ | |||
| ''' | ||||
| That "native" runtime-hackin toolset better be dang useful! | ||||
| 
 | ||||
| Verify the function of a variety of "developer-experience" tools we | ||||
| offer from the `.devx` sub-pkg: | ||||
| 
 | ||||
| - use of the lovely `stackscope` for dumping actor `trio`-task trees | ||||
|   during operation and hangs. | ||||
| 
 | ||||
| TODO: | ||||
| - demonstration of `CallerInfo` call stack frame filtering such that | ||||
|   for logging and REPL purposes a user sees exactly the layers needed | ||||
|   when debugging a problem inside the stack vs. in their app. | ||||
| 
 | ||||
| ''' | ||||
| import os | ||||
| import signal | ||||
| import time | ||||
| 
 | ||||
| from .conftest import ( | ||||
|     expect, | ||||
|     assert_before, | ||||
|     in_prompt_msg, | ||||
|     PROMPT, | ||||
|     _pause_msg, | ||||
| ) | ||||
| from pexpect.exceptions import ( | ||||
|     # TIMEOUT, | ||||
|     EOF, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
def test_shield_pause(
    spawn,
):
    '''
    Verify the `tractor.pause()/.post_mortem()` API works inside an
    already cancelled `trio.CancelScope` and that you can step to the
    next checkpoint wherein the cancelled will get raised.

    Flow: spawn the `shield_hang_in_sub` example, send `SIGUSR1` to
    trigger a `stackscope` tree dump for both actors, then `SIGINT`
    and expect the hard-kill ("terminator") teardown path since the
    sub is shield-blocked and can't respond to the interrupt.

    '''
    child = spawn(
        'shield_hang_in_sub'
    )
    expect(
        child,
        'Yo my child hanging..?',
    )
    assert_before(
        child,
        [
            'Entering shield sleep..',
            'Enabling trace-trees on `SIGUSR1` since `stackscope` is installed @',
        ]
    )

    script_pid: int = child.pid
    print(
        f'Sending SIGUSR1 to {script_pid}\n'
        f'(kill -s SIGUSR1 {script_pid})\n'
    )
    os.kill(
        script_pid,
        signal.SIGUSR1,
    )
    # brief delay so the sig handler can run before we scan output.
    time.sleep(0.2)
    expect(
        child,
        # end-of-tree delimiter
        # XXX: MUST be a raw-str; `\(` is an invalid escape sequence
        # in a normal str-literal (SyntaxWarning on py3.12+, slated
        # to become a SyntaxError).
        r"end-of-\('root'",
    )
    assert_before(
        child,
        [
            # 'Trying to dump `stackscope` tree..',
            # 'Dumping `stackscope` tree for actor',
            "('root'",  # uid line

            # TODO!? this used to show?
            # -[ ] mk reproducible for @oremanj?
            #
            # parent block point (non-shielded)
            # 'await trio.sleep_forever()  # in root',
        ]
    )
    expect(
        child,
        # end-of-tree delimiter (raw-str, see note above)
        r"end-of-\('hanger'",
    )
    assert_before(
        child,
        [
            # relay to the sub should be reported
            'Relaying `SIGUSR1`[10] to sub-actor',

            "('hanger'",  # uid line

            # TODO!? SEE ABOVE
            # hanger LOC where it's shield-halted
            # 'await trio.sleep_forever()  # in subactor',
        ]
    )

    # simulate the user sending a ctl-c to the hanging program.
    # this should result in the terminator kicking in since
    # the sub is shield blocking and can't respond to SIGINT.
    os.kill(
        child.pid,
        signal.SIGINT,
    )
    expect(
        child,
        'Shutting down actor runtime',
        timeout=6,
    )
    assert_before(
        child,
        [
            'raise KeyboardInterrupt',
            # 'Shutting down actor runtime',
            '#T-800 deployed to collect zombie B0',
            "'--uid', \"('hanger',",
        ]
    )
| 
 | ||||
| 
 | ||||
def test_breakpoint_hook_restored(
    spawn,
):
    '''
    Ensures our actor runtime sets a custom `breakpoint()` hook
    on open then restores the stdlib's default on close.

    The hook state validation is done via `assert`s inside the
    invoked script with only `breakpoint()` (not `tractor.pause()`)
    calls used.

    '''
    child = spawn('restore_builtin_breakpoint')

    # header content the `tractor` REPL hook renders on pause;
    # expected on the FIRST bp, forbidden after hook restoration.
    pause_headers: list[str] = [
        _pause_msg,
        "<Task '__main__.main'",
        "('root'",
    ]

    # first `breakpoint()` lands in our custom hook.
    child.expect(PROMPT)
    assert_before(
        child,
        pause_headers
        + [
            "first bp, tractor hook set",
        ],
    )
    child.sendline('c')

    # second `breakpoint()` runs under the restored stdlib hook.
    child.expect(PROMPT)
    assert_before(
        child,
        [
            "last bp, stdlib hook restored",
        ]
    )

    # since the stdlib hook was already restored there should be NO
    # `tractor` `log.pdb()` content from console!
    assert not in_prompt_msg(
        child,
        pause_headers,
    )
    child.sendline('c')
    child.expect(EOF)
|  | @ -13,6 +13,7 @@ import trio | |||
| import tractor | ||||
| from tractor._testing import ( | ||||
|     examples_dir, | ||||
|     break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
|  | @ -90,10 +91,12 @@ def test_ipc_channel_break_during_stream( | |||
| 
 | ||||
|         # non-`trio` spawners should never hit the hang condition that | ||||
|         # requires the user to do ctl-c to cancel the actor tree. | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     mod: ModuleType = import_path( | ||||
|         examples_dir() / 'advanced_faults' / 'ipc_failure_during_stream.py', | ||||
|         examples_dir() / 'advanced_faults' | ||||
|         / 'ipc_failure_during_stream.py', | ||||
|         root=examples_dir(), | ||||
|         consider_namespace_packages=False, | ||||
|     ) | ||||
|  | @ -155,7 +158,7 @@ def test_ipc_channel_break_during_stream( | |||
|         if pre_aclose_msgstream: | ||||
|             expect_final_exc = KeyboardInterrupt | ||||
| 
 | ||||
|     # NOTE when the parent IPC side dies (even if the child's does as well | ||||
|     # NOTE when the parent IPC side dies (even if the child does as well | ||||
|     # but the child fails BEFORE the parent) we always expect the | ||||
|     # IPC layer to raise a closed-resource, NEVER do we expect | ||||
|     # a stop msg since the parent-side ctx apis will error out | ||||
|  | @ -167,7 +170,8 @@ def test_ipc_channel_break_during_stream( | |||
|         and | ||||
|         ipc_break['break_child_ipc_after'] is False | ||||
|     ): | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     # BOTH but, PARENT breaks FIRST | ||||
|     elif ( | ||||
|  | @ -178,7 +182,8 @@ def test_ipc_channel_break_during_stream( | |||
|             ipc_break['break_parent_ipc_after'] | ||||
|         ) | ||||
|     ): | ||||
|         expect_final_exc = trio.ClosedResourceError | ||||
|         # expect_final_exc = trio.ClosedResourceError | ||||
|         expect_final_exc = tractor.TransportClosed | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=( | ||||
|  | @ -197,8 +202,8 @@ def test_ipc_channel_break_during_stream( | |||
|                     **ipc_break, | ||||
|                 ) | ||||
|             ) | ||||
|         except KeyboardInterrupt as kbi: | ||||
|             _err = kbi | ||||
|         except KeyboardInterrupt as _kbi: | ||||
|             kbi = _kbi | ||||
|             if expect_final_exc is not KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Rxed unexpected KBI !?\n' | ||||
|  | @ -207,6 +212,21 @@ def test_ipc_channel_break_during_stream( | |||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|         except tractor.TransportClosed as _tc: | ||||
|             tc = _tc | ||||
|             if expect_final_exc is KeyboardInterrupt: | ||||
|                 pytest.fail( | ||||
|                     'Unexpected transport failure !?\n' | ||||
|                     f'{repr(tc)}' | ||||
|                 ) | ||||
|             cause: Exception = tc.__cause__ | ||||
|             assert ( | ||||
|                 type(cause) is trio.ClosedResourceError | ||||
|                 and | ||||
|                 cause.args[0] == 'another task closed this fd' | ||||
|             ) | ||||
|             raise | ||||
| 
 | ||||
|     # get raw instance from pytest wrapper | ||||
|     value = excinfo.value | ||||
|     if isinstance(value, ExceptionGroup): | ||||
|  | @ -225,9 +245,15 @@ async def break_ipc_after_started( | |||
| ) -> None: | ||||
|     await ctx.started() | ||||
|     async with ctx.open_stream() as stream: | ||||
|         await stream.aclose() | ||||
|         await trio.sleep(0.2) | ||||
|         await ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: make a test which verifies the error | ||||
|         # for this, i.e. raises a `MsgTypeError` | ||||
|         # await ctx.chan.send(None) | ||||
| 
 | ||||
|         await break_ipc( | ||||
|             stream=stream, | ||||
|             pre_close=True, | ||||
|         ) | ||||
|         print('child broke IPC and terminating') | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -89,17 +89,30 @@ def test_remote_error(reg_addr, args_err): | |||
|         assert excinfo.value.boxed_type == errtype | ||||
| 
 | ||||
|     else: | ||||
|         # the root task will also error on the `.result()` call | ||||
|         # so we expect an error from there AND the child. | ||||
|         with pytest.raises(BaseExceptionGroup) as excinfo: | ||||
|         # the root task will also error on the `Portal.result()` | ||||
|         # call so we expect an error from there AND the child. | ||||
|         # |_ tho seems like on new `trio` this doesn't always | ||||
|         #    happen? | ||||
|         with pytest.raises(( | ||||
|             BaseExceptionGroup, | ||||
|             tractor.RemoteActorError, | ||||
|         )) as excinfo: | ||||
|             trio.run(main) | ||||
| 
 | ||||
|         # ensure boxed errors | ||||
|         for exc in excinfo.value.exceptions: | ||||
|         # ensure boxed errors are `errtype` | ||||
|         err: BaseException = excinfo.value | ||||
|         if isinstance(err, BaseExceptionGroup): | ||||
|             suberrs: list[BaseException] = err.exceptions | ||||
|         else: | ||||
|             suberrs: list[BaseException] = [err] | ||||
| 
 | ||||
|         for exc in suberrs: | ||||
|             assert exc.boxed_type == errtype | ||||
| 
 | ||||
| 
 | ||||
| def test_multierror(reg_addr): | ||||
| def test_multierror( | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
|     ''' | ||||
|     Verify we raise a ``BaseExceptionGroup`` out of a nursery where | ||||
|     more then one actor errors. | ||||
|  | @ -117,7 +130,7 @@ def test_multierror(reg_addr): | |||
|             try: | ||||
|                 await portal2.result() | ||||
|             except tractor.RemoteActorError as err: | ||||
|                 assert err.boxed_type == AssertionError | ||||
|                 assert err.boxed_type is AssertionError | ||||
|                 print("Look Maa that first actor failed hard, hehh") | ||||
|                 raise | ||||
| 
 | ||||
|  | @ -169,7 +182,7 @@ def test_multierror_fast_nursery(reg_addr, start_method, num_subactors, delay): | |||
| 
 | ||||
|     for exc in exceptions: | ||||
|         assert isinstance(exc, tractor.RemoteActorError) | ||||
|         assert exc.boxed_type == AssertionError | ||||
|         assert exc.boxed_type is AssertionError | ||||
| 
 | ||||
| 
 | ||||
| async def do_nothing(): | ||||
|  | @ -491,7 +504,9 @@ def test_cancel_via_SIGINT_other_task( | |||
|     if is_win():  # smh | ||||
|         timeout += 1 | ||||
| 
 | ||||
|     async def spawn_and_sleep_forever(task_status=trio.TASK_STATUS_IGNORED): | ||||
|     async def spawn_and_sleep_forever( | ||||
|         task_status=trio.TASK_STATUS_IGNORED | ||||
|     ): | ||||
|         async with tractor.open_nursery() as tn: | ||||
|             for i in range(3): | ||||
|                 await tn.run_in_actor( | ||||
|  |  | |||
|  | @ -0,0 +1,917 @@ | |||
| ''' | ||||
| Low-level functional audits for our | ||||
| "capability based messaging"-spec feats. | ||||
| 
 | ||||
| B~) | ||||
| 
 | ||||
| ''' | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Type, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     structs, | ||||
|     msgpack, | ||||
|     Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import pytest | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     _state, | ||||
|     MsgTypeError, | ||||
|     Context, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
|     _ctxvar_MsgCodec, | ||||
| 
 | ||||
|     NamespacePath, | ||||
|     MsgCodec, | ||||
|     mk_codec, | ||||
|     apply_codec, | ||||
|     current_codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     _payload_msgs, | ||||
|     log, | ||||
|     PayloadMsg, | ||||
|     Started, | ||||
|     mk_msg_spec, | ||||
| ) | ||||
| import trio | ||||
| 
 | ||||
| 
 | ||||
def mk_custom_codec(
    pld_spec: Union[Type]|Any,
    add_hooks: bool,

) -> MsgCodec:
    '''
    Create custom `msgpack` enc/dec-hooks and set a `Decoder`
    which only loads `pld_spec` (like `NamespacePath`) types.

    Parameters:
      pld_spec: the payload type (or type-union) passed through to
        `mk_codec(ipc_pld_spec=...)` limiting what `.pld` decodes to.
      add_hooks: when `True` install the custom `NamespacePath`
        enc/dec hooks; when `False` no hooks are set at all.

    Returns the new `MsgCodec` (NOT yet applied to the runtime).

    '''
    uid: tuple[str, str] = tractor.current_actor().uid

    # XXX NOTE XXX: despite defining `NamespacePath` as a type
    # field on our `PayloadMsg.pld`, we still need an enc/dec_hook() pair
    # to cast to/from that type on the wire. See the docs:
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types

    def enc_nsp(obj: Any) -> Any:
        # encode-hook: only `NamespacePath` is supported; it is cast
        # to its `str` form for the wire, anything else hard-fails.
        print(f'{uid} ENC HOOK')
        match obj:
            case NamespacePath():
                print(
                    f'{uid}: `NamespacePath`-Only ENCODE?\n'
                    f'obj-> `{obj}`: {type(obj)}\n'
                )
                # if type(obj) != NamespacePath:
                #     breakpoint()
                return str(obj)

        print(
            f'{uid}\n'
            'CUSTOM ENCODE\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n'
        )
        logmsg: str = (
            f'{uid}\n'
            'FAILED ENCODE\n'
            f'obj-> `{obj}: {type(obj)}`\n'
        )
        raise NotImplementedError(logmsg)

    def dec_nsp(
        obj_type: Type,
        obj: Any,

    ) -> Any:
        # decode-hook: re-hydrate a `NamespacePath` from its wire
        # `str` form; any other (type, value) pair is passed back
        # unchanged so `msgspec` raises its own validation error.
        print(
            f'{uid}\n'
            'CUSTOM DECODE\n'
            f'type-arg-> {obj_type}\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n'
        )
        nsp = None

        if (
            obj_type is NamespacePath
            and isinstance(obj, str)
            and ':' in obj  # nsp strs are '<module>:<qualname>'
        ):
            nsp = NamespacePath(obj)
            # TODO: we could build a generic handler using
            # JUST matching the obj_type part?
            # nsp = obj_type(obj)

        if nsp:
            print(f'Returning NSP instance: {nsp}')
            return nsp

        logmsg: str = (
            f'{uid}\n'
            'FAILED DECODE\n'
            f'type-> {obj_type}\n'
            f'obj-arg-> `{obj}`: {type(obj)}\n\n'
            f'current codec:\n'
            f'{current_codec()}\n'
        )
        # TODO: figure out the ignore subsys for this!
        # -[ ] option whether to defense-relay back the msg
        #   inside an `Invalid`/`Ignore`
        # -[ ] how to make this handling pluggable such that a
        #   `Channel`/`MsgTransport` can intercept and process
        #   back msgs either via exception handling or some other
        #   signal?
        log.warning(logmsg)
        # NOTE: this delivers the invalid
        # value up to `msgspec`'s decoding
        # machinery for error raising.
        return obj
        # raise NotImplementedError(logmsg)

    nsp_codec: MsgCodec = mk_codec(
        ipc_pld_spec=pld_spec,

        # NOTE XXX: the encode hook MUST be used no matter what since
        # our `NamespacePath` is not any of a `Any` native type nor
        # a `msgspec.Struct` subtype - so `msgspec` has no way to know
        # how to encode it unless we provide the custom hook.
        #
        # AGAIN that is, regardless of whether we spec an
        # `Any`-decoded-pld the enc has no knowledge (by default)
        # how to enc `NamespacePath` (nsp), so we add a custom
        # hook to do that ALWAYS.
        enc_hook=enc_nsp if add_hooks else None,

        # XXX NOTE: pretty sure this is mutex with the `type=` to
        # `Decoder`? so it won't work in tandem with the
        # `ipc_pld_spec` passed above?
        dec_hook=dec_nsp if add_hooks else None,
    )
    return nsp_codec
| 
 | ||||
| 
 | ||||
def chk_codec_applied(
    expect_codec: MsgCodec,
    enter_value: MsgCodec|None = None,

) -> None:
    '''
    Buncha sanity checks ensuring that the IPC channel's
    context-vars are set to the expected codec and that the
    ctx-var wrapper APIs match the same.

    Parameters:
      expect_codec: the codec instance the current task-context
        should be configured with.
      enter_value: optionally, the value yielded by the
        `apply_codec()` ctx-mngr which must be the same object.

    Raises `AssertionError` on any mismatch; returns nothing.

    '''
    # TODO: play with tricycle again, bc this is supposed to work
    # the way we want?
    #
    # TreeVar
    # task: trio.Task = trio.lowlevel.current_task()
    # curr_codec = _ctxvar_MsgCodec.get_in(task)

    # ContextVar
    # task_ctx: Context = task.context
    # assert _ctxvar_MsgCodec in task_ctx
    # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec]

    # NOTE: currently we use this!
    # RunVar
    curr_codec: MsgCodec = current_codec()
    last_read_codec = _ctxvar_MsgCodec.get()
    # assert curr_codec is last_read_codec

    assert (
        (same_codec := expect_codec) is
        # returned from `mk_codec()`

        # yielded value from `apply_codec()`

        # read from current task's `contextvars.Context`
        curr_codec is
        last_read_codec

        # the default `msgspec` settings
        is not _codec._def_msgspec_codec
        is not _codec._def_tractor_codec
    )

    if enter_value:
        # XXX: this was previously a bare comparison-expression
        # statement whose (bool) result was silently discarded,
        # i.e. the check never actually ran; `assert` it so that
        # a mismatched enter-value fails loudly.
        assert enter_value is same_codec
| 
 | ||||
| 
 | ||||
def iter_maybe_sends(
    send_items: dict[Union[Type], Any] | list[tuple],
    ipc_pld_spec: Union[Type] | Any,
    add_codec_hooks: bool,

    codec: MsgCodec|None = None,

) -> tuple[Any, bool]:
    '''
    Generator yielding a `(type_name_str, value, expect_roundtrip)`
    triple per input item, where `expect_roundtrip` flags whether
    the value should survive an IPC encode/decode cycle under
    `ipc_pld_spec`.

    When a `codec` is provided, each value is first roundtripped
    through a `Started` msg and the expectation is verified before
    the triple is yielded.

    '''
    items = (
        send_items.items()
        if isinstance(send_items, dict)
        else send_items
    )

    for type_spec, value in items:

        # sanity: each value must match its declared type-spec
        # (either directly or as a member of a union's args).
        val_type = type(value)
        subtypes = getattr(type_spec, '__args__', None)
        assert (
            val_type == type_spec
            or (
                subtypes
                and val_type in subtypes
            )
        )

        # a non-union spec has no `__args__`; treat it as a
        # single-entry set of itself.
        spec_subtypes: set[Union[Type]] = getattr(
            ipc_pld_spec,
            '__args__',
            {ipc_pld_spec,},
        )

        # is this value's type covered by the payload spec?
        in_spec: bool = (
            val_type == ipc_pld_spec
            or (
                ipc_pld_spec != Any
                and  # presume `Union` of types
                val_type in spec_subtypes
            )
            or (
                ipc_pld_spec == Any
                and
                val_type != NamespacePath
            )
        )

        # any spec should support all other builtin py values we
        # send; our custom nsp type only roundtrips (under `Any`)
        # when the custom codec hooks are installed.
        should_roundtrip: bool = (
            in_spec
            or (
                ipc_pld_spec == Any
                and
                val_type == NamespacePath
                and
                add_codec_hooks
            )
        )

        if codec is not None:
            # XXX FIRST XXX ensure roundtripping works
            # before touching any IPC primitives/APIs.
            wire_bytes: bytes = codec.encode(
                Started(
                    cid='blahblah',
                    pld=value,
                )
            )
            # NOTE: demonstrates the decoder loading
            # via our native SCIPP msg-spec
            # (structured-conc-inter-proc-protocol).
            try:
                decoded: Started = codec.decode(wire_bytes)
                if not should_roundtrip:
                    pytest.fail(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {value}: {val_type}\n'
                    )

                assert decoded.pld == value

            except ValidationError:
                if should_roundtrip:
                    pytest.fail(
                        f'EXPECTED to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {value}: {val_type}\n'
                    )

        yield (
            str(val_type),
            value,
            should_roundtrip,
        )
| 
 | ||||
| 
 | ||||
| def dec_type_union( | ||||
|     type_names: list[str], | ||||
| ) -> Type: | ||||
|     ''' | ||||
|     Look up types by name, compile into a list and then create and | ||||
|     return a `typing.Union` from the full set. | ||||
| 
 | ||||
|     ''' | ||||
|     import importlib | ||||
|     types: list[Type] = [] | ||||
|     for type_name in type_names: | ||||
|         for mod in [ | ||||
|             typing, | ||||
|             importlib.import_module(__name__), | ||||
|         ]: | ||||
|             if type_ref := getattr( | ||||
|                 mod, | ||||
|                 type_name, | ||||
|                 False, | ||||
|             ): | ||||
|                 types.append(type_ref) | ||||
| 
 | ||||
|     # special case handling only.. | ||||
|     # ipc_pld_spec: Union[Type] = eval( | ||||
|     #     pld_spec_str, | ||||
|     #     {},  # globals | ||||
|     #     {'typing': typing},  # locals | ||||
|     # ) | ||||
| 
 | ||||
|     return Union[*types] | ||||
| 
 | ||||
| 
 | ||||
| def enc_type_union( | ||||
|     union_or_type: Union[Type]|Type, | ||||
| ) -> list[str]: | ||||
|     ''' | ||||
|     Encode a type-union or single type to a list of type-name-strings | ||||
|     ready for IPC interchange. | ||||
| 
 | ||||
|     ''' | ||||
|     type_strs: list[str] = [] | ||||
|     for typ in getattr( | ||||
|         union_or_type, | ||||
|         '__args__', | ||||
|         {union_or_type,}, | ||||
|     ): | ||||
|         type_strs.append(typ.__qualname__) | ||||
| 
 | ||||
|     return type_strs | ||||
| 
 | ||||
| 
 | ||||
@tractor.context
async def send_back_values(
    ctx: Context,
    expect_debug: bool,
    pld_spec_type_strs: list[str],
    add_hooks: bool,
    started_msg_bytes: bytes,
    expect_ipc_send: dict[str, tuple[Any, bool]],

) -> None:
    '''
    Set up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    Parameters:
      ctx: the IPC context with the parent actor.
      expect_debug: expected debug-mode flag, sanity checked
        against `_state.debug_mode()`.
      pld_spec_type_strs: type-name strings (see `enc_type_union()`)
        decoded into the payload-spec via `dec_type_union()`.
      add_hooks: whether custom `NamespacePath` enc/dec hooks are
        installed (must mirror the parent side's config).
      started_msg_bytes: parent-encoded `Started` msg bytes used to
        verify cross-codec decoding.
      expect_ipc_send: maps type-name -> (value, expect-send-success)
        driving both the `.started()` and streaming send phases.

    '''
    uid: tuple = tractor.current_actor().uid

    # debug mode sanity check (prolly superfluous but, meh)
    assert expect_debug == _state.debug_mode()

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    ipc_pld_spec = dec_type_union(
        pld_spec_type_strs,
    )
    pld_spec_str = str(ipc_pld_spec)

    # same as on parent side config.
    nsp_codec: MsgCodec = mk_custom_codec(
        pld_spec=ipc_pld_spec,
        add_hooks=add_hooks,
    )
    with (
        apply_codec(nsp_codec) as codec,
    ):
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        print(
            f'{uid}: attempting `Started`-bytes DECODE..\n'
        )
        try:
            msg: Started = nsp_codec.decode(started_msg_bytes)
            expected_pld_spec_str: str = msg.pld
            assert pld_spec_str == expected_pld_spec_str

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except ValidationError:

            # NOTE: only in the `Any` spec case do we expect this to
            # work since otherwise no spec covers a plain-ol'
            # `.pld: str`
            if pld_spec_str == 'Any':
                raise
            else:
                print(
                    f'{uid}: (correctly) unable to DECODE `Started`-bytes\n'
                    f'{started_msg_bytes}\n'
                )

        # phase 1: attempt `.started()` sends until the first value
        # which the spec accepts; remaining values go to phase 2.
        iter_send_val_items = iter(expect_ipc_send.values())
        sent: list[Any] = []
        for send_value, expect_send in iter_send_val_items:
            try:
                print(
                    f'{uid}: attempting to `.started({send_value})`\n'
                    f'=> expect_send: {expect_send}\n'
                    f'SINCE, ipc_pld_spec: {ipc_pld_spec}\n'
                    f'AND, codec: {codec}\n'
                )
                await ctx.started(send_value)
                sent.append(send_value)
                if not expect_send:

                    # XXX NOTE XXX THIS WON'T WORK WITHOUT SPECIAL
                    # `str` handling! or special debug mode IPC
                    # msgs!
                    await tractor.pause()

                    raise RuntimeError(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

                break  # move on to streaming block..

            except tractor.MsgTypeError:
                await tractor.pause()

                if expect_send:
                    raise RuntimeError(
                        f'EXPECTED to `.started()` value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

        # phase 2: stream the remaining values; the same iterator is
        # shared with the loop above so already-consumed items skip.
        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: Entering streaming block to send remaining values..'
            )

            for send_value, expect_send in iter_send_val_items:
                send_type: Type = type(send_value)
                print(
                    '------ - ------\n'
                    f'{uid}: SENDING NEXT VALUE\n'
                    f'ipc_pld_spec: {ipc_pld_spec}\n'
                    f'expect_send: {expect_send}\n'
                    f'val: {send_value}\n'
                    '------ - ------\n'
                )
                try:
                    await ipc.send(send_value)
                    print(f'***\n{uid}-CHILD sent {send_value!r}\n***\n')
                    sent.append(send_value)

                    # NOTE: should only raise above on
                    # `.started()` or a `Return`
                    # if not expect_send:
                    #     raise RuntimeError(
                    #         f'NOT-EXPECTED able to roundtrip value given spec:\n'
                    #         f'ipc_pld_spec -> {ipc_pld_spec}\n'
                    #         f'value -> {send_value}: {send_type}\n'
                    #     )

                except ValidationError:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # await tractor.pause()
                    if expect_send:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'ipc_pld_spec -> {ipc_pld_spec}\n'
                            f'value -> {send_value}: {send_type}\n'
                        )
                    # continue

            else:
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')

        # TODO: this won't be true bc in streaming phase we DO NOT
        # msgspec check outbound msgs!
        # -[ ] once we implement the receiver side `InvalidMsg`
        #   then we can expect it here?
        # assert (
        #     len(sent)
        #     ==
        #     len([val
        #          for val, expect in
        #          expect_ipc_send.values()
        #          if expect is True])
        # )
| 
 | ||||
| 
 | ||||
| def ex_func(*args): | ||||
|     print(f'ex_func({args})') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'ipc_pld_spec', | ||||
|     [ | ||||
|         Any, | ||||
|         NamespacePath, | ||||
|         NamespacePath|None,  # the "maybe" spec Bo | ||||
|     ], | ||||
|     ids=[ | ||||
|         'any_type', | ||||
|         'nsp_type', | ||||
|         'maybe_nsp_type', | ||||
|     ] | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'add_codec_hooks', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||
| ) | ||||
| def test_codec_hooks_mod( | ||||
|     debug_mode: bool, | ||||
|     ipc_pld_spec: Union[Type]|Any, | ||||
|     # send_value: None|str|NamespacePath, | ||||
|     add_codec_hooks: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Audit the `.msg.MsgCodec` override apis details given our impl | ||||
|     uses `contextvars` to accomplish per `trio` task codec | ||||
|     application around an inter-proc-task-comms context. | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         nsp = NamespacePath.from_ref(ex_func) | ||||
|         send_items: dict[Union, Any] = { | ||||
|             Union[None]: None, | ||||
|             Union[NamespacePath]: nsp, | ||||
|             Union[str]: str(nsp), | ||||
|         } | ||||
| 
 | ||||
|         # init default state for actor | ||||
|         chk_codec_applied( | ||||
|             expect_codec=_codec._def_tractor_codec, | ||||
|         ) | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as an: | ||||
|             p: tractor.Portal = await an.start_actor( | ||||
|                 'sub', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: 2 cases: | ||||
|             # - codec not modified -> decode nsp as `str` | ||||
|             # - codec modified with hooks -> decode nsp as | ||||
|             #   `NamespacePath` | ||||
|             nsp_codec: MsgCodec = mk_custom_codec( | ||||
|                 pld_spec=ipc_pld_spec, | ||||
|                 add_hooks=add_codec_hooks, | ||||
|             ) | ||||
|             with apply_codec(nsp_codec) as codec: | ||||
|                 chk_codec_applied( | ||||
|                     expect_codec=nsp_codec, | ||||
|                     enter_value=codec, | ||||
|                 ) | ||||
| 
 | ||||
|                 expect_ipc_send: dict[str, tuple[Any, bool]] = {} | ||||
| 
 | ||||
|                 report: str = ( | ||||
|                     'Parent report on send values with\n' | ||||
|                     f'ipc_pld_spec: {ipc_pld_spec}\n' | ||||
|                     '       ------ - ------\n' | ||||
|                 ) | ||||
|                 for val_type_str, val, expect_send in iter_maybe_sends( | ||||
|                     send_items, | ||||
|                     ipc_pld_spec, | ||||
|                     add_codec_hooks=add_codec_hooks, | ||||
|                 ): | ||||
|                     report += ( | ||||
|                         f'send_value: {val}: {type(val)} ' | ||||
|                         f'=> expect_send: {expect_send}\n' | ||||
|                     ) | ||||
|                     expect_ipc_send[val_type_str] = (val, expect_send) | ||||
| 
 | ||||
|                 print( | ||||
|                     report + | ||||
|                     '       ------ - ------\n' | ||||
|                 ) | ||||
|                 assert len(expect_ipc_send) == len(send_items) | ||||
|                 # now try over real IPC with a the subactor | ||||
|                 # expect_ipc_rountrip: bool = True | ||||
|                 expected_started = Started( | ||||
|                     cid='cid', | ||||
|                     pld=str(ipc_pld_spec), | ||||
|                 ) | ||||
|                 # build list of values we expect to receive from | ||||
|                 # the subactor. | ||||
|                 expect_to_send: list[Any] = [ | ||||
|                     val | ||||
|                     for val, expect_send in expect_ipc_send.values() | ||||
|                     if expect_send | ||||
|                 ] | ||||
| 
 | ||||
|                 pld_spec_type_strs: list[str] = enc_type_union(ipc_pld_spec) | ||||
| 
 | ||||
|                 # XXX should raise an mte (`MsgTypeError`) | ||||
|                 # when `add_codec_hooks == False` bc the input | ||||
|                 # `expect_ipc_send` kwarg has a nsp which can't be | ||||
|                 # serialized! | ||||
|                 # | ||||
|                 # TODO:can we ensure this happens from the | ||||
|                 # `Return`-side (aka the sub) as well? | ||||
|                 if not add_codec_hooks: | ||||
|                     try: | ||||
|                         async with p.open_context( | ||||
|                             send_back_values, | ||||
|                             expect_debug=debug_mode, | ||||
|                             pld_spec_type_strs=pld_spec_type_strs, | ||||
|                             add_hooks=add_codec_hooks, | ||||
|                             started_msg_bytes=nsp_codec.encode(expected_started), | ||||
| 
 | ||||
|                             # XXX NOTE bc we send a `NamespacePath` in this kwarg | ||||
|                             expect_ipc_send=expect_ipc_send, | ||||
| 
 | ||||
|                         ) as (ctx, first): | ||||
|                             pytest.fail('ctx should fail to open without custom enc_hook!?') | ||||
| 
 | ||||
|                     # this test passes bc we can go no further! | ||||
|                     except MsgTypeError: | ||||
|                         # teardown nursery | ||||
|                         await p.cancel_actor() | ||||
|                         return | ||||
| 
 | ||||
|                 # TODO: send the original nsp here and | ||||
|                 # test with `limit_msg_spec()` above? | ||||
|                 # await tractor.pause() | ||||
|                 print('PARENT opening IPC ctx!\n') | ||||
|                 async with ( | ||||
| 
 | ||||
|                     # XXX should raise an mte (`MsgTypeError`) | ||||
|                     # when `add_codec_hooks == False`.. | ||||
|                     p.open_context( | ||||
|                         send_back_values, | ||||
|                         expect_debug=debug_mode, | ||||
|                         pld_spec_type_strs=pld_spec_type_strs, | ||||
|                         add_hooks=add_codec_hooks, | ||||
|                         started_msg_bytes=nsp_codec.encode(expected_started), | ||||
|                         expect_ipc_send=expect_ipc_send, | ||||
|                     ) as (ctx, first), | ||||
| 
 | ||||
|                     ctx.open_stream() as ipc, | ||||
|                 ): | ||||
|                     # ensure codec is still applied across | ||||
|                     # `tractor.Context` + its embedded nursery. | ||||
|                     chk_codec_applied( | ||||
|                         expect_codec=nsp_codec, | ||||
|                         enter_value=codec, | ||||
|                     ) | ||||
|                     print( | ||||
|                         'root: ENTERING CONTEXT BLOCK\n' | ||||
|                         f'type(first): {type(first)}\n' | ||||
|                         f'first: {first}\n' | ||||
|                     ) | ||||
|                     expect_to_send.remove(first) | ||||
| 
 | ||||
|                     # TODO: explicit values we expect depending on | ||||
|                     # codec config! | ||||
|                     # assert first == first_val | ||||
|                     # assert first == f'{__name__}:ex_func' | ||||
| 
 | ||||
|                     async for next_sent in ipc: | ||||
|                         print( | ||||
|                             'Parent: child sent next value\n' | ||||
|                             f'{next_sent}: {type(next_sent)}\n' | ||||
|                         ) | ||||
|                         if expect_to_send: | ||||
|                             expect_to_send.remove(next_sent) | ||||
|                         else: | ||||
|                             print('PARENT should terminate stream loop + block!') | ||||
| 
 | ||||
|                     # all sent values should have arrived! | ||||
|                     assert not expect_to_send | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| def chk_pld_type( | ||||
|     payload_spec: Type[Struct]|Any, | ||||
|     pld: Any, | ||||
| 
 | ||||
|     expect_roundtrip: bool|None = None, | ||||
| 
 | ||||
| ) -> bool: | ||||
| 
 | ||||
|     pld_val_type: Type = type(pld) | ||||
| 
 | ||||
|     # TODO: verify that the overridden subtypes | ||||
|     # DO NOT have modified type-annots from original! | ||||
|     # 'Start',  .pld: FuncSpec | ||||
|     # 'StartAck',  .pld: IpcCtxSpec | ||||
|     # 'Stop',  .pld: UNSEt | ||||
|     # 'Error',  .pld: ErrorData | ||||
| 
 | ||||
|     codec: MsgCodec = mk_codec( | ||||
|         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||
|         # type union. | ||||
|         ipc_pld_spec=payload_spec, | ||||
|     ) | ||||
| 
 | ||||
|     # make a one-off dec to compare with our `MsgCodec` instance | ||||
|     # which does the below `mk_msg_spec()` call internally | ||||
|     ipc_msg_spec: Union[Type[Struct]] | ||||
|     msg_types: list[PayloadMsg[payload_spec]] | ||||
|     ( | ||||
|         ipc_msg_spec, | ||||
|         msg_types, | ||||
|     ) = mk_msg_spec( | ||||
|         payload_type_union=payload_spec, | ||||
|     ) | ||||
|     _enc = msgpack.Encoder() | ||||
|     _dec = msgpack.Decoder( | ||||
|         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||
|     ) | ||||
| 
 | ||||
|     assert ( | ||||
|         payload_spec | ||||
|         == | ||||
|         codec.pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # assert codec.dec == dec | ||||
|     # | ||||
|     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||
|     # to `str` they seem to match ?? .. kk | ||||
| 
 | ||||
|     assert ( | ||||
|         str(ipc_msg_spec) | ||||
|         == | ||||
|         str(codec.msg_spec) | ||||
|         == | ||||
|         str(_dec.type) | ||||
|         == | ||||
|         str(codec.dec.type) | ||||
|     ) | ||||
| 
 | ||||
|     # verify the boxed-type for all variable payload-type msgs. | ||||
|     if not msg_types: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     roundtrip: bool|None = None | ||||
|     pld_spec_msg_names: list[str] = [ | ||||
|         td.__name__ for td in _payload_msgs | ||||
|     ] | ||||
|     for typedef in msg_types: | ||||
| 
 | ||||
|         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||
|         if skip_runtime_msg: | ||||
|             continue | ||||
| 
 | ||||
|         pld_field = structs.fields(typedef)[1] | ||||
|         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? | ||||
| 
 | ||||
|         kwargs: dict[str, Any] = { | ||||
|             'cid': '666', | ||||
|             'pld': pld, | ||||
|         } | ||||
|         enc_msg: PayloadMsg = typedef(**kwargs) | ||||
| 
 | ||||
|         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||
|         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||
|         assert _wire_bytes == wire_bytes | ||||
| 
 | ||||
|         ve: ValidationError|None = None | ||||
|         try: | ||||
|             dec_msg = codec.dec.decode(wire_bytes) | ||||
|             _dec_msg = _dec.decode(wire_bytes) | ||||
| 
 | ||||
|             # decoded msg and thus payload should be exactly same! | ||||
|             assert (roundtrip := ( | ||||
|                 _dec_msg | ||||
|                 == | ||||
|                 dec_msg | ||||
|                 == | ||||
|                 enc_msg | ||||
|             )) | ||||
| 
 | ||||
|             if ( | ||||
|                 expect_roundtrip is not None | ||||
|                 and expect_roundtrip != roundtrip | ||||
|             ): | ||||
|                 breakpoint() | ||||
| 
 | ||||
|             assert ( | ||||
|                 pld | ||||
|                 == | ||||
|                 dec_msg.pld | ||||
|                 == | ||||
|                 enc_msg.pld | ||||
|             ) | ||||
|             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||
| 
 | ||||
|         except ValidationError as _ve: | ||||
|             ve = _ve | ||||
|             roundtrip: bool = False | ||||
|             if pld_val_type is payload_spec: | ||||
|                 raise ValueError( | ||||
|                    'Got `ValidationError` despite type-var match!?\n' | ||||
|                     f'pld_val_type: {pld_val_type}\n' | ||||
|                     f'payload_type: {payload_spec}\n' | ||||
|                 ) from ve | ||||
| 
 | ||||
|             else: | ||||
|                 # ow we good cuz the pld spec mismatched. | ||||
|                 print( | ||||
|                     'Got expected `ValidationError` since,\n' | ||||
|                     f'{pld_val_type} is not {payload_spec}\n' | ||||
|                 ) | ||||
|         else: | ||||
|             if ( | ||||
|                 payload_spec is not Any | ||||
|                 and | ||||
|                 pld_val_type is not payload_spec | ||||
|             ): | ||||
|                 raise ValueError( | ||||
|                    'DID NOT `ValidationError` despite expected type match!?\n' | ||||
|                     f'pld_val_type: {pld_val_type}\n' | ||||
|                     f'payload_type: {payload_spec}\n' | ||||
|                 ) | ||||
| 
 | ||||
|     # full code decode should always be attempted! | ||||
|     if roundtrip is None: | ||||
|         breakpoint() | ||||
| 
 | ||||
|     return roundtrip | ||||
| 
 | ||||
| 
 | ||||
| def test_limit_msgspec(): | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_root_actor( | ||||
|             debug_mode=True | ||||
|         ): | ||||
| 
 | ||||
|             # ensure we can round-trip a boxing `PayloadMsg` | ||||
|             assert chk_pld_type( | ||||
|                 payload_spec=Any, | ||||
|                 pld=None, | ||||
|                 expect_roundtrip=True, | ||||
|             ) | ||||
| 
 | ||||
|             # verify that a mis-typed payload value won't decode | ||||
|             assert not chk_pld_type( | ||||
|                 payload_spec=int, | ||||
|                 pld='doggy', | ||||
|             ) | ||||
| 
 | ||||
|             # parametrize the boxed `.pld` type as a custom-struct | ||||
|             # and ensure that parametrization propagates | ||||
|             # to all payload-msg-spec-able subtypes! | ||||
|             class CustomPayload(Struct): | ||||
|                 name: str | ||||
|                 value: Any | ||||
| 
 | ||||
|             assert not chk_pld_type( | ||||
|                 payload_spec=CustomPayload, | ||||
|                 pld='doggy', | ||||
|             ) | ||||
| 
 | ||||
|             assert chk_pld_type( | ||||
|                 payload_spec=CustomPayload, | ||||
|                 pld=CustomPayload(name='doggy', value='urmom') | ||||
|             ) | ||||
| 
 | ||||
|             # yah, we can `.pause_from_sync()` now! | ||||
|             # breakpoint() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -6,6 +6,7 @@ sync-opening a ``tractor.Context`` beforehand. | |||
| 
 | ||||
| ''' | ||||
| from itertools import count | ||||
| import math | ||||
| import platform | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|  | @ -24,6 +25,7 @@ from tractor._exceptions import ( | |||
|     StreamOverrun, | ||||
|     ContextCancelled, | ||||
| ) | ||||
| from tractor._state import current_ipc_ctx | ||||
| 
 | ||||
| from tractor._testing import ( | ||||
|     tractor_test, | ||||
|  | @ -143,6 +145,8 @@ async def simple_setup_teardown( | |||
|     global _state | ||||
|     _state = True | ||||
| 
 | ||||
|     assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|     # signal to parent that we're up | ||||
|     await ctx.started(data + 1) | ||||
| 
 | ||||
|  | @ -203,6 +207,7 @@ def test_simple_context( | |||
|                             block_forever=callee_blocks_forever, | ||||
|                         ) as (ctx, sent), | ||||
|                     ): | ||||
|                         assert current_ipc_ctx() is ctx | ||||
|                         assert sent == 11 | ||||
| 
 | ||||
|                         if callee_blocks_forever: | ||||
|  | @ -795,10 +800,12 @@ async def test_callee_cancels_before_started( | |||
| 
 | ||||
|         # raises a special cancel signal | ||||
|         except tractor.ContextCancelled as ce: | ||||
|             _ce = ce  # for debug on crash | ||||
|             ce.boxed_type == trio.Cancelled | ||||
| 
 | ||||
|             # the traceback should be informative | ||||
|             assert 'itself' in ce.msgdata['tb_str'] | ||||
|             assert 'itself' in ce.tb_str | ||||
|             assert ce.tb_str == ce.msgdata['tb_str'] | ||||
| 
 | ||||
|         # teardown the actor | ||||
|         await portal.cancel_actor() | ||||
|  | @ -845,7 +852,10 @@ async def keep_sending_from_callee( | |||
|         ('caller', 1, never_open_stream), | ||||
|         ('callee', 0, keep_sending_from_callee), | ||||
|     ], | ||||
|     ids='overrun_condition={}'.format, | ||||
|     ids=[ | ||||
|          ('caller_1buf_never_open_stream'), | ||||
|          ('callee_0buf_keep_sending_from_callee'), | ||||
|     ] | ||||
| ) | ||||
| def test_one_end_stream_not_opened( | ||||
|     overrun_by: tuple[str, int, Callable], | ||||
|  | @ -869,6 +879,7 @@ def test_one_end_stream_not_opened( | |||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             with trio.fail_after(1): | ||||
|                 async with portal.open_context( | ||||
|                     entrypoint, | ||||
|                 ) as (ctx, sent): | ||||
|  | @ -944,7 +955,7 @@ async def echo_back_sequence( | |||
|     ) | ||||
| 
 | ||||
|     await ctx.started() | ||||
|     # await tractor.breakpoint() | ||||
|     # await tractor.pause() | ||||
|     async with ctx.open_stream( | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
| 
 | ||||
|  | @ -1055,7 +1066,17 @@ def test_maybe_allow_overruns_stream( | |||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|             ) | ||||
|             seq = list(range(10)) | ||||
| 
 | ||||
|             # stream-sequence batch info with send delay to determine | ||||
|             # approx timeout determining whether test has hung. | ||||
|             total_batches: int = 2 | ||||
|             num_items: int = 10 | ||||
|             seq = list(range(num_items)) | ||||
|             parent_send_delay: float = 0.16 | ||||
|             timeout: float = math.ceil( | ||||
|                 total_batches * num_items * parent_send_delay | ||||
|             ) | ||||
|             with trio.fail_after(timeout): | ||||
|                 async with portal.open_context( | ||||
|                     echo_back_sequence, | ||||
|                     seq=seq, | ||||
|  | @ -1071,7 +1092,6 @@ def test_maybe_allow_overruns_stream( | |||
|                         allow_overruns=(allow_overruns_side in {'parent', 'both'}), | ||||
|                     ) as stream: | ||||
| 
 | ||||
|                     total_batches: int = 2 | ||||
|                         for _ in range(total_batches): | ||||
|                             for msg in seq: | ||||
|                                 # print(f'root tx {msg}') | ||||
|  | @ -1080,7 +1100,7 @@ def test_maybe_allow_overruns_stream( | |||
|                                     # NOTE: we make the parent slightly | ||||
|                                     # slower, when it is slow, to make sure | ||||
|                                     # that in the overruns everywhere case | ||||
|                                 await trio.sleep(0.16) | ||||
|                                     await trio.sleep(parent_send_delay) | ||||
| 
 | ||||
|                             batch = [] | ||||
|                             async for msg in stream: | ||||
|  | @ -1143,7 +1163,8 @@ def test_maybe_allow_overruns_stream( | |||
| 
 | ||||
|         elif slow_side == 'parent': | ||||
|             assert err.boxed_type == tractor.RemoteActorError | ||||
|             assert 'StreamOverrun' in err.msgdata['tb_str'] | ||||
|             assert 'StreamOverrun' in err.tb_str | ||||
|             assert err.tb_str == err.msgdata['tb_str'] | ||||
| 
 | ||||
|     else: | ||||
|         # if this hits the logic blocks from above are not | ||||
|  |  | |||
|  | @ -26,7 +26,7 @@ async def test_reg_then_unreg(reg_addr): | |||
|         portal = await n.start_actor('actor', enable_modules=[__name__]) | ||||
|         uid = portal.channel.uid | ||||
| 
 | ||||
|         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             # this local actor should be the arbiter | ||||
|             assert actor is aportal.actor | ||||
| 
 | ||||
|  | @ -160,7 +160,7 @@ async def spawn_and_check_registry( | |||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             # runtime needs to be up to call this | ||||
|             actor = tractor.current_actor() | ||||
| 
 | ||||
|  | @ -298,7 +298,7 @@ async def close_chans_before_nursery( | |||
|     async with tractor.open_root_actor( | ||||
|         registry_addrs=[reg_addr], | ||||
|     ): | ||||
|         async with tractor.get_arbiter(*reg_addr) as aportal: | ||||
|         async with tractor.get_registry(*reg_addr) as aportal: | ||||
|             try: | ||||
|                 get_reg = partial(unpack_reg, aportal) | ||||
| 
 | ||||
|  |  | |||
|  | @ -19,7 +19,7 @@ from tractor._testing import ( | |||
| @pytest.fixture | ||||
| def run_example_in_subproc( | ||||
|     loglevel: str, | ||||
|     testdir, | ||||
|     testdir: pytest.Testdir, | ||||
|     reg_addr: tuple[str, int], | ||||
| ): | ||||
| 
 | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -55,9 +55,10 @@ from tractor._testing import ( | |||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def sleep_forever( | ||||
| async def open_stream_then_sleep_forever( | ||||
|     ctx: Context, | ||||
|     expect_ctxc: bool = False, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Sync the context, open a stream then just sleep. | ||||
|  | @ -67,6 +68,10 @@ async def sleep_forever( | |||
|     ''' | ||||
|     try: | ||||
|         await ctx.started() | ||||
| 
 | ||||
|         # NOTE: the below means this child will send a `Stop` | ||||
|         # to it's parent-side task despite that side never | ||||
|         # opening a stream itself. | ||||
|         async with ctx.open_stream(): | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|  | @ -100,7 +105,7 @@ async def error_before_started( | |||
|     ''' | ||||
|     async with tractor.wait_for_actor('sleeper') as p2: | ||||
|         async with ( | ||||
|             p2.open_context(sleep_forever) as (peer_ctx, first), | ||||
|             p2.open_context(open_stream_then_sleep_forever) as (peer_ctx, first), | ||||
|             peer_ctx.open_stream(), | ||||
|         ): | ||||
|             # NOTE: this WAS inside an @acm body but i factored it | ||||
|  | @ -165,7 +170,7 @@ def test_do_not_swallow_error_before_started_by_remote_contextcancelled( | |||
|         trio.run(main) | ||||
| 
 | ||||
|     rae = excinfo.value | ||||
|     assert rae.boxed_type == TypeError | ||||
|     assert rae.boxed_type is TypeError | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
|  | @ -185,6 +190,10 @@ async def sleep_a_bit_then_cancel_peer( | |||
|         await trio.sleep(cancel_after) | ||||
|         await peer.cancel_actor() | ||||
| 
 | ||||
|         # such that we're cancelled by our rent ctx-task | ||||
|         await trio.sleep(3) | ||||
|         print('CANCELLER RETURNING!') | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context | ||||
| async def stream_ints( | ||||
|  | @ -200,9 +209,13 @@ async def stream_ints( | |||
| @tractor.context | ||||
| async def stream_from_peer( | ||||
|     ctx: Context, | ||||
|     debug_mode: bool, | ||||
|     peer_name: str = 'sleeper', | ||||
| ) -> None: | ||||
| 
 | ||||
|     # sanity | ||||
|     assert tractor._state.debug_mode() == debug_mode | ||||
| 
 | ||||
|     peer: Portal | ||||
|     try: | ||||
|         async with ( | ||||
|  | @ -236,20 +249,54 @@ async def stream_from_peer( | |||
|                 assert msg is not None | ||||
|                 print(msg) | ||||
| 
 | ||||
|     # NOTE: cancellation of the (sleeper) peer should always | ||||
|     # cause a `ContextCancelled` raise in this streaming | ||||
|     # actor. | ||||
|     except ContextCancelled as ctxc: | ||||
|         ctxerr = ctxc | ||||
|     # NOTE: cancellation of the (sleeper) peer should always cause | ||||
|     # a `ContextCancelled` raise in this streaming actor. | ||||
|     except ContextCancelled as _ctxc: | ||||
|         ctxc = _ctxc | ||||
| 
 | ||||
|         assert peer_ctx._remote_error is ctxerr | ||||
|         assert peer_ctx._remote_error.msgdata == ctxerr.msgdata | ||||
|         # print("TRYING TO ENTER PAUSSE!!!") | ||||
|         # await tractor.pause(shield=True) | ||||
|         re: ContextCancelled = peer_ctx._remote_error | ||||
| 
 | ||||
|         # XXX YES XXX, remote error should be unpacked only once! | ||||
|         assert ( | ||||
|             re | ||||
|             is | ||||
|             peer_ctx.maybe_error | ||||
|             is | ||||
|             ctxc | ||||
|             is | ||||
|             peer_ctx._local_error | ||||
|         ) | ||||
|         # NOTE: these errors should all match! | ||||
|         #   ------ - ------ | ||||
|         # XXX [2024-05-03] XXX | ||||
|         #   ------ - ------ | ||||
|         # broke this due to a re-raise inside `.msg._ops.drain_to_final_msg()` | ||||
|         # where the `Error()` msg was directly raising the ctxc | ||||
|         # instead of just returning up to the caller inside | ||||
|         # `Context.return()` which would results in a diff instance of | ||||
|         # the same remote error bubbling out above vs what was | ||||
|         # already unpacked and set inside `Context. | ||||
|         assert ( | ||||
|             peer_ctx._remote_error.msgdata | ||||
|             == | ||||
|             ctxc.msgdata | ||||
|         ) | ||||
|         # ^-XXX-^ notice the data is of course the exact same.. so | ||||
|         # the above larger assert makes sense to also always be true! | ||||
| 
 | ||||
|         # XXX YES XXX, bc should be exact same msg instances | ||||
|         assert peer_ctx._remote_error._ipc_msg is ctxc._ipc_msg | ||||
| 
 | ||||
|         # XXX NO XXX, bc new one always created for property accesss | ||||
|         assert peer_ctx._remote_error.ipc_msg != ctxc.ipc_msg | ||||
| 
 | ||||
|         # the peer ctx is the canceller even though it's canceller | ||||
|         # is the "canceller" XD | ||||
|         assert peer_name in peer_ctx.canceller | ||||
| 
 | ||||
|         assert "canceller" in ctxerr.canceller | ||||
|         assert "canceller" in ctxc.canceller | ||||
| 
 | ||||
|         # caller peer should not be the cancel requester | ||||
|         assert not ctx.cancel_called | ||||
|  | @ -273,12 +320,13 @@ async def stream_from_peer( | |||
| 
 | ||||
|         # TODO / NOTE `.canceller` won't have been set yet | ||||
|         # here because that machinery is inside | ||||
|         # `.open_context().__aexit__()` BUT, if we had | ||||
|         # `Portal.open_context().__aexit__()` BUT, if we had | ||||
|         # a way to know immediately (from the last | ||||
|         # checkpoint) that cancellation was due to | ||||
|         # a remote, we COULD assert this here..see, | ||||
|         # https://github.com/goodboy/tractor/issues/368 | ||||
|         # | ||||
|         # await tractor.pause() | ||||
|         # assert 'canceller' in ctx.canceller | ||||
| 
 | ||||
|         # root/parent actor task should NEVER HAVE cancelled us! | ||||
|  | @ -382,12 +430,13 @@ def test_peer_canceller( | |||
|             try: | ||||
|                 async with ( | ||||
|                     sleeper.open_context( | ||||
|                         sleep_forever, | ||||
|                         open_stream_then_sleep_forever, | ||||
|                         expect_ctxc=True, | ||||
|                     ) as (sleeper_ctx, sent), | ||||
| 
 | ||||
|                     just_caller.open_context( | ||||
|                         stream_from_peer, | ||||
|                         debug_mode=debug_mode, | ||||
|                     ) as (caller_ctx, sent), | ||||
| 
 | ||||
|                     canceller.open_context( | ||||
|  | @ -413,10 +462,11 @@ def test_peer_canceller( | |||
| 
 | ||||
|                     # should always raise since this root task does | ||||
|                     # not request the sleeper cancellation ;) | ||||
|                     except ContextCancelled as ctxerr: | ||||
|                     except ContextCancelled as _ctxc: | ||||
|                         ctxc = _ctxc | ||||
|                         print( | ||||
|                             'CAUGHT REMOTE CONTEXT CANCEL\n\n' | ||||
|                             f'{ctxerr}\n' | ||||
|                             f'{ctxc}\n' | ||||
|                         ) | ||||
| 
 | ||||
|                         # canceller and caller peers should not | ||||
|  | @ -427,7 +477,7 @@ def test_peer_canceller( | |||
|                         # we were not the actor, our peer was | ||||
|                         assert not sleeper_ctx.cancel_acked | ||||
| 
 | ||||
|                         assert ctxerr.canceller[0] == 'canceller' | ||||
|                         assert ctxc.canceller[0] == 'canceller' | ||||
| 
 | ||||
|                         # XXX NOTE XXX: since THIS `ContextCancelled` | ||||
|                         # HAS NOT YET bubbled up to the | ||||
|  | @ -438,7 +488,7 @@ def test_peer_canceller( | |||
| 
 | ||||
|                         # CASE_1: error-during-ctxc-handling, | ||||
|                         if error_during_ctxerr_handling: | ||||
|                             raise RuntimeError('Simulated error during teardown') | ||||
|                             raise RuntimeError('Simulated RTE re-raise during ctxc handling') | ||||
| 
 | ||||
|                         # CASE_2: standard teardown inside in `.open_context()` block | ||||
|                         raise | ||||
|  | @ -503,6 +553,9 @@ def test_peer_canceller( | |||
|                 #   should be cancelled by US. | ||||
|                 # | ||||
|                 if error_during_ctxerr_handling: | ||||
|                     print(f'loc_err: {_loc_err}\n') | ||||
|                     assert isinstance(loc_err, RuntimeError) | ||||
| 
 | ||||
|                     # since we do a rte reraise above, the | ||||
|                     # `.open_context()` error handling should have | ||||
|                     # raised a local rte, thus the internal | ||||
|  | @ -511,9 +564,6 @@ def test_peer_canceller( | |||
|                     # a `trio.Cancelled` due to a local | ||||
|                     # `._scope.cancel()` call. | ||||
|                     assert not sleeper_ctx._scope.cancelled_caught | ||||
| 
 | ||||
|                     assert isinstance(loc_err, RuntimeError) | ||||
|                     print(f'_loc_err: {_loc_err}\n') | ||||
|                     # assert sleeper_ctx._local_error is _loc_err | ||||
|                     # assert sleeper_ctx._local_error is _loc_err | ||||
|                     assert not ( | ||||
|  | @ -550,10 +600,13 @@ def test_peer_canceller( | |||
| 
 | ||||
|                         else:  # the other 2 ctxs | ||||
|                             assert ( | ||||
|                                 isinstance(re, ContextCancelled) | ||||
|                                 and ( | ||||
|                                     re.canceller | ||||
|                                     == | ||||
|                                     canceller.channel.uid | ||||
|                                 ) | ||||
|                             ) | ||||
| 
 | ||||
|                     # since the sleeper errors while handling a | ||||
|                     # peer-cancelled (by ctxc) scenario, we expect | ||||
|  | @ -801,8 +854,7 @@ async def serve_subactors( | |||
|     async with open_nursery() as an: | ||||
| 
 | ||||
|         # sanity | ||||
|         if debug_mode: | ||||
|             assert tractor._state.debug_mode() | ||||
|         assert tractor._state.debug_mode() == debug_mode | ||||
| 
 | ||||
|         await ctx.started(peer_name) | ||||
|         async with ctx.open_stream() as ipc: | ||||
|  | @ -1081,7 +1133,6 @@ def test_peer_spawns_and_cancels_service_subactor( | |||
|                             '-> root checking `client_ctx.result()`,\n' | ||||
|                             f'-> checking that sub-spawn {peer_name} is down\n' | ||||
|                         ) | ||||
|                     # else: | ||||
| 
 | ||||
|                     try: | ||||
|                         res = await client_ctx.result(hide_tb=False) | ||||
|  |  | |||
|  | @ -38,7 +38,7 @@ async def test_self_is_registered_localportal(reg_addr): | |||
|     "Verify waiting on the arbiter to register itself using a local portal." | ||||
|     actor = tractor.current_actor() | ||||
|     assert actor.is_arbiter | ||||
|     async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         assert isinstance(portal, tractor._portal.LocalPortal) | ||||
| 
 | ||||
|         with trio.fail_after(0.2): | ||||
|  |  | |||
|  | @ -32,7 +32,7 @@ def test_abort_on_sigint(daemon): | |||
| @tractor_test | ||||
| async def test_cancel_remote_arbiter(daemon, reg_addr): | ||||
|     assert not tractor.current_actor().is_arbiter | ||||
|     async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|     async with tractor.get_registry(*reg_addr) as portal: | ||||
|         await portal.cancel_actor() | ||||
| 
 | ||||
|     time.sleep(0.1) | ||||
|  | @ -41,7 +41,7 @@ async def test_cancel_remote_arbiter(daemon, reg_addr): | |||
| 
 | ||||
|     # no arbiter socket should exist | ||||
|     with pytest.raises(OSError): | ||||
|         async with tractor.get_arbiter(*reg_addr) as portal: | ||||
|         async with tractor.get_registry(*reg_addr) as portal: | ||||
|             pass | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,364 @@ | |||
| ''' | ||||
| Audit sub-sys APIs from `.msg._ops` | ||||
| mostly for ensuring correct `contextvars` | ||||
| related settings around IPC contexts. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     Struct, | ||||
| ) | ||||
| import pytest | ||||
| import trio | ||||
| 
 | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     Context, | ||||
|     MsgTypeError, | ||||
|     current_ipc_ctx, | ||||
|     Portal, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ops as msgops, | ||||
|     Return, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _codec, | ||||
| ) | ||||
| from tractor.msg.types import ( | ||||
|     log, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| class PldMsg( | ||||
|     Struct, | ||||
| 
 | ||||
|     # TODO: with multiple structs in-spec we need to tag them! | ||||
|     # -[ ] offer a built-in `PldMsg` type to inherit from which takes | ||||
|     #      case of these details? | ||||
|     # | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag=True, | ||||
|     # tag_field='msg_type', | ||||
| ): | ||||
|     field: str | ||||
| 
 | ||||
| 
 | ||||
| maybe_msg_spec = PldMsg|None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def maybe_expect_raises( | ||||
|     raises: BaseException|None = None, | ||||
|     ensure_in_message: list[str]|None = None, | ||||
|     post_mortem: bool = False, | ||||
|     timeout: int = 3, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Async wrapper for ensuring errors propagate from the inner scope. | ||||
| 
 | ||||
|     ''' | ||||
|     if tractor._state.debug_mode(): | ||||
|         timeout += 999 | ||||
| 
 | ||||
|     with trio.fail_after(timeout): | ||||
|         try: | ||||
|             yield | ||||
|         except BaseException as _inner_err: | ||||
|             inner_err = _inner_err | ||||
|             # wasn't-expected to error.. | ||||
|             if raises is None: | ||||
|                 raise | ||||
| 
 | ||||
|             else: | ||||
|                 assert type(inner_err) is raises | ||||
| 
 | ||||
|                 # maybe check for error txt content | ||||
|                 if ensure_in_message: | ||||
|                     part: str | ||||
|                     err_repr: str = repr(inner_err) | ||||
|                     for part in ensure_in_message: | ||||
|                         for i, arg in enumerate(inner_err.args): | ||||
|                             if part in err_repr: | ||||
|                                 break | ||||
|                         # if part never matches an arg, then we're | ||||
|                         # missing a match. | ||||
|                         else: | ||||
|                             raise ValueError( | ||||
|                                 'Failed to find error message content?\n\n' | ||||
|                                 f'expected: {ensure_in_message!r}\n' | ||||
|                                 f'part: {part!r}\n\n' | ||||
|                                 f'{inner_err.args}' | ||||
|                         ) | ||||
| 
 | ||||
|                 if post_mortem: | ||||
|                     await tractor.post_mortem() | ||||
| 
 | ||||
|         else: | ||||
|             if raises: | ||||
|                 raise RuntimeError( | ||||
|                     f'Expected a {raises.__name__!r} to be raised?' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
| @tractor.context( | ||||
|     pld_spec=maybe_msg_spec, | ||||
| ) | ||||
| async def child( | ||||
|     ctx: Context, | ||||
|     started_value: int|PldMsg|None, | ||||
|     return_value: str|None, | ||||
|     validate_pld_spec: bool, | ||||
|     raise_on_started_mte: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Call ``Context.started()`` more then once (an error). | ||||
| 
 | ||||
|     ''' | ||||
|     expect_started_mte: bool = started_value == 10 | ||||
| 
 | ||||
|     # sanaity check that child RPC context is the current one | ||||
|     curr_ctx: Context = current_ipc_ctx() | ||||
|     assert ctx is curr_ctx | ||||
| 
 | ||||
|     rx: msgops.PldRx = ctx._pld_rx | ||||
|     curr_pldec: _codec.MsgDec = rx.pld_dec | ||||
| 
 | ||||
|     ctx_meta: dict = getattr( | ||||
|         child, | ||||
|         '_tractor_context_meta', | ||||
|         None, | ||||
|     ) | ||||
|     if ctx_meta: | ||||
|         assert ( | ||||
|             ctx_meta['pld_spec'] | ||||
|             is curr_pldec.spec | ||||
|             is curr_pldec.pld_spec | ||||
|         ) | ||||
| 
 | ||||
|     # 2 cases: hdndle send-side and recv-only validation | ||||
|     # - when `raise_on_started_mte == True`, send validate | ||||
|     # - else, parent-recv-side only validation | ||||
|     mte: MsgTypeError|None = None | ||||
|     try: | ||||
|         await ctx.started( | ||||
|             value=started_value, | ||||
|             validate_pld_spec=validate_pld_spec, | ||||
|         ) | ||||
| 
 | ||||
|     except MsgTypeError as _mte: | ||||
|         mte = _mte | ||||
|         log.exception('started()` raised an MTE!\n') | ||||
|         if not expect_started_mte: | ||||
|             raise RuntimeError( | ||||
|                 'Child-ctx-task SHOULD NOT HAVE raised an MTE for\n\n' | ||||
|                 f'{started_value!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         boxed_div: str = '------ - ------' | ||||
|         assert boxed_div not in mte._message | ||||
|         assert boxed_div not in mte.tb_str | ||||
|         assert boxed_div not in repr(mte) | ||||
|         assert boxed_div not in str(mte) | ||||
|         mte_repr: str = repr(mte) | ||||
|         for line in mte.message.splitlines(): | ||||
|             assert line in mte_repr | ||||
| 
 | ||||
|         # since this is a *local error* there should be no | ||||
|         # boxed traceback content! | ||||
|         assert not mte.tb_str | ||||
| 
 | ||||
|         # propagate to parent? | ||||
|         if raise_on_started_mte: | ||||
|             raise | ||||
| 
 | ||||
|     # no-send-side-error fallthrough | ||||
|     if ( | ||||
|         validate_pld_spec | ||||
|         and | ||||
|         expect_started_mte | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'Child-ctx-task SHOULD HAVE raised an MTE for\n\n' | ||||
|             f'{started_value!r}\n' | ||||
|         ) | ||||
| 
 | ||||
|     assert ( | ||||
|         not expect_started_mte | ||||
|         or | ||||
|         not validate_pld_spec | ||||
|     ) | ||||
| 
 | ||||
|     # if wait_for_parent_to_cancel: | ||||
|     #     ... | ||||
|     # | ||||
|     # ^-TODO-^ logic for diff validation policies on each side: | ||||
|     # | ||||
|     # -[ ] ensure that if we don't validate on the send | ||||
|     #   side, that we are eventually error-cancelled by our | ||||
|     #   parent due to the bad `Started` payload! | ||||
|     # -[ ] the boxed error should be srced from the parent's | ||||
|     #   runtime NOT ours! | ||||
|     # -[ ] we should still error on bad `return_value`s | ||||
|     #   despite the parent not yet error-cancelling us? | ||||
|     #   |_ how do we want the parent side to look in that | ||||
|     #     case? | ||||
|     #     -[ ] maybe the equiv of "during handling of the | ||||
|     #       above error another occurred" for the case where | ||||
|     #       the parent sends a MTE to this child and while | ||||
|     #       waiting for the child to terminate it gets back | ||||
|     #       the MTE for this case? | ||||
|     # | ||||
| 
 | ||||
|     # XXX should always fail on recv side since we can't | ||||
|     # really do much else beside terminate and relay the | ||||
|     # msg-type-error from this RPC task ;) | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'return_value', | ||||
|     [ | ||||
|         'yo', | ||||
|         None, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'return[invalid-"yo"]', | ||||
|         'return[valid-None]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'started_value', | ||||
|     [ | ||||
|         10, | ||||
|         PldMsg(field='yo'), | ||||
|     ], | ||||
|     ids=[ | ||||
|         'Started[invalid-10]', | ||||
|         'Started[valid-PldMsg]', | ||||
|     ], | ||||
| ) | ||||
| @pytest.mark.parametrize( | ||||
|     'pld_check_started_value', | ||||
|     [ | ||||
|         True, | ||||
|         False, | ||||
|     ], | ||||
|     ids=[ | ||||
|         'check-started-pld', | ||||
|         'no-started-pld-validate', | ||||
|     ], | ||||
| ) | ||||
| def test_basic_payload_spec( | ||||
|     debug_mode: bool, | ||||
|     loglevel: str, | ||||
|     return_value: str|None, | ||||
|     started_value: int|PldMsg, | ||||
|     pld_check_started_value: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Validate the most basic `PldRx` msg-type-spec semantics around | ||||
|     a IPC `Context` endpoint start, started-sync, and final return | ||||
|     value depending on set payload types and the currently applied | ||||
|     pld-spec. | ||||
| 
 | ||||
|     ''' | ||||
|     invalid_return: bool = return_value == 'yo' | ||||
|     invalid_started: bool = started_value == 10 | ||||
| 
 | ||||
|     async def main(): | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|             loglevel=loglevel, | ||||
|         ) as an: | ||||
|             p: Portal = await an.start_actor( | ||||
|                 'child', | ||||
|                 enable_modules=[__name__], | ||||
|             ) | ||||
| 
 | ||||
|             # since not opened yet. | ||||
|             assert current_ipc_ctx() is None | ||||
| 
 | ||||
|             if invalid_started: | ||||
|                 msg_type_str: str = 'Started' | ||||
|                 bad_value: int = 10 | ||||
|             elif invalid_return: | ||||
|                 msg_type_str: str = 'Return' | ||||
|                 bad_value: str = 'yo' | ||||
|             else: | ||||
|                 # XXX but should never be used below then.. | ||||
|                 msg_type_str: str = '' | ||||
|                 bad_value: str = '' | ||||
| 
 | ||||
|             maybe_mte: MsgTypeError|None = None | ||||
|             should_raise: Exception|None = ( | ||||
|                 MsgTypeError if ( | ||||
|                     invalid_return | ||||
|                     or | ||||
|                     invalid_started | ||||
|                 ) else None | ||||
|             ) | ||||
|             async with ( | ||||
|                 maybe_expect_raises( | ||||
|                     raises=should_raise, | ||||
|                     ensure_in_message=[ | ||||
|                         f"invalid `{msg_type_str}` msg payload", | ||||
|                         f'{bad_value}', | ||||
|                         f'has type {type(bad_value)!r}', | ||||
|                         'not match type-spec', | ||||
|                         f'`{msg_type_str}.pld: PldMsg|NoneType`', | ||||
|                     ], | ||||
|                     # only for debug | ||||
|                     # post_mortem=True, | ||||
|                 ), | ||||
|                 p.open_context( | ||||
|                     child, | ||||
|                     return_value=return_value, | ||||
|                     started_value=started_value, | ||||
|                     validate_pld_spec=pld_check_started_value, | ||||
|                 ) as (ctx, first), | ||||
|             ): | ||||
|                 # now opened with 'child' sub | ||||
|                 assert current_ipc_ctx() is ctx | ||||
| 
 | ||||
|                 assert type(first) is PldMsg | ||||
|                 assert first.field == 'yo' | ||||
| 
 | ||||
|                 try: | ||||
|                     res: None|PldMsg = await ctx.result(hide_tb=False) | ||||
|                     assert res is None | ||||
|                 except MsgTypeError as mte: | ||||
|                     maybe_mte = mte | ||||
|                     if not invalid_return: | ||||
|                         raise | ||||
| 
 | ||||
|                     # expected this invalid `Return.pld` so audit | ||||
|                     # the error state + meta-data | ||||
|                     assert mte.expected_msg_type is Return | ||||
|                     assert mte.cid == ctx.cid | ||||
|                     mte_repr: str = repr(mte) | ||||
|                     for line in mte.message.splitlines(): | ||||
|                         assert line in mte_repr | ||||
| 
 | ||||
|                     assert mte.tb_str | ||||
|                     # await tractor.pause(shield=True) | ||||
| 
 | ||||
|                     # verify expected remote mte deats | ||||
|                     assert ctx._local_error is None | ||||
|                     assert ( | ||||
|                         mte is | ||||
|                         ctx._remote_error is | ||||
|                         ctx.maybe_error is | ||||
|                         ctx.outcome | ||||
|                     ) | ||||
| 
 | ||||
|             if should_raise is None: | ||||
|                 assert maybe_mte is None | ||||
| 
 | ||||
|             await p.cancel_actor() | ||||
| 
 | ||||
|     trio.run(main) | ||||
|  | @ -0,0 +1,248 @@ | |||
| ''' | ||||
| Special attention cases for using "infect `asyncio`" mode from a root | ||||
| actor; i.e. not using a std `trio.run()` bootstrap. | ||||
| 
 | ||||
| ''' | ||||
| import asyncio | ||||
| from functools import partial | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| import tractor | ||||
| from tractor import ( | ||||
|     to_asyncio, | ||||
| ) | ||||
| from tests.test_infected_asyncio import ( | ||||
|     aio_echo_server, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'raise_error_mid_stream', | ||||
|     [ | ||||
|         False, | ||||
|         Exception, | ||||
|         KeyboardInterrupt, | ||||
|     ], | ||||
|     ids='raise_error={}'.format, | ||||
| ) | ||||
| def test_infected_root_actor( | ||||
|     raise_error_mid_stream: bool|Exception, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     Verify you can run the `tractor` runtime with `Actor.is_infected_aio() == True` | ||||
|     in the root actor. | ||||
| 
 | ||||
|     ''' | ||||
|     async def _trio_main(): | ||||
|         with trio.fail_after(2 if not debug_mode else 999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=debug_mode, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|                 to_asyncio.open_channel_from( | ||||
|                     aio_echo_server, | ||||
|                 ) as (first, chan), | ||||
|             ): | ||||
|                 assert first == 'start' | ||||
| 
 | ||||
|                 for i in range(1000): | ||||
|                     await chan.send(i) | ||||
|                     out = await chan.receive() | ||||
|                     assert out == i | ||||
|                     print(f'asyncio echoing {i}') | ||||
| 
 | ||||
|                     if ( | ||||
|                         raise_error_mid_stream | ||||
|                         and | ||||
|                         i == 500 | ||||
|                     ): | ||||
|                         raise raise_error_mid_stream | ||||
| 
 | ||||
|                     if out is None: | ||||
|                         try: | ||||
|                             out = await chan.receive() | ||||
|                         except trio.EndOfChannel: | ||||
|                             break | ||||
|                         else: | ||||
|                             raise RuntimeError( | ||||
|                                 'aio channel never stopped?' | ||||
|                             ) | ||||
| 
 | ||||
|     if raise_error_mid_stream: | ||||
|         with pytest.raises(raise_error_mid_stream): | ||||
|             tractor.to_asyncio.run_as_asyncio_guest( | ||||
|                 trio_main=_trio_main, | ||||
|             ) | ||||
|     else: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| async def sync_and_err( | ||||
|     # just signature placeholders for compat with | ||||
|     # ``to_asyncio.open_channel_from()`` | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     ev: asyncio.Event, | ||||
| 
 | ||||
| ): | ||||
|     if to_trio: | ||||
|         to_trio.send_nowait('start') | ||||
| 
 | ||||
|     await ev.wait() | ||||
|     raise RuntimeError('asyncio-side') | ||||
| 
 | ||||
| 
 | ||||
| @pytest.mark.parametrize( | ||||
|     'aio_err_trigger', | ||||
|     [ | ||||
|         'before_start_point', | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ], | ||||
|     ids='aio_err_triggered={}'.format | ||||
| ) | ||||
| def test_trio_prestarted_task_bubbles( | ||||
|     aio_err_trigger: str, | ||||
| 
 | ||||
|     # conftest wide | ||||
|     loglevel: str, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     async def pre_started_err( | ||||
|         raise_err: bool = False, | ||||
|         pre_sleep: float|None = None, | ||||
|         aio_trigger: asyncio.Event|None = None, | ||||
|         task_status=trio.TASK_STATUS_IGNORED, | ||||
|     ): | ||||
|         ''' | ||||
|         Maybe pre-started error then sleep. | ||||
| 
 | ||||
|         ''' | ||||
|         if pre_sleep is not None: | ||||
|             print(f'Sleeping from trio for {pre_sleep!r}s !') | ||||
|             await trio.sleep(pre_sleep) | ||||
| 
 | ||||
|         # signal aio-task to raise JUST AFTER this task | ||||
|         # starts but has not yet `.started()` | ||||
|         if aio_trigger: | ||||
|             print('Signalling aio-task to raise from `trio`!!') | ||||
|             aio_trigger.set() | ||||
| 
 | ||||
|         if raise_err: | ||||
|             print('Raising from trio!') | ||||
|             raise TypeError('trio-side') | ||||
| 
 | ||||
|         task_status.started() | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     async def _trio_main(): | ||||
|         # with trio.fail_after(2): | ||||
|         with trio.fail_after(999): | ||||
|             first: str | ||||
|             chan: to_asyncio.LinkedTaskChannel | ||||
|             aio_ev = asyncio.Event() | ||||
| 
 | ||||
|             async with ( | ||||
|                 tractor.open_root_actor( | ||||
|                     debug_mode=False, | ||||
|                     loglevel=loglevel, | ||||
|                 ), | ||||
|             ): | ||||
|                 # TODO, tests for this with 3.13 egs? | ||||
|                 # from tractor.devx import open_crash_handler | ||||
|                 # with open_crash_handler(): | ||||
|                 async with ( | ||||
|                     # where we'll start a sub-task that errors BEFORE | ||||
|                     # calling `.started()` such that the error should | ||||
|                     # bubble before the guest run terminates! | ||||
|                     trio.open_nursery() as tn, | ||||
| 
 | ||||
|                     # THEN start an infect task which should error just | ||||
|                     # after the trio-side's task does. | ||||
|                     to_asyncio.open_channel_from( | ||||
|                         partial( | ||||
|                             sync_and_err, | ||||
|                             ev=aio_ev, | ||||
|                         ) | ||||
|                     ) as (first, chan), | ||||
|                 ): | ||||
| 
 | ||||
|                     for i in range(5): | ||||
|                         pre_sleep: float|None = None | ||||
|                         last_iter: bool = (i == 4) | ||||
| 
 | ||||
|                         # TODO, missing cases? | ||||
|                         # -[ ] error as well on | ||||
|                         #    'after_start_point' case as well for | ||||
|                         #    another case? | ||||
|                         raise_err: bool = False | ||||
| 
 | ||||
|                         if last_iter: | ||||
|                             raise_err: bool = True | ||||
| 
 | ||||
|                             # trigger aio task to error on next loop | ||||
|                             # tick/checkpoint | ||||
|                             if aio_err_trigger == 'before_start_point': | ||||
|                                 aio_ev.set() | ||||
| 
 | ||||
|                             pre_sleep: float = 0 | ||||
| 
 | ||||
|                         await tn.start( | ||||
|                             pre_started_err, | ||||
|                             raise_err, | ||||
|                             pre_sleep, | ||||
|                             (aio_ev if ( | ||||
|                                     aio_err_trigger == 'after_trio_task_starts' | ||||
|                                     and | ||||
|                                     last_iter | ||||
|                                 ) else None | ||||
|                             ), | ||||
|                         ) | ||||
| 
 | ||||
|                         if ( | ||||
|                             aio_err_trigger == 'after_start_point' | ||||
|                             and | ||||
|                             last_iter | ||||
|                         ): | ||||
|                             aio_ev.set() | ||||
| 
 | ||||
|     with pytest.raises( | ||||
|         expected_exception=ExceptionGroup, | ||||
|     ) as excinfo: | ||||
|         tractor.to_asyncio.run_as_asyncio_guest( | ||||
|             trio_main=_trio_main, | ||||
|         ) | ||||
| 
 | ||||
|     eg = excinfo.value | ||||
|     rte_eg, rest_eg = eg.split(RuntimeError) | ||||
| 
 | ||||
|     # ensure the trio-task's error bubbled despite the aio-side | ||||
|     # having (maybe) errored first. | ||||
|     if aio_err_trigger in ( | ||||
|         'after_trio_task_starts', | ||||
|         'after_start_point', | ||||
|     ): | ||||
|         assert len(errs := rest_eg.exceptions) == 1 | ||||
|         typerr = errs[0] | ||||
|         assert ( | ||||
|             type(typerr) is TypeError | ||||
|             and | ||||
|             'trio-side' in typerr.args | ||||
|         ) | ||||
| 
 | ||||
|     # when aio errors BEFORE (last) trio task is scheduled, we should | ||||
|     # never see anythinb but the aio-side. | ||||
|     else: | ||||
|         assert len(rtes := rte_eg.exceptions) == 1 | ||||
|         assert 'asyncio-side' in rtes[0].args[0] | ||||
|  | @ -2,7 +2,9 @@ | |||
| Spawning basics | ||||
| 
 | ||||
| """ | ||||
| from typing import Optional | ||||
| from typing import ( | ||||
|     Any, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
|  | @ -25,13 +27,11 @@ async def spawn( | |||
|     async with tractor.open_root_actor( | ||||
|         arbiter_addr=reg_addr, | ||||
|     ): | ||||
| 
 | ||||
|         actor = tractor.current_actor() | ||||
|         assert actor.is_arbiter == is_arbiter | ||||
|         data = data_to_pass_down | ||||
| 
 | ||||
|         if actor.is_arbiter: | ||||
| 
 | ||||
|             async with tractor.open_nursery() as nursery: | ||||
| 
 | ||||
|                 # forks here | ||||
|  | @ -95,7 +95,9 @@ async def test_movie_theatre_convo(start_method): | |||
|         await portal.cancel_actor() | ||||
| 
 | ||||
| 
 | ||||
| async def cellar_door(return_value: Optional[str]): | ||||
| async def cellar_door( | ||||
|     return_value: str|None, | ||||
| ): | ||||
|     return return_value | ||||
| 
 | ||||
| 
 | ||||
|  | @ -105,16 +107,18 @@ async def cellar_door(return_value: Optional[str]): | |||
| ) | ||||
| @tractor_test | ||||
| async def test_most_beautiful_word( | ||||
|     start_method, | ||||
|     return_value | ||||
|     start_method: str, | ||||
|     return_value: Any, | ||||
|     debug_mode: bool, | ||||
| ): | ||||
|     ''' | ||||
|     The main ``tractor`` routine. | ||||
| 
 | ||||
|     ''' | ||||
|     with trio.fail_after(1): | ||||
|         async with tractor.open_nursery() as n: | ||||
| 
 | ||||
|         async with tractor.open_nursery( | ||||
|             debug_mode=debug_mode, | ||||
|         ) as n: | ||||
|             portal = await n.run_in_actor( | ||||
|                 cellar_door, | ||||
|                 return_value=return_value, | ||||
|  |  | |||
|  | @ -271,7 +271,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | |||
|                         # the faster subtask was cancelled | ||||
|                         break | ||||
| 
 | ||||
|                 # await tractor.breakpoint() | ||||
|                 # await tractor.pause() | ||||
|                 # await stream.receive() | ||||
|                 print(f'final value: {value}') | ||||
| 
 | ||||
|  |  | |||
|  | @ -3,6 +3,10 @@ Reminders for oddities in `trio` that we need to stay aware of and/or | |||
| want to see changed. | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| 
 | ||||
| import pytest | ||||
| import trio | ||||
| from trio import TaskStatus | ||||
|  | @ -80,3 +84,115 @@ def test_stashed_child_nursery(use_start_soon): | |||
| 
 | ||||
|     with pytest.raises(NameError): | ||||
|         trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
@pytest.mark.parametrize(
    ('unmask_from_canc', 'canc_from_finally'),
    [
        (True, False),
        (True, True),
        # NOTE: when we neither unmask nor cancel-from-finally the
        # masked enter-error is never re-raised, hence the xfail.
        pytest.param(False, True,
                     marks=pytest.mark.xfail(reason="never raises!")
        ),
    ],
    # TODO, ask ronny how to impl this .. XD
    # ids='unmask_from_canc={0}, canc_from_finally={1}',#.format,
)
def test_acm_embedded_nursery_propagates_enter_err(
    canc_from_finally: bool,
    unmask_from_canc: bool,
):
    '''
    Demo how a masking `trio.Cancelled` could be handled by unmasking from the
    `.__context__` field when a user (by accident) re-raises from a `finally:`.

    '''
    import tractor

    @acm
    async def maybe_raise_from_masking_exc(
        tn: trio.Nursery,
        unmask_from: BaseException|None = trio.Cancelled

        # TODO, maybe offer a collection?
        # unmask_from: set[BaseException] = {
        #     trio.Cancelled,
        # },
    ):
        # no target exc-type to unmask from: plain passthrough.
        if not unmask_from:
            yield
            return

        try:
            yield
        except* unmask_from as be_eg:

            # TODO, if we offer `unmask_from: set`
            # for masker_exc_type in unmask_from:

            matches, rest = be_eg.split(unmask_from)
            if not matches:
                raise

            # scan each masking exc for a non-`unmask_from`
            # `.__context__` (the exc it masked) and re-raise that
            # original error instead of the masker.
            for exc_match in be_eg.exceptions:
                if (
                    (exc_ctx := exc_match.__context__)
                    and
                    type(exc_ctx) not in {
                        # trio.Cancelled,  # always by default?
                        unmask_from,
                    }
                ):
                    exc_ctx.add_note(
                        f'\n'
                        f'WARNING: the above error was masked by a {unmask_from!r} !?!\n'
                        f'Are you always cancelling? Say from a `finally:` ?\n\n'

                        f'{tn!r}'
                    )
                    raise exc_ctx from exc_match


    @acm
    async def wraps_tn_that_always_cancels():
        # nursery + unmasker pair; optionally cancels the nursery
        # from its `finally:` block to induce the masking scenario.
        async with (
            trio.open_nursery() as tn,
            maybe_raise_from_masking_exc(
                tn=tn,
                unmask_from=(
                    trio.Cancelled
                    if unmask_from_canc
                    else None
                ),
            )
        ):
            try:
                yield tn
            finally:
                if canc_from_finally:
                    tn.cancel_scope.cancel()
                    # checkpoint so the `Cancelled` is actually
                    # raised here, masking any in-flight body error.
                    await trio.lowlevel.checkpoint()

    async def _main():
        with tractor.devx.open_crash_handler() as bxerr:
            assert not bxerr.value

            async with (
                wraps_tn_that_always_cancels() as tn,
            ):
                assert not tn.cancel_scope.cancel_called
                # the "user error" which gets masked by the
                # finally-induced cancel above.
                assert 0

        # crash handler should have captured the original assert err.
        assert (
            (err := bxerr.value)
            and
            type(err) is AssertionError
        )

    with pytest.raises(ExceptionGroup) as excinfo:
        trio.run(_main)

    # the original `AssertionError` must be present in the raised eg.
    eg: ExceptionGroup = excinfo.value
    assert_eg, rest_eg = eg.split(AssertionError)

    assert len(assert_eg.exceptions) == 1
|  |  | |||
|  | @ -31,7 +31,7 @@ from ._streaming import ( | |||
|     stream as stream, | ||||
| ) | ||||
| from ._discovery import ( | ||||
|     get_arbiter as get_arbiter, | ||||
|     get_registry as get_registry, | ||||
|     find_actor as find_actor, | ||||
|     wait_for_actor as wait_for_actor, | ||||
|     query_actor as query_actor, | ||||
|  | @ -43,11 +43,14 @@ from ._supervise import ( | |||
| from ._state import ( | ||||
|     current_actor as current_actor, | ||||
|     is_root_process as is_root_process, | ||||
|     current_ipc_ctx as current_ipc_ctx, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     RemoteActorError as RemoteActorError, | ||||
|     ModuleNotExposed as ModuleNotExposed, | ||||
|     ContextCancelled as ContextCancelled, | ||||
|     ModuleNotExposed as ModuleNotExposed, | ||||
|     MsgTypeError as MsgTypeError, | ||||
|     RemoteActorError as RemoteActorError, | ||||
|     TransportClosed as TransportClosed, | ||||
| ) | ||||
| from .devx import ( | ||||
|     breakpoint as breakpoint, | ||||
|  |  | |||
							
								
								
									
										1688
									
								
								tractor/_context.py
								
								
								
								
							
							
						
						
									
										1688
									
								
								tractor/_context.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -26,8 +26,8 @@ from typing import ( | |||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| import warnings | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from .trionics import gather_contexts | ||||
| from ._ipc import _connect_chan, Channel | ||||
| from ._portal import ( | ||||
|  | @ -40,11 +40,13 @@ from ._state import ( | |||
|     _runtime_vars, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_registry( | ||||
|     host: str, | ||||
|  | @ -56,14 +58,12 @@ async def get_registry( | |||
| ]: | ||||
|     ''' | ||||
|     Return a portal instance connected to a local or remote | ||||
|     arbiter. | ||||
|     registry-service actor; if a connection already exists re-use it | ||||
|     (presumably to call a `.register_actor()` registry runtime RPC | ||||
|     ep). | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
| 
 | ||||
|     if not actor: | ||||
|         raise RuntimeError("No actor instance has been defined yet?") | ||||
| 
 | ||||
|     actor: Actor = current_actor() | ||||
|     if actor.is_registrar: | ||||
|         # we're already the arbiter | ||||
|         # (likely a re-entrant call from the arbiter actor) | ||||
|  | @ -72,6 +72,8 @@ async def get_registry( | |||
|             Channel((host, port)) | ||||
|         ) | ||||
|     else: | ||||
|         # TODO: try to look pre-existing connection from | ||||
|         # `Actor._peers` and use it instead? | ||||
|         async with ( | ||||
|             _connect_chan(host, port) as chan, | ||||
|             open_portal(chan) as regstr_ptl, | ||||
|  | @ -80,19 +82,6 @@ async def get_registry( | |||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| # TODO: deprecate and this remove _arbiter form! | ||||
| @acm | ||||
| async def get_arbiter(*args, **kwargs): | ||||
|     warnings.warn( | ||||
|         '`tractor.get_arbiter()` is now deprecated!\n' | ||||
|         'Use `.get_registry()` instead!', | ||||
|         DeprecationWarning, | ||||
|         stacklevel=2, | ||||
|     ) | ||||
|     async with get_registry(*args, **kwargs) as to_yield: | ||||
|         yield to_yield | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_root( | ||||
|     **kwargs, | ||||
|  | @ -110,10 +99,41 @@ async def get_root( | |||
|         yield portal | ||||
| 
 | ||||
| 
 | ||||
def get_peer_by_name(
    name: str,
    # uuid: str|None = None,

) -> list[Channel]|None:  # at least 1
    '''
    Scan for an existing connection (set) to a named actor
    and return any channels from `Actor._peers`.

    This is an optimization method over querying the registrar for
    the same info.

    '''
    actor: Actor = current_actor()

    # XXX deep-ish copy: clone each per-peer chan list as well since
    # a naive `._peers.copy()` is shallow and appending the parent
    # chan below would mutate the runtime's live `Actor._peers`
    # entries!
    to_scan: dict[tuple, list[Channel]] = {
        uid: chans.copy()
        for uid, chans in actor._peers.items()
    }
    pchan: Channel|None = actor._parent_chan
    if pchan:
        to_scan.setdefault(pchan.uid, []).append(pchan)

    for aid, chans in to_scan.items():
        _, peer_name = aid
        if name == peer_name:
            if not chans:
                # (was missing the `f`-prefix and thus never
                # interpolated the peer name)
                log.warning(
                    f'No IPC chans for matching peer {peer_name}\n'
                )
                continue
            return chans

    return None
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def query_actor( | ||||
|     name: str, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None, | ||||
|     regaddr: tuple[str, int]|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|  | @ -121,11 +141,11 @@ async def query_actor( | |||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Make a transport address lookup for an actor name to a specific | ||||
|     registrar. | ||||
|     Lookup a transport address (by actor name) via querying a registrar | ||||
|     listening @ `regaddr`. | ||||
| 
 | ||||
|     Returns the (socket) address or ``None`` if no entry under that | ||||
|     name exists for the given registrar listening @ `regaddr`. | ||||
|     Returns the transport protocol (socket) address or `None` if no | ||||
|     entry under that name exists. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
|  | @ -137,14 +157,10 @@ async def query_actor( | |||
|             'The current actor IS the registry!?' | ||||
|         ) | ||||
| 
 | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.query_actor(regaddr=<blah>)` is deprecated.\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         regaddr: list[tuple[str, int]] = arbiter_sockaddr | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         yield maybe_peers[0].raddr | ||||
|         return | ||||
| 
 | ||||
|     reg_portal: Portal | ||||
|     regaddr: tuple[str, int] = regaddr or actor.reg_addrs[0] | ||||
|  | @ -159,10 +175,28 @@ async def query_actor( | |||
|         yield sockaddr | ||||
| 
 | ||||
| 
 | ||||
@acm
async def maybe_open_portal(
    addr: tuple[str, int],
    name: str,
):
    '''
    Query the registrar listening @ `addr` for an actor named `name`
    and, if a transport (socket) address is found, connect and yield
    an open `Portal` to it; otherwise yield `None`.

    '''
    async with query_actor(
        name=name,
        regaddr=addr,
    ) as sockaddr:
        # NOTE: only the addr lookup happens inside this ctx; we
        # deliberately exit (closing the registry connection) before
        # dialing the target peer below.
        pass

    if sockaddr:
        async with _connect_chan(*sockaddr) as chan:
            async with open_portal(chan) as portal:
                yield portal
    else:
        yield None
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def find_actor( | ||||
|     name: str, | ||||
|     arbiter_sockaddr: tuple[str, int]|None = None, | ||||
|     registry_addrs: list[tuple[str, int]]|None = None, | ||||
| 
 | ||||
|     only_first: bool = True, | ||||
|  | @ -179,29 +213,12 @@ async def find_actor( | |||
|     known to the arbiter. | ||||
| 
 | ||||
|     ''' | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.find_actor(arbiter_sockaddr=<blah>)` is deprecated.\n' | ||||
|             'Use `registry_addrs: list[tuple]` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addrs: list[tuple[str, int]] = [arbiter_sockaddr] | ||||
| 
 | ||||
|     @acm | ||||
|     async def maybe_open_portal_from_reg_addr( | ||||
|         addr: tuple[str, int], | ||||
|     ): | ||||
|         async with query_actor( | ||||
|             name=name, | ||||
|             regaddr=addr, | ||||
|         ) as sockaddr: | ||||
|             if sockaddr: | ||||
|                 async with _connect_chan(*sockaddr) as chan: | ||||
|                     async with open_portal(chan) as portal: | ||||
|                         yield portal | ||||
|             else: | ||||
|                 yield None | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers and only_first: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
| 
 | ||||
|     if not registry_addrs: | ||||
|         # XXX NOTE: make sure to dynamically read the value on | ||||
|  | @ -217,10 +234,13 @@ async def find_actor( | |||
|     maybe_portals: list[ | ||||
|         AsyncContextManager[tuple[str, int]] | ||||
|     ] = list( | ||||
|         maybe_open_portal_from_reg_addr(addr) | ||||
|         maybe_open_portal( | ||||
|             addr=addr, | ||||
|             name=name, | ||||
|         ) | ||||
|         for addr in registry_addrs | ||||
|     ) | ||||
| 
 | ||||
|     portals: list[Portal] | ||||
|     async with gather_contexts( | ||||
|         mngrs=maybe_portals, | ||||
|     ) as portals: | ||||
|  | @ -254,31 +274,31 @@ async def find_actor( | |||
| @acm | ||||
| async def wait_for_actor( | ||||
|     name: str, | ||||
|     arbiter_sockaddr: tuple[str, int] | None = None, | ||||
|     registry_addr: tuple[str, int] | None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[Portal, None]: | ||||
|     ''' | ||||
|     Wait on an actor to register with the arbiter. | ||||
| 
 | ||||
|     A portal to the first registered actor is returned. | ||||
|     Wait on at least one peer actor to register `name` with the | ||||
|     registrar, yield a `Portal to the first registree. | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = current_actor() | ||||
| 
 | ||||
|     if arbiter_sockaddr is not None: | ||||
|         warnings.warn( | ||||
|             '`tractor.wait_for_actor(arbiter_sockaddr=<foo>)` is deprecated.\n' | ||||
|             'Use `registry_addr: tuple` instead!', | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|         registry_addr: tuple[str, int] = arbiter_sockaddr | ||||
|     # optimization path, use any pre-existing peer channel | ||||
|     maybe_peers: list[Channel]|None = get_peer_by_name(name) | ||||
|     if maybe_peers: | ||||
|         async with open_portal(maybe_peers[0]) as peer_portal: | ||||
|             yield peer_portal | ||||
|             return | ||||
| 
 | ||||
|     regaddr: tuple[str, int] = ( | ||||
|         registry_addr | ||||
|         or | ||||
|         actor.reg_addrs[0] | ||||
|     ) | ||||
|     # TODO: use `.trionics.gather_contexts()` like | ||||
|     # above in `find_actor()` as well? | ||||
|     reg_portal: Portal | ||||
|     regaddr: tuple[str, int] = registry_addr or actor.reg_addrs[0] | ||||
|     async with get_registry(*regaddr) as reg_portal: | ||||
|         sockaddrs = await reg_portal.run_from_ns( | ||||
|             'self', | ||||
|  |  | |||
|  | @ -20,6 +20,9 @@ Sub-process entry points. | |||
| """ | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import multiprocessing as mp | ||||
| import os | ||||
| import textwrap | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -32,6 +35,7 @@ from .log import ( | |||
|     get_logger, | ||||
| ) | ||||
| from . import _state | ||||
| from .devx import _debug | ||||
| from .to_asyncio import run_as_asyncio_guest | ||||
| from ._runtime import ( | ||||
|     async_main, | ||||
|  | @ -56,25 +60,27 @@ def _mp_main( | |||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     The routine called *after fork* which invokes a fresh ``trio.run`` | ||||
|     The routine called *after fork* which invokes a fresh `trio.run()` | ||||
| 
 | ||||
|     ''' | ||||
|     actor._forkserver_info = forkserver_info | ||||
|     from ._spawn import try_set_start_method | ||||
|     spawn_ctx = try_set_start_method(start_method) | ||||
|     spawn_ctx: mp.context.BaseContext = try_set_start_method(start_method) | ||||
|     assert spawn_ctx | ||||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         log.info( | ||||
|             f"Setting loglevel for {actor.uid} to {actor.loglevel}") | ||||
|             f'Setting loglevel for {actor.uid} to {actor.loglevel}' | ||||
|         ) | ||||
|         get_console_log(actor.loglevel) | ||||
| 
 | ||||
|     assert spawn_ctx | ||||
|     # TODO: use scops headers like for `trio` below! | ||||
|     # (well after we libify it maybe..) | ||||
|     log.info( | ||||
|         f"Started new {spawn_ctx.current_process()} for {actor.uid}") | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
| 
 | ||||
|     log.debug(f"parent_addr is {parent_addr}") | ||||
|         f'Started new {spawn_ctx.current_process()} for {actor.uid}' | ||||
|     #     f"parent_addr is {parent_addr}" | ||||
|     ) | ||||
|     _state._current_actor: Actor = actor | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|         actor=actor, | ||||
|  | @ -91,11 +97,113 @@ def _mp_main( | |||
|         pass  # handle it the same way trio does? | ||||
| 
 | ||||
|     finally: | ||||
|         log.info(f"Actor {actor.uid} terminated") | ||||
|         log.info( | ||||
|             f'`mp`-subactor {actor.uid} exited' | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
# TODO: move this func to some kinda `.devx._conc_lang.py` eventually
# as we work out our multi-domain state-flow-syntax!
def nest_from_op(
    input_op: str,
    # ?TODO? draft idea: a minimal ascii/utf-8 operator set for
    # describing the state of concurrent systems as a "3-domain"
    # (execution, scope, storage) model; not to be taken seriously
    # yet XD
    #
    # execution primitives (tasks, threads, actors..) mark their
    # lifetime with parens, eg.
    #   >(  opening/starting a thread-of-exec
    #   (>  opened/started
    #   >)  closing/exiting,  )>  closed/exited
    #   c)> cancelled-to-close, x)> errored-and-terminated
    # scopes (nurseries, IPC-ctxs, ..) use braces: >{ {> }> >}
    # storage (queues, shm, files, ..) uses brackets: >[ [> >] ]>
    # IPC msging: => req, <= resp, <=> 2-way stream, -> / <- one msg

    tree_str: str,

    # shift the nested content left-ward from the end of `input_op`
    # by this many columns.
    back_from_op: int = 0,
) -> str:
    '''
    Render `tree_str` (a presumably hierarchy/supervision "tree
    string") nested one depth-level below the `input_op` execution
    operator: emit `input_op` on its own line then indent every
    content line of `tree_str` to align just past the op's last
    char (minus `back_from_op` columns).

    '''
    # number of leading spaces for each (non-blank) content line;
    # a non-positive width degenerates to no indent at all.
    pad_width: int = len(input_op) - (back_from_op + 1)
    pad: str = pad_width * ' '
    nested: str = textwrap.indent(
        tree_str,
        prefix=pad,
    )
    return f'{input_op}\n{nested}'
| 
 | ||||
| 
 | ||||
| def _trio_main( | ||||
| 
 | ||||
|     actor: Actor, | ||||
|     *, | ||||
|     parent_addr: tuple[str, int] | None = None, | ||||
|  | @ -106,7 +214,8 @@ def _trio_main( | |||
|     Entry point for a `trio_run_in_process` subactor. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = True | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     _state._current_actor = actor | ||||
|     trio_main = partial( | ||||
|         async_main, | ||||
|  | @ -116,7 +225,6 @@ def _trio_main( | |||
| 
 | ||||
|     if actor.loglevel is not None: | ||||
|         get_console_log(actor.loglevel) | ||||
|         import os | ||||
|         actor_info: str = ( | ||||
|             f'|_{actor}\n' | ||||
|             f'  uid: {actor.uid}\n' | ||||
|  | @ -125,27 +233,54 @@ def _trio_main( | |||
|             f'  loglevel: {actor.loglevel}\n' | ||||
|         ) | ||||
|         log.info( | ||||
|             'Started new trio process:\n' | ||||
|             'Starting new `trio` subactor:\n' | ||||
|             + | ||||
|             actor_info | ||||
|             nest_from_op( | ||||
|                 input_op='>(',  # see syntax ideas above | ||||
|                 tree_str=actor_info, | ||||
|                 back_from_op=1, | ||||
|             ) | ||||
|         ) | ||||
|     logmeth = log.info | ||||
|     exit_status: str = ( | ||||
|         'Subactor exited\n' | ||||
|         + | ||||
|         nest_from_op( | ||||
|             input_op=')>',  # like a "closed-to-play"-icon from super perspective | ||||
|             tree_str=actor_info, | ||||
|             back_from_op=1, | ||||
|         ) | ||||
|     ) | ||||
| 
 | ||||
|     try: | ||||
|         if infect_asyncio: | ||||
|             actor._infected_aio = True | ||||
|             run_as_asyncio_guest(trio_main) | ||||
|         else: | ||||
|             trio.run(trio_main) | ||||
| 
 | ||||
|     except KeyboardInterrupt: | ||||
|         log.cancel( | ||||
|             'Actor received KBI\n' | ||||
|         logmeth = log.cancel | ||||
|         exit_status: str = ( | ||||
|             'Actor received KBI (aka an OS-cancel)\n' | ||||
|             + | ||||
|             actor_info | ||||
|             nest_from_op( | ||||
|                 input_op='c)>',  # closed due to cancel (see above) | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|     except BaseException as err: | ||||
|         logmeth = log.error | ||||
|         exit_status: str = ( | ||||
|             'Main actor task exited due to crash?\n' | ||||
|             + | ||||
|             nest_from_op( | ||||
|                 input_op='x)>',  # closed by error | ||||
|                 tree_str=actor_info, | ||||
|             ) | ||||
|         ) | ||||
|         # NOTE since we raise a tb will already be shown on the | ||||
|         # console, thus we do NOT use `.exception()` above. | ||||
|         raise err | ||||
| 
 | ||||
|     finally: | ||||
|         log.info( | ||||
|             'Actor terminated\n' | ||||
|             + | ||||
|             actor_info | ||||
|         ) | ||||
|         logmeth(exit_status) | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										414
									
								
								tractor/_ipc.py
								
								
								
								
							
							
						
						
									
										414
									
								
								tractor/_ipc.py
								
								
								
								
							|  | @ -23,13 +23,17 @@ from collections.abc import ( | |||
|     AsyncGenerator, | ||||
|     AsyncIterator, | ||||
| ) | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| import platform | ||||
| from pprint import pformat | ||||
| import struct | ||||
| import typing | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     runtime_checkable, | ||||
|     Protocol, | ||||
|     Type, | ||||
|  | @ -41,15 +45,38 @@ from tricycle import BufferedReceiveStream | |||
| import trio | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import TransportClosed | ||||
| from tractor._exceptions import ( | ||||
|     MsgTypeError, | ||||
|     pack_from_raise, | ||||
|     TransportClosed, | ||||
|     _mk_send_mte, | ||||
|     _mk_recv_mte, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     _ctxvar_MsgCodec, | ||||
|     # _codec,  XXX see `self._codec` sanity/debug checks | ||||
|     MsgCodec, | ||||
|     types as msgtypes, | ||||
|     pretty_struct, | ||||
| ) | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _is_windows = platform.system() == 'Windows' | ||||
| 
 | ||||
| 
 | ||||
| def get_stream_addrs(stream: trio.SocketStream) -> tuple: | ||||
|     # should both be IP sockets | ||||
| def get_stream_addrs( | ||||
|     stream: trio.SocketStream | ||||
| ) -> tuple[ | ||||
|     tuple[str, int],  # local | ||||
|     tuple[str, int],  # remote | ||||
| ]: | ||||
|     ''' | ||||
|     Return the `trio` streaming transport prot's socket-addrs for | ||||
|     both the local and remote sides as a pair. | ||||
| 
 | ||||
|     ''' | ||||
|     # rn, should both be IP sockets | ||||
|     lsockname = stream.socket.getsockname() | ||||
|     rsockname = stream.socket.getpeername() | ||||
|     return ( | ||||
|  | @ -58,16 +85,22 @@ def get_stream_addrs(stream: trio.SocketStream) -> tuple: | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| MsgType = TypeVar("MsgType") | ||||
| 
 | ||||
| # TODO: consider using a generic def and indexing with our eventual | ||||
| # msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| # - https://jcristharif.com/msgspec/usage.html#structs | ||||
| # from tractor.msg.types import MsgType | ||||
| # ?TODO? this should be our `Union[*msgtypes.__spec__]` alias now right..? | ||||
| # => BLEH, except can't bc prots must inherit typevar or param-spec | ||||
| #   vars.. | ||||
| MsgType = TypeVar('MsgType') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: break up this mod into a subpkg so we can start adding new | ||||
| # backends and move this type stuff into a dedicated file.. Bo | ||||
| # | ||||
| @runtime_checkable | ||||
| class MsgTransport(Protocol[MsgType]): | ||||
| # | ||||
| # ^-TODO-^ consider using a generic def and indexing with our | ||||
| # eventual msg definition/types? | ||||
| # - https://docs.python.org/3/library/typing.html#typing.Protocol | ||||
| 
 | ||||
|     stream: trio.SocketStream | ||||
|     drained: list[MsgType] | ||||
|  | @ -102,9 +135,9 @@ class MsgTransport(Protocol[MsgType]): | |||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| # TODO: not sure why we have to inherit here, but it seems to be an | ||||
| # issue with ``get_msg_transport()`` returning a ``Type[Protocol]``; | ||||
| # probably should make a `mypy` issue? | ||||
| # TODO: typing oddity.. not sure why we have to inherit here, but it | ||||
| # seems to be an issue with `get_msg_transport()` returning | ||||
| # a `Type[Protocol]`; probably should make a `mypy` issue? | ||||
| class MsgpackTCPStream(MsgTransport): | ||||
|     ''' | ||||
|     A ``trio.SocketStream`` delivering ``msgpack`` formatted data | ||||
|  | @ -123,6 +156,16 @@ class MsgpackTCPStream(MsgTransport): | |||
|         stream: trio.SocketStream, | ||||
|         prefix_size: int = 4, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         # | ||||
|         # TODO: define this as a `Codec` struct which can be | ||||
|         # overriden dynamically by the application/runtime? | ||||
|         codec: tuple[ | ||||
|             Callable[[Any], Any]|None,  # coder | ||||
|             Callable[[type, Any], Any]|None,  # decoder | ||||
|         ]|None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self.stream = stream | ||||
|  | @ -132,30 +175,44 @@ class MsgpackTCPStream(MsgTransport): | |||
|         self._laddr, self._raddr = get_stream_addrs(stream) | ||||
| 
 | ||||
|         # create read loop instance | ||||
|         self._agen = self._iter_packets() | ||||
|         self._aiter_pkts = self._iter_packets() | ||||
|         self._send_lock = trio.StrictFIFOLock() | ||||
| 
 | ||||
|         # public i guess? | ||||
|         self.drained: list[dict] = [] | ||||
| 
 | ||||
|         self.recv_stream = BufferedReceiveStream(transport_stream=stream) | ||||
|         self.recv_stream = BufferedReceiveStream( | ||||
|             transport_stream=stream | ||||
|         ) | ||||
|         self.prefix_size = prefix_size | ||||
| 
 | ||||
|         # TODO: struct aware messaging coders | ||||
|         self.encode = msgspec.msgpack.Encoder().encode | ||||
|         self.decode = msgspec.msgpack.Decoder().decode  # dict[str, Any]) | ||||
|         # allow for custom IPC msg interchange format | ||||
|         # dynamic override Bo | ||||
|         self._task = trio.lowlevel.current_task() | ||||
| 
 | ||||
|         # XXX for ctxvar debug only! | ||||
|         # self._codec: MsgCodec = ( | ||||
|         #     codec | ||||
|         #     or | ||||
|         #     _codec._ctxvar_MsgCodec.get() | ||||
|         # ) | ||||
| 
 | ||||
|     async def _iter_packets(self) -> AsyncGenerator[dict, None]: | ||||
|         '''Yield packets from the underlying stream. | ||||
|         ''' | ||||
|         Yield `bytes`-blob decoded packets from the underlying TCP | ||||
|         stream using the current task's `MsgCodec`. | ||||
| 
 | ||||
|         This is a streaming routine implemented as an async generator | ||||
|         func (which was the original design, but could be changed?) | ||||
|         and is allocated by a `.__call__()` inside `.__init__()` where | ||||
|         it is assigned to the `._aiter_pkts` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         import msgspec  # noqa | ||||
|         decodes_failed: int = 0 | ||||
| 
 | ||||
|         while True: | ||||
|             try: | ||||
|                 header = await self.recv_stream.receive_exactly(4) | ||||
| 
 | ||||
|                 header: bytes = await self.recv_stream.receive_exactly(4) | ||||
|             except ( | ||||
|                 ValueError, | ||||
|                 ConnectionResetError, | ||||
|  | @ -164,25 +221,122 @@ class MsgpackTCPStream(MsgTransport): | |||
|                 # seem to be getting racy failures here on | ||||
|                 # arbiter/registry name subs.. | ||||
|                 trio.BrokenResourceError, | ||||
|             ): | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior ro read' | ||||
|                 ) | ||||
| 
 | ||||
|             ) as trans_err: | ||||
| 
 | ||||
|                 loglevel = 'transport' | ||||
|                 match trans_err: | ||||
|                     # case ( | ||||
|                     #     ConnectionResetError() | ||||
|                     # ): | ||||
|                     #     loglevel = 'transport' | ||||
| 
 | ||||
|                     # peer actor (graceful??) TCP EOF but `tricycle` | ||||
|                     # seems to raise a 0-bytes-read? | ||||
|                     case ValueError() if ( | ||||
|                         'unclean EOF' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # peer actor (task) prolly shutdown quickly due | ||||
|                     # to cancellation | ||||
|                     case trio.BrokenResourceError() if ( | ||||
|                         'Connection reset by peer' in trans_err.args[0] | ||||
|                     ): | ||||
|                         pass | ||||
| 
 | ||||
|                     # unless the disconnect condition falls under "a | ||||
|                     # normal operation breakage" we usualy console warn | ||||
|                     # about it. | ||||
|                     case _: | ||||
|                         loglevel: str = 'warning' | ||||
| 
 | ||||
| 
 | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already closed by peer\n' | ||||
|                         f'x)> {type(trans_err)}\n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel=loglevel, | ||||
|                 ) from trans_err | ||||
| 
 | ||||
|             # XXX definitely can happen if transport is closed | ||||
|             # manually by another `trio.lowlevel.Task` in the | ||||
|             # same actor; we use this in some simulated fault | ||||
|             # testing for ex, but generally should never happen | ||||
|             # under normal operation! | ||||
|             # | ||||
|             # NOTE: as such we always re-raise this error from the | ||||
|             #       RPC msg loop! | ||||
|             except trio.ClosedResourceError as closure_err: | ||||
|                 raise TransportClosed( | ||||
|                     message=( | ||||
|                         f'IPC transport already manually closed locally?\n' | ||||
|                         f'x)> {type(closure_err)} \n' | ||||
|                         f' |_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='error', | ||||
|                     raise_on_report=( | ||||
|                         closure_err.args[0] == 'another task closed this fd' | ||||
|                         or | ||||
|                         closure_err.args[0] in ['another task closed this fd'] | ||||
|                     ), | ||||
|                 ) from closure_err | ||||
| 
 | ||||
|             # graceful TCP EOF disconnect | ||||
|             if header == b'': | ||||
|                 raise TransportClosed( | ||||
|                     f'transport {self} was already closed prior ro read' | ||||
|                     message=( | ||||
|                         f'IPC transport already gracefully closed\n' | ||||
|                         f')>\n' | ||||
|                         f'|_{self}\n' | ||||
|                     ), | ||||
|                     loglevel='transport', | ||||
|                     # cause=???  # handy or no? | ||||
|                 ) | ||||
| 
 | ||||
|             size: int | ||||
|             size, = struct.unpack("<I", header) | ||||
| 
 | ||||
|             log.transport(f'received header {size}')  # type: ignore | ||||
| 
 | ||||
|             msg_bytes = await self.recv_stream.receive_exactly(size) | ||||
|             msg_bytes: bytes = await self.recv_stream.receive_exactly(size) | ||||
| 
 | ||||
|             log.transport(f"received {msg_bytes}")  # type: ignore | ||||
|             try: | ||||
|                 yield self.decode(msg_bytes) | ||||
|                 # NOTE: lookup the `trio.Task.context`'s var for | ||||
|                 # the current `MsgCodec`. | ||||
|                 codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|                 # XXX for ctxvar debug only! | ||||
|                 # if self._codec.pld_spec != codec.pld_spec: | ||||
|                 #     assert ( | ||||
|                 #         task := trio.lowlevel.current_task() | ||||
|                 #     ) is not self._task | ||||
|                 #     self._task = task | ||||
|                 #     self._codec = codec | ||||
|                 #     log.runtime( | ||||
|                 #         f'Using new codec in {self}.recv()\n' | ||||
|                 #         f'codec: {self._codec}\n\n' | ||||
|                 #         f'msg_bytes: {msg_bytes}\n' | ||||
|                 #     ) | ||||
|                 yield codec.decode(msg_bytes) | ||||
| 
 | ||||
|             # XXX NOTE: since the below error derives from | ||||
|             # `DecodeError` we need to catch is specially | ||||
|             # and always raise such that spec violations | ||||
|             # are never allowed to be caught silently! | ||||
|             except msgspec.ValidationError as verr: | ||||
|                 msgtyperr: MsgTypeError = _mk_recv_mte( | ||||
|                     msg=msg_bytes, | ||||
|                     codec=codec, | ||||
|                     src_validation_error=verr, | ||||
|                 ) | ||||
|                 # XXX deliver up to `Channel.recv()` where | ||||
|                 # a re-raise and `Error`-pack can inject the far | ||||
|                 # end actor `.uid`. | ||||
|                 yield msgtyperr | ||||
| 
 | ||||
|             except ( | ||||
|                 msgspec.DecodeError, | ||||
|                 UnicodeDecodeError, | ||||
|  | @ -196,10 +350,11 @@ class MsgpackTCPStream(MsgTransport): | |||
|                     except UnicodeDecodeError: | ||||
|                         msg_str = msg_bytes | ||||
| 
 | ||||
|                     log.error( | ||||
|                         '`msgspec` failed to decode!?\n' | ||||
|                         'dumping bytes:\n' | ||||
|                         f'{msg_str!r}' | ||||
|                     log.exception( | ||||
|                         'Failed to decode msg?\n' | ||||
|                         f'{codec}\n\n' | ||||
|                         'Rxed bytes from wire:\n\n' | ||||
|                         f'{msg_str!r}\n' | ||||
|                     ) | ||||
|                     decodes_failed += 1 | ||||
|                 else: | ||||
|  | @ -207,25 +362,80 @@ class MsgpackTCPStream(MsgTransport): | |||
| 
 | ||||
|     async def send( | ||||
|         self, | ||||
|         msg: Any, | ||||
|         msg: msgtypes.MsgType, | ||||
| 
 | ||||
|         strict_types: bool = True, | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|         # hide_tb: bool = False, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a msgpack coded blob-as-msg over TCP. | ||||
|         Send a msgpack encoded py-object-blob-as-msg over TCP. | ||||
| 
 | ||||
|         If `strict_types == True` then a `MsgTypeError` will be raised on any | ||||
|         invalid msg type | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__: bool = hide_tb | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # XXX see `trio._sync.AsyncContextManagerMixin` for details | ||||
|         # on the `.acquire()`/`.release()` sequencing.. | ||||
|         async with self._send_lock: | ||||
| 
 | ||||
|             bytes_data: bytes = self.encode(msg) | ||||
|             # NOTE: lookup the `trio.Task.context`'s var for | ||||
|             # the current `MsgCodec`. | ||||
|             codec: MsgCodec = _ctxvar_MsgCodec.get() | ||||
| 
 | ||||
|             # XXX for ctxvar debug only! | ||||
|             # if self._codec.pld_spec != codec.pld_spec: | ||||
|             #     self._codec = codec | ||||
|             #     log.runtime( | ||||
|             #         f'Using new codec in {self}.send()\n' | ||||
|             #         f'codec: {self._codec}\n\n' | ||||
|             #         f'msg: {msg}\n' | ||||
|             #     ) | ||||
| 
 | ||||
|             if type(msg) not in msgtypes.__msg_types__: | ||||
|                 if strict_types: | ||||
|                     raise _mk_send_mte( | ||||
|                         msg, | ||||
|                         codec=codec, | ||||
|                     ) | ||||
|                 else: | ||||
|                     log.warning( | ||||
|                         'Sending non-`Msg`-spec msg?\n\n' | ||||
|                         f'{msg}\n' | ||||
|                     ) | ||||
| 
 | ||||
|             try: | ||||
|                 bytes_data: bytes = codec.encode(msg) | ||||
|             except TypeError as _err: | ||||
|                 typerr = _err | ||||
|                 msgtyperr: MsgTypeError = _mk_send_mte( | ||||
|                     msg, | ||||
|                     codec=codec, | ||||
|                     message=( | ||||
|                         f'IPC-msg-spec violation in\n\n' | ||||
|                         f'{pretty_struct.Struct.pformat(msg)}' | ||||
|                     ), | ||||
|                     src_type_error=typerr, | ||||
|                 ) | ||||
|                 raise msgtyperr from typerr | ||||
| 
 | ||||
|             # supposedly the fastest says, | ||||
|             # https://stackoverflow.com/a/54027962 | ||||
|             size: bytes = struct.pack("<I", len(bytes_data)) | ||||
| 
 | ||||
|             return await self.stream.send_all(size + bytes_data) | ||||
| 
 | ||||
|         # ?TODO? does it help ever to dynamically show this | ||||
|         # frame? | ||||
|         # try: | ||||
|         #     <the-above_code> | ||||
|         # except BaseException as _err: | ||||
|         #     err = _err | ||||
|         #     if not isinstance(err, MsgTypeError): | ||||
|         #         __tracebackhide__: bool = False | ||||
|         #     raise | ||||
| 
 | ||||
|     @property | ||||
|     def laddr(self) -> tuple[str, int]: | ||||
|         return self._laddr | ||||
|  | @ -235,7 +445,7 @@ class MsgpackTCPStream(MsgTransport): | |||
|         return self._raddr | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         return await self._agen.asend(None) | ||||
|         return await self._aiter_pkts.asend(None) | ||||
| 
 | ||||
|     async def drain(self) -> AsyncIterator[dict]: | ||||
|         ''' | ||||
|  | @ -252,7 +462,7 @@ class MsgpackTCPStream(MsgTransport): | |||
|                 yield msg | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
|         return self._aiter_pkts | ||||
| 
 | ||||
|     def connected(self) -> bool: | ||||
|         return self.stream.socket.fileno() != -1 | ||||
|  | @ -307,7 +517,7 @@ class Channel: | |||
|         # set after handshake - always uid of far end | ||||
|         self.uid: tuple[str, str]|None = None | ||||
| 
 | ||||
|         self._agen = self._aiter_recv() | ||||
|         self._aiter_msgs = self._iter_msgs() | ||||
|         self._exc: Exception|None = None  # set if far end actor errors | ||||
|         self._closed: bool = False | ||||
| 
 | ||||
|  | @ -318,7 +528,9 @@ class Channel: | |||
| 
 | ||||
|     @property | ||||
|     def msgstream(self) -> MsgTransport: | ||||
|         log.info('`Channel.msgstream` is an old name, use `._transport`') | ||||
|         log.info( | ||||
|             '`Channel.msgstream` is an old name, use `._transport`' | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @property | ||||
|  | @ -349,11 +561,45 @@ class Channel: | |||
|         stream: trio.SocketStream, | ||||
|         type_key: tuple[str, str]|None = None, | ||||
| 
 | ||||
|         # XXX optionally provided codec pair for `msgspec`: | ||||
|         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||
|         codec: MsgCodec|None = None, | ||||
| 
 | ||||
|     ) -> MsgTransport: | ||||
|         type_key = type_key or self._transport_key | ||||
|         self._transport = get_msg_transport(type_key)(stream) | ||||
|         type_key = ( | ||||
|             type_key | ||||
|             or | ||||
|             self._transport_key | ||||
|         ) | ||||
|         # get transport type, then | ||||
|         self._transport = get_msg_transport( | ||||
|             type_key | ||||
|         # instantiate an instance of the msg-transport | ||||
|         )( | ||||
|             stream, | ||||
|             codec=codec, | ||||
|         ) | ||||
|         return self._transport | ||||
| 
 | ||||
|     @cm | ||||
|     def apply_codec( | ||||
|         self, | ||||
|         codec: MsgCodec, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Temporarily override the underlying IPC msg codec for | ||||
|         dynamic enforcement of messaging schema. | ||||
| 
 | ||||
|         ''' | ||||
|         orig: MsgCodec = self._transport.codec | ||||
|         try: | ||||
|             self._transport.codec = codec | ||||
|             yield | ||||
|         finally: | ||||
|             self._transport.codec = orig | ||||
| 
 | ||||
|     # TODO: do a .src/.dst: str for maddrs? | ||||
|     def __repr__(self) -> str: | ||||
|         if not self._transport: | ||||
|             return '<Channel with inactive transport?>' | ||||
|  | @ -397,33 +643,53 @@ class Channel: | |||
|         ) | ||||
|         return transport | ||||
| 
 | ||||
|     # TODO: something like, | ||||
|     # `pdbp.hideframe_on(errors=[MsgTypeError])` | ||||
|     # instead of the `try/except` hack we have rn.. | ||||
|     # seems like a pretty useful thing to have in general | ||||
|     # along with being able to filter certain stack frame(s / sets) | ||||
|     # possibly based on the current log-level? | ||||
|     async def send( | ||||
|         self, | ||||
|         payload: Any, | ||||
| 
 | ||||
|         # hide_tb: bool = False, | ||||
|         hide_tb: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a coded msg-blob over the transport. | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__: bool = hide_tb | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         try: | ||||
|             log.transport( | ||||
|                 '=> send IPC msg:\n\n' | ||||
|                 f'{pformat(payload)}\n' | ||||
|         )  # type: ignore | ||||
|         assert self._transport | ||||
| 
 | ||||
|             ) | ||||
|             # assert self._transport  # but why typing? | ||||
|             await self._transport.send( | ||||
|                 payload, | ||||
|             # hide_tb=hide_tb, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|         except BaseException as _err: | ||||
|             err = _err  # bind for introspection | ||||
|             if not isinstance(_err, MsgTypeError): | ||||
|                 # assert err | ||||
|                 __tracebackhide__: bool = False | ||||
|             else: | ||||
|                 assert err.cid | ||||
| 
 | ||||
|             raise | ||||
| 
 | ||||
|     async def recv(self) -> Any: | ||||
|         assert self._transport | ||||
|         return await self._transport.recv() | ||||
| 
 | ||||
|         # TODO: auto-reconnect features like 0mq/nanomsg? | ||||
|         # -[ ] implement it manually with nods to SC prot | ||||
|         #      possibly on multiple transport backends? | ||||
|         #  -> seems like that might be re-inventing scalability | ||||
|         #     prots tho no? | ||||
|         # try: | ||||
|         #     return await self._transport.recv() | ||||
|         # except trio.BrokenResourceError: | ||||
|  | @ -450,8 +716,11 @@ class Channel: | |||
|         await self.aclose(*args) | ||||
| 
 | ||||
|     def __aiter__(self): | ||||
|         return self._agen | ||||
|         return self._aiter_msgs | ||||
| 
 | ||||
|     # ?TODO? run any reconnection sequence? | ||||
|     # -[ ] prolly should be impl-ed as deco-API? | ||||
|     # | ||||
|     # async def _reconnect(self) -> None: | ||||
|     #     """Handle connection failures by polling until a reconnect can be | ||||
|     #     established. | ||||
|  | @ -469,7 +738,6 @@ class Channel: | |||
|     #             else: | ||||
|     #                 log.transport("Stream connection re-established!") | ||||
| 
 | ||||
|     #                 # TODO: run any reconnection sequence | ||||
|     #                 # on_recon = self._recon_seq | ||||
|     #                 # if on_recon: | ||||
|     #                 #     await on_recon(self) | ||||
|  | @ -483,23 +751,42 @@ class Channel: | |||
|     #                     " for re-establishment") | ||||
|     #             await trio.sleep(1) | ||||
| 
 | ||||
|     async def _aiter_recv( | ||||
|     async def _iter_msgs( | ||||
|         self | ||||
|     ) -> AsyncGenerator[Any, None]: | ||||
|         ''' | ||||
|         Async iterate items from underlying stream. | ||||
|         Yield `MsgType` IPC msgs decoded and deliverd from | ||||
|         an underlying `MsgTransport` protocol. | ||||
| 
 | ||||
|         This is a streaming routine alo implemented as an async-gen | ||||
|         func (same a `MsgTransport._iter_pkts()`) gets allocated by | ||||
|         a `.__call__()` inside `.__init__()` where it is assigned to | ||||
|         the `._aiter_msgs` attr. | ||||
| 
 | ||||
|         ''' | ||||
|         assert self._transport | ||||
|         while True: | ||||
|             try: | ||||
|                 async for item in self._transport: | ||||
|                     yield item | ||||
|                     # sent = yield item | ||||
|                     # if sent is not None: | ||||
|                     #     # optimization, passing None through all the | ||||
|                     #     # time is pointless | ||||
|                     #     await self._transport.send(sent) | ||||
|                 async for msg in self._transport: | ||||
|                     match msg: | ||||
|                         # NOTE: if transport/interchange delivers | ||||
|                         # a type error, we pack it with the far | ||||
|                         # end peer `Actor.uid` and relay the | ||||
|                         # `Error`-msg upward to the `._rpc` stack | ||||
|                         # for normal RAE handling. | ||||
|                         case MsgTypeError(): | ||||
|                             yield pack_from_raise( | ||||
|                                 local_err=msg, | ||||
|                                 cid=msg.cid, | ||||
| 
 | ||||
|                                 # XXX we pack it here bc lower | ||||
|                                 # layers have no notion of an | ||||
|                                 # actor-id ;) | ||||
|                                 src_uid=self.uid, | ||||
|                             ) | ||||
|                         case _: | ||||
|                             yield msg | ||||
| 
 | ||||
|             except trio.BrokenResourceError: | ||||
| 
 | ||||
|                 # if not self._autorecon: | ||||
|  | @ -529,4 +816,5 @@ async def _connect_chan( | |||
|     chan = Channel((host, port)) | ||||
|     await chan.connect() | ||||
|     yield chan | ||||
|     with trio.CancelScope(shield=True): | ||||
|         await chan.aclose() | ||||
|  |  | |||
|  | @ -31,7 +31,7 @@ from typing import ( | |||
|     Any, | ||||
|     Callable, | ||||
|     AsyncGenerator, | ||||
|     # Type, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| from functools import partial | ||||
| from dataclasses import dataclass | ||||
|  | @ -45,9 +45,14 @@ from ._state import ( | |||
| ) | ||||
| from ._ipc import Channel | ||||
| from .log import get_logger | ||||
| from .msg import NamespacePath | ||||
| from .msg import ( | ||||
|     # Error, | ||||
|     PayloadMsg, | ||||
|     NamespacePath, | ||||
|     Return, | ||||
| ) | ||||
| from ._exceptions import ( | ||||
|     unpack_error, | ||||
|     # unpack_error, | ||||
|     NoResult, | ||||
| ) | ||||
| from ._context import ( | ||||
|  | @ -58,41 +63,12 @@ from ._streaming import ( | |||
|     MsgStream, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: rename to `unwrap_result()` and use | ||||
| # `._raise_from_no_key_in_msg()` (after tweak to | ||||
| # accept a `chan: Channel` arg) in key block! | ||||
| def _unwrap_msg( | ||||
|     msg: dict[str, Any], | ||||
|     channel: Channel, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
| ) -> Any: | ||||
|     ''' | ||||
|     Unwrap a final result from a `{return: <Any>}` IPC msg. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|     try: | ||||
|         return msg['return'] | ||||
|     except KeyError as ke: | ||||
| 
 | ||||
|         # internal error should never get here | ||||
|         assert msg.get('cid'), ( | ||||
|             "Received internal error at portal?" | ||||
|         ) | ||||
| 
 | ||||
|         raise unpack_error( | ||||
|             msg, | ||||
|             channel | ||||
|         ) from ke | ||||
| 
 | ||||
| 
 | ||||
| class Portal: | ||||
|     ''' | ||||
|     A 'portal' to a memory-domain-separated `Actor`. | ||||
|  | @ -116,17 +92,26 @@ class Portal: | |||
|     # connected (peer) actors. | ||||
|     cancel_timeout: float = 0.5 | ||||
| 
 | ||||
|     def __init__(self, channel: Channel) -> None: | ||||
|         self.chan = channel | ||||
|     def __init__( | ||||
|         self, | ||||
|         channel: Channel, | ||||
|     ) -> None: | ||||
| 
 | ||||
|         self._chan: Channel = channel | ||||
|         # during the portal's lifetime | ||||
|         self._result_msg: dict|None = None | ||||
|         self._final_result_pld: Any|None = None | ||||
|         self._final_result_msg: PayloadMsg|None = None | ||||
| 
 | ||||
|         # When set to a ``Context`` (when _submit_for_result is called) | ||||
|         # it is expected that ``result()`` will be awaited at some | ||||
|         # point. | ||||
|         self._expect_result: Context | None = None | ||||
|         self._expect_result_ctx: Context|None = None | ||||
|         self._streams: set[MsgStream] = set() | ||||
|         self.actor = current_actor() | ||||
|         self.actor: Actor = current_actor() | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         return self._chan | ||||
| 
 | ||||
|     @property | ||||
|     def channel(self) -> Channel: | ||||
|  | @ -140,6 +125,8 @@ class Portal: | |||
|         ) | ||||
|         return self.chan | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def _submit_for_result( | ||||
|         self, | ||||
|         ns: str, | ||||
|  | @ -147,32 +134,34 @@ class Portal: | |||
|         **kwargs | ||||
|     ) -> None: | ||||
| 
 | ||||
|         assert self._expect_result is None, ( | ||||
|             "A pending main result has already been submitted" | ||||
|         if self._expect_result_ctx is not None: | ||||
|             raise RuntimeError( | ||||
|                 'A pending main result has already been submitted' | ||||
|             ) | ||||
| 
 | ||||
|         self._expect_result = await self.actor.start_remote_task( | ||||
|         self._expect_result_ctx: Context = await self.actor.start_remote_task( | ||||
|             self.channel, | ||||
|             nsf=NamespacePath(f'{ns}:{func}'), | ||||
|             kwargs=kwargs | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
| 
 | ||||
|     async def _return_once( | ||||
|     # TODO: we should deprecate this API right? since if we remove | ||||
|     # `.run_in_actor()` (and instead move it to a `.highlevel` | ||||
|     # wrapper api (around a single `.open_context()` call) we don't | ||||
|     # really have any notion of a "main" remote task any more? | ||||
|     # | ||||
|     # @api_frame | ||||
|     async def wait_for_result( | ||||
|         self, | ||||
|         ctx: Context, | ||||
| 
 | ||||
|     ) -> dict[str, Any]: | ||||
| 
 | ||||
|         assert ctx._remote_func_type == 'asyncfunc'  # single response | ||||
|         msg: dict = await ctx._recv_chan.receive() | ||||
|         return msg | ||||
| 
 | ||||
|     async def result(self) -> Any: | ||||
|         hide_tb: bool = True, | ||||
|     ) -> Any: | ||||
|         ''' | ||||
|         Return the result(s) from the remote actor's "main" task. | ||||
|         Return the final result delivered by a `Return`-msg from the | ||||
|         remote peer actor's "main" task's `return` statement. | ||||
| 
 | ||||
|         ''' | ||||
|         # __tracebackhide__ = True | ||||
|         __tracebackhide__: bool = hide_tb | ||||
|         # Check for non-rpc errors slapped on the | ||||
|         # channel for which we always raise | ||||
|         exc = self.channel._exc | ||||
|  | @ -180,7 +169,7 @@ class Portal: | |||
|             raise exc | ||||
| 
 | ||||
|         # not expecting a "main" result | ||||
|         if self._expect_result is None: | ||||
|         if self._expect_result_ctx is None: | ||||
|             log.warning( | ||||
|                 f"Portal for {self.channel.uid} not expecting a final" | ||||
|                 " result?\nresult() should only be called if subactor" | ||||
|  | @ -188,16 +177,40 @@ class Portal: | |||
|             return NoResult | ||||
| 
 | ||||
|         # expecting a "main" result | ||||
|         assert self._expect_result | ||||
|         assert self._expect_result_ctx | ||||
| 
 | ||||
|         if self._result_msg is None: | ||||
|             self._result_msg = await self._return_once( | ||||
|                 self._expect_result | ||||
|         if self._final_result_msg is None: | ||||
|             try: | ||||
|                 ( | ||||
|                     self._final_result_msg, | ||||
|                     self._final_result_pld, | ||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg_w_pld( | ||||
|                     ipc=self._expect_result_ctx, | ||||
|                     expect_msg=Return, | ||||
|                 ) | ||||
|             except BaseException as err: | ||||
|                 # TODO: wrap this into `@api_frame` optionally with | ||||
|                 # some kinda filtering mechanism like log levels? | ||||
|                 __tracebackhide__: bool = False | ||||
|                 raise err | ||||
| 
 | ||||
|         return _unwrap_msg( | ||||
|             self._result_msg, | ||||
|             self.channel, | ||||
|         return self._final_result_pld | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def result( | ||||
|         self, | ||||
|         *args, | ||||
|         **kwargs, | ||||
|     ) -> Any|Exception: | ||||
|         typname: str = type(self).__name__ | ||||
|         log.warning( | ||||
|             f'`{typname}.result()` is DEPRECATED!\n' | ||||
|             f'Use `{typname}.wait_for_result()` instead!\n' | ||||
|         ) | ||||
|         return await self.wait_for_result( | ||||
|             *args, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
|     async def _cancel_streams(self): | ||||
|  | @ -240,6 +253,8 @@ class Portal: | |||
|         purpose. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         chan: Channel = self.channel | ||||
|         if not chan.connected(): | ||||
|             log.runtime( | ||||
|  | @ -248,14 +263,15 @@ class Portal: | |||
|             return False | ||||
| 
 | ||||
|         reminfo: str = ( | ||||
|             f'`Portal.cancel_actor()` => {self.channel.uid}\n' | ||||
|             f'c)=> {self.channel.uid}\n' | ||||
|             f'  |_{chan}\n' | ||||
|         ) | ||||
|         log.cancel( | ||||
|             f'Sending runtime `.cancel()` request to peer\n\n' | ||||
|             f'Requesting actor-runtime cancel for peer\n\n' | ||||
|             f'{reminfo}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX the one spot we set it? | ||||
|         self.channel._cancel_called: bool = True | ||||
|         try: | ||||
|             # send cancel cmd - might not get response | ||||
|  | @ -295,6 +311,8 @@ class Portal: | |||
|             ) | ||||
|             return False | ||||
| 
 | ||||
|     # TODO: do we still need this for low level `Actor`-runtime | ||||
|     # method calls or can we also remove it? | ||||
|     async def run_from_ns( | ||||
|         self, | ||||
|         namespace_path: str, | ||||
|  | @ -317,21 +335,23 @@ class Portal: | |||
|           internals! | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         nsf = NamespacePath( | ||||
|             f'{namespace_path}:{function_name}' | ||||
|         ) | ||||
|         ctx = await self.actor.start_remote_task( | ||||
|         ctx: Context = await self.actor.start_remote_task( | ||||
|             chan=self.channel, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
|         msg = await self._return_once(ctx) | ||||
|         return _unwrap_msg( | ||||
|             msg, | ||||
|             self.channel, | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     async def run( | ||||
|         self, | ||||
|         func: str, | ||||
|  | @ -347,6 +367,8 @@ class Portal: | |||
|         remote rpc task or a local async generator instance. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if isinstance(func, str): | ||||
|             warnings.warn( | ||||
|                 "`Portal.run(namespace: str, funcname: str)` is now" | ||||
|  | @ -377,13 +399,15 @@ class Portal: | |||
|             self.channel, | ||||
|             nsf=nsf, | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
|         return _unwrap_msg( | ||||
|             await self._return_once(ctx), | ||||
|             self.channel, | ||||
|         return await ctx._pld_rx.recv_pld( | ||||
|             ipc=ctx, | ||||
|             expect_msg=Return, | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: factor this out into a `.highlevel` API-wrapper that uses | ||||
|     # a single `.open_context()` call underneath. | ||||
|     @acm | ||||
|     async def open_stream_from( | ||||
|         self, | ||||
|  | @ -391,6 +415,14 @@ class Portal: | |||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> AsyncGenerator[MsgStream, None]: | ||||
|         ''' | ||||
|         Legacy one-way streaming API. | ||||
| 
 | ||||
|         TODO: re-impl on top `Portal.open_context()` + an async gen | ||||
|         around `Context.open_stream()`. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
| 
 | ||||
|         if not inspect.isasyncgenfunction(async_gen_func): | ||||
|             if not ( | ||||
|  | @ -404,8 +436,8 @@ class Portal: | |||
|             self.channel, | ||||
|             nsf=NamespacePath.from_ref(async_gen_func), | ||||
|             kwargs=kwargs, | ||||
|             portal=self, | ||||
|         ) | ||||
|         ctx._portal = self | ||||
| 
 | ||||
|         # ensure receive-only stream entrypoint | ||||
|         assert ctx._remote_func_type == 'asyncgen' | ||||
|  | @ -414,13 +446,13 @@ class Portal: | |||
|             # deliver receive only stream | ||||
|             async with MsgStream( | ||||
|                 ctx=ctx, | ||||
|                 rx_chan=ctx._recv_chan, | ||||
|             ) as rchan: | ||||
|                 self._streams.add(rchan) | ||||
|                 yield rchan | ||||
|                 rx_chan=ctx._rx_chan, | ||||
|             ) as stream: | ||||
|                 self._streams.add(stream) | ||||
|                 ctx._stream = stream | ||||
|                 yield stream | ||||
| 
 | ||||
|         finally: | ||||
| 
 | ||||
|             # cancel the far end task on consumer close | ||||
|             # NOTE: this is a special case since we assume that if using | ||||
|             # this ``.open_fream_from()`` api, the stream is one a one | ||||
|  | @ -439,7 +471,7 @@ class Portal: | |||
| 
 | ||||
|             # XXX: should this always be done? | ||||
|             # await recv_chan.aclose() | ||||
|             self._streams.remove(rchan) | ||||
|             self._streams.remove(stream) | ||||
| 
 | ||||
|     # NOTE: impl is found in `._context`` mod to make | ||||
|     # reading/groking the details simpler code-org-wise. This | ||||
|  | @ -481,7 +513,7 @@ class LocalPortal: | |||
| async def open_portal( | ||||
| 
 | ||||
|     channel: Channel, | ||||
|     nursery: trio.Nursery|None = None, | ||||
|     tn: trio.Nursery|None = None, | ||||
|     start_msg_loop: bool = True, | ||||
|     shield: bool = False, | ||||
| 
 | ||||
|  | @ -489,15 +521,19 @@ async def open_portal( | |||
|     ''' | ||||
|     Open a ``Portal`` through the provided ``channel``. | ||||
| 
 | ||||
|     Spawns a background task to handle message processing (normally | ||||
|     done by the actor-runtime implicitly). | ||||
|     Spawns a background task to handle RPC processing, normally | ||||
|     done by the actor-runtime implicitly via a call to | ||||
|     `._rpc.process_messages()`. just after connection establishment. | ||||
| 
 | ||||
|     ''' | ||||
|     actor = current_actor() | ||||
|     assert actor | ||||
|     was_connected = False | ||||
|     was_connected: bool = False | ||||
| 
 | ||||
|     async with maybe_open_nursery(nursery, shield=shield) as nursery: | ||||
|     async with maybe_open_nursery( | ||||
|         tn, | ||||
|         shield=shield, | ||||
|     ) as tn: | ||||
| 
 | ||||
|         if not channel.connected(): | ||||
|             await channel.connect() | ||||
|  | @ -509,7 +545,7 @@ async def open_portal( | |||
|         msg_loop_cs: trio.CancelScope|None = None | ||||
|         if start_msg_loop: | ||||
|             from ._runtime import process_messages | ||||
|             msg_loop_cs = await nursery.start( | ||||
|             msg_loop_cs = await tn.start( | ||||
|                 partial( | ||||
|                     process_messages, | ||||
|                     actor, | ||||
|  | @ -526,12 +562,10 @@ async def open_portal( | |||
|             await portal.aclose() | ||||
| 
 | ||||
|             if was_connected: | ||||
|                 # gracefully signal remote channel-msg loop | ||||
|                 await channel.send(None) | ||||
|                 # await channel.aclose() | ||||
|                 await channel.aclose() | ||||
| 
 | ||||
|             # cancel background msg loop task | ||||
|             if msg_loop_cs: | ||||
|             if msg_loop_cs is not None: | ||||
|                 msg_loop_cs.cancel() | ||||
| 
 | ||||
|             nursery.cancel_scope.cancel() | ||||
|             tn.cancel_scope.cancel() | ||||
|  |  | |||
|  | @ -18,9 +18,10 @@ | |||
| Root actor runtime ignition(s). | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from functools import partial | ||||
| import importlib | ||||
| import inspect | ||||
| import logging | ||||
| import os | ||||
| import signal | ||||
|  | @ -60,7 +61,7 @@ _default_lo_addrs: list[tuple[str, int]] = [( | |||
| logger = log.get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| @asynccontextmanager | ||||
| @acm | ||||
| async def open_root_actor( | ||||
| 
 | ||||
|     *, | ||||
|  | @ -79,7 +80,7 @@ async def open_root_actor( | |||
| 
 | ||||
|     # enables the multi-process debugger support | ||||
|     debug_mode: bool = False, | ||||
|     maybe_enable_greenback: bool = False,  # `.pause_from_sync()/breakpoint()` support | ||||
|     maybe_enable_greenback: bool = True,  # `.pause_from_sync()/breakpoint()` support | ||||
|     enable_stack_on_sig: bool = False, | ||||
| 
 | ||||
|     # internal logging | ||||
|  | @ -92,11 +93,27 @@ async def open_root_actor( | |||
|     # and that this call creates it. | ||||
|     ensure_registry: bool = False, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
| 
 | ||||
|     # XXX, proxied directly to `.devx._debug._maybe_enter_pm()` | ||||
|     # for REPL-entry logic. | ||||
|     debug_filter: Callable[ | ||||
|         [BaseException|BaseExceptionGroup], | ||||
|         bool, | ||||
|     ] = lambda err: not is_multi_cancelled(err), | ||||
| 
 | ||||
|     # TODO, a way for actors to augment passing derived | ||||
|     # read-only state to sublayers? | ||||
|     # extra_rt_vars: dict|None = None, | ||||
| 
 | ||||
| ) -> Actor: | ||||
|     ''' | ||||
|     Runtime init entry point for ``tractor``. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     _debug.hide_runtime_frames() | ||||
| 
 | ||||
|     # TODO: stick this in a `@cm` defined in `devx._debug`? | ||||
|     # | ||||
|     # Override the global debugger hook to make it play nice with | ||||
|  | @ -110,20 +127,28 @@ async def open_root_actor( | |||
|     if ( | ||||
|         debug_mode | ||||
|         and maybe_enable_greenback | ||||
|         and await _debug.maybe_init_greenback( | ||||
|         and ( | ||||
|             maybe_mod := await _debug.maybe_init_greenback( | ||||
|                 raise_not_found=False, | ||||
|             ) | ||||
|     ): | ||||
|         os.environ['PYTHONBREAKPOINT'] = ( | ||||
|             'tractor.devx._debug.pause_from_sync' | ||||
|         ) | ||||
|     ): | ||||
|         logger.info( | ||||
|             f'Found `greenback` installed @ {maybe_mod}\n' | ||||
|             'Enabling `tractor.pause_from_sync()` support!\n' | ||||
|         ) | ||||
|         os.environ['PYTHONBREAKPOINT'] = ( | ||||
|             'tractor.devx._debug._sync_pause_from_builtin' | ||||
|         ) | ||||
|         _state._runtime_vars['use_greenback'] = True | ||||
| 
 | ||||
|     else: | ||||
|         # TODO: disable `breakpoint()` by default (without | ||||
|         # `greenback`) since it will break any multi-actor | ||||
|         # usage by a clobbered TTY's stdstreams! | ||||
|         def block_bps(*args, **kwargs): | ||||
|             raise RuntimeError( | ||||
|                 'Trying to use `breakpoint()` eh?\n' | ||||
|                 'Trying to use `breakpoint()` eh?\n\n' | ||||
|                 'Welp, `tractor` blocks `breakpoint()` built-in calls by default!\n' | ||||
|                 'If you need to use it please install `greenback` and set ' | ||||
|                 '`debug_mode=True` when opening the runtime ' | ||||
|  | @ -131,11 +156,13 @@ async def open_root_actor( | |||
|             ) | ||||
| 
 | ||||
|         sys.breakpointhook = block_bps | ||||
|         # os.environ['PYTHONBREAKPOINT'] = None | ||||
|         # lol ok, | ||||
|         # https://docs.python.org/3/library/sys.html#sys.breakpointhook | ||||
|         os.environ['PYTHONBREAKPOINT'] = "0" | ||||
| 
 | ||||
|     # attempt to retreive ``trio``'s sigint handler and stash it | ||||
|     # on our debugger lock state. | ||||
|     _debug.Lock._trio_handler = signal.getsignal(signal.SIGINT) | ||||
|     _debug.DebugStatus._trio_handler = signal.getsignal(signal.SIGINT) | ||||
| 
 | ||||
|     # mark top most level process as root actor | ||||
|     _state._runtime_vars['_is_root'] = True | ||||
|  | @ -201,6 +228,7 @@ async def open_root_actor( | |||
|         ): | ||||
|             loglevel = 'PDB' | ||||
| 
 | ||||
| 
 | ||||
|     elif debug_mode: | ||||
|         raise RuntimeError( | ||||
|             "Debug mode is only supported for the `trio` backend!" | ||||
|  | @ -216,14 +244,8 @@ async def open_root_actor( | |||
|         and | ||||
|         enable_stack_on_sig | ||||
|     ): | ||||
|         try: | ||||
|             logger.info('Enabling `stackscope` traces on SIGUSR1') | ||||
|             from .devx import enable_stack_on_sig | ||||
|         from .devx._stackscope import enable_stack_on_sig | ||||
|         enable_stack_on_sig() | ||||
|         except ImportError: | ||||
|             logger.warning( | ||||
|                 '`stackscope` not installed for use in debug mode!' | ||||
|             ) | ||||
| 
 | ||||
|     # closed into below ping task-func | ||||
|     ponged_addrs: list[tuple[str, int]] = [] | ||||
|  | @ -254,7 +276,9 @@ async def open_root_actor( | |||
| 
 | ||||
|         except OSError: | ||||
|             # TODO: make this a "discovery" log level? | ||||
|             logger.warning(f'No actor registry found @ {addr}') | ||||
|             logger.info( | ||||
|                 f'No actor registry found @ {addr}\n' | ||||
|             ) | ||||
| 
 | ||||
|     async with trio.open_nursery() as tn: | ||||
|         for addr in registry_addrs: | ||||
|  | @ -268,7 +292,6 @@ async def open_root_actor( | |||
|     # Create a new local root-actor instance which IS NOT THE | ||||
|     # REGISTRAR | ||||
|     if ponged_addrs: | ||||
| 
 | ||||
|         if ensure_registry: | ||||
|             raise RuntimeError( | ||||
|                  f'Failed to open `{name}`@{ponged_addrs}: ' | ||||
|  | @ -318,6 +341,10 @@ async def open_root_actor( | |||
|             loglevel=loglevel, | ||||
|             enable_modules=enable_modules, | ||||
|         ) | ||||
|         # XXX, in case the root actor runtime was actually run from | ||||
|         # `tractor.to_asyncio.run_as_asyncio_guest()` and NOt | ||||
|         # `.trio.run()`. | ||||
|         actor._infected_aio = _state._runtime_vars['_is_infected_aio'] | ||||
| 
 | ||||
|     # Start up main task set via core actor-runtime nurseries. | ||||
|     try: | ||||
|  | @ -355,19 +382,30 @@ async def open_root_actor( | |||
|             ) | ||||
|             try: | ||||
|                 yield actor | ||||
| 
 | ||||
|             except ( | ||||
|                 Exception, | ||||
|                 BaseExceptionGroup, | ||||
|             ) as err: | ||||
| 
 | ||||
|                 entered: bool = await _debug._maybe_enter_pm(err) | ||||
|                 # XXX NOTE XXX see equiv note inside | ||||
|                 # `._runtime.Actor._stream_handler()` where in the | ||||
|                 # non-root or root-that-opened-this-mahually case we | ||||
|                 # wait for the local actor-nursery to exit before | ||||
|                 # exiting the transport channel handler. | ||||
|                 entered: bool = await _debug._maybe_enter_pm( | ||||
|                     err, | ||||
|                     api_frame=inspect.currentframe(), | ||||
|                     debug_filter=debug_filter, | ||||
|                 ) | ||||
| 
 | ||||
|                 if ( | ||||
|                     not entered | ||||
|                     and | ||||
|                     not is_multi_cancelled(err) | ||||
|                     not is_multi_cancelled( | ||||
|                         err, | ||||
|                     ) | ||||
|                 ): | ||||
|                     logger.exception('Root actor crashed:\n') | ||||
|                     logger.exception('Root actor crashed\n') | ||||
| 
 | ||||
|                 # ALWAYS re-raise any error bubbled up from the | ||||
|                 # runtime! | ||||
|  | @ -392,14 +430,20 @@ async def open_root_actor( | |||
|         _state._last_actor_terminated = actor | ||||
| 
 | ||||
|         # restore built-in `breakpoint()` hook state | ||||
|         if debug_mode: | ||||
|         if ( | ||||
|             debug_mode | ||||
|             and | ||||
|             maybe_enable_greenback | ||||
|         ): | ||||
|             if builtin_bp_handler is not None: | ||||
|                 sys.breakpointhook = builtin_bp_handler | ||||
| 
 | ||||
|             if orig_bp_path is not None: | ||||
|                 os.environ['PYTHONBREAKPOINT'] = orig_bp_path | ||||
| 
 | ||||
|             else: | ||||
|                 # clear env back to having no entry | ||||
|                 os.environ.pop('PYTHONBREAKPOINT') | ||||
|                 os.environ.pop('PYTHONBREAKPOINT', None) | ||||
| 
 | ||||
|         logger.runtime("Root actor terminated") | ||||
| 
 | ||||
|  |  | |||
							
								
								
									
										843
									
								
								tractor/_rpc.py
								
								
								
								
							
							
						
						
									
										843
									
								
								tractor/_rpc.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -43,12 +43,16 @@ from tractor._state import ( | |||
|     is_main_process, | ||||
|     is_root_process, | ||||
|     debug_mode, | ||||
|     _runtime_vars, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| from tractor._portal import Portal | ||||
| from tractor._runtime import Actor | ||||
| from tractor._entry import _mp_main | ||||
| from tractor._exceptions import ActorFailure | ||||
| from tractor.msg.types import ( | ||||
|     SpawnSpec, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|  | @ -139,11 +143,13 @@ async def exhaust_portal( | |||
|     ''' | ||||
|     __tracebackhide__ = True | ||||
|     try: | ||||
|         log.debug(f"Waiting on final result from {actor.uid}") | ||||
|         log.debug( | ||||
|             f'Waiting on final result from {actor.uid}' | ||||
|         ) | ||||
| 
 | ||||
|         # XXX: streams should never be reaped here since they should | ||||
|         # always be established and shutdown using a context manager api | ||||
|         final: Any = await portal.result() | ||||
|         final: Any = await portal.wait_for_result() | ||||
| 
 | ||||
|     except ( | ||||
|         Exception, | ||||
|  | @ -192,7 +198,10 @@ async def cancel_on_completion( | |||
|     # if this call errors we store the exception for later | ||||
|     # in ``errors`` which will be reraised inside | ||||
|     # an exception group and we still send out a cancel request | ||||
|     result: Any|Exception = await exhaust_portal(portal, actor) | ||||
|     result: Any|Exception = await exhaust_portal( | ||||
|         portal, | ||||
|         actor, | ||||
|     ) | ||||
|     if isinstance(result, Exception): | ||||
|         errors[actor.uid]: Exception = result | ||||
|         log.cancel( | ||||
|  | @ -214,8 +223,8 @@ async def cancel_on_completion( | |||
| 
 | ||||
| async def hard_kill( | ||||
|     proc: trio.Process, | ||||
|     terminate_after: int = 1.6, | ||||
| 
 | ||||
|     terminate_after: int = 1.6, | ||||
|     # NOTE: for mucking with `.pause()`-ing inside the runtime | ||||
|     # whilst also hacking on it XD | ||||
|     # terminate_after: int = 99999, | ||||
|  | @ -241,7 +250,8 @@ async def hard_kill( | |||
| 
 | ||||
|     ''' | ||||
|     log.cancel( | ||||
|         'Terminating sub-proc:\n' | ||||
|         'Terminating sub-proc\n' | ||||
|         f'>x)\n' | ||||
|         f' |_{proc}\n' | ||||
|     ) | ||||
|     # NOTE: this timeout used to do nothing since we were shielding | ||||
|  | @ -288,14 +298,13 @@ async def hard_kill( | |||
|         log.critical( | ||||
|             # 'Well, the #ZOMBIE_LORD_IS_HERE# to collect\n' | ||||
|             '#T-800 deployed to collect zombie B0\n' | ||||
|             f'|\n' | ||||
|             f'>x)\n' | ||||
|             f' |_{proc}\n' | ||||
|         ) | ||||
|         proc.kill() | ||||
| 
 | ||||
| 
 | ||||
| async def soft_kill( | ||||
| 
 | ||||
|     proc: ProcessType, | ||||
|     wait_func: Callable[ | ||||
|         [ProcessType], | ||||
|  | @ -318,13 +327,26 @@ async def soft_kill( | |||
|     uid: tuple[str, str] = portal.channel.uid | ||||
|     try: | ||||
|         log.cancel( | ||||
|             'Soft killing sub-actor via `Portal.cancel_actor()`\n' | ||||
|             'Soft killing sub-actor via portal request\n' | ||||
|             f'c)> {portal.chan.uid}\n' | ||||
|             f' |_{proc}\n' | ||||
|         ) | ||||
|         # wait on sub-proc to signal termination | ||||
|         await wait_func(proc) | ||||
| 
 | ||||
|     except trio.Cancelled: | ||||
|         with trio.CancelScope(shield=True): | ||||
|             await maybe_wait_for_debugger( | ||||
|                 child_in_debug=_runtime_vars.get( | ||||
|                     '_debug_mode', False | ||||
|                 ), | ||||
|                 header_msg=( | ||||
|                     'Delaying `soft_kill()` subproc reaper while debugger locked..\n' | ||||
|                 ), | ||||
|                 # TODO: need a diff value then default? | ||||
|                 # poll_steps=9999999, | ||||
|             ) | ||||
| 
 | ||||
|         # if cancelled during a soft wait, cancel the child | ||||
|         # actor before entering the hard reap sequence | ||||
|         # below. This means we try to do a graceful teardown | ||||
|  | @ -452,10 +474,9 @@ async def trio_proc( | |||
|     proc: trio.Process|None = None | ||||
|     try: | ||||
|         try: | ||||
|             # TODO: needs ``trio_typing`` patch? | ||||
|             proc = await trio.lowlevel.open_process(spawn_cmd) | ||||
|             proc: trio.Process = await trio.lowlevel.open_process(spawn_cmd) | ||||
|             log.runtime( | ||||
|                 'Started new sub-proc\n' | ||||
|                 'Started new child\n' | ||||
|                 f'|_{proc}\n' | ||||
|             ) | ||||
| 
 | ||||
|  | @ -493,14 +514,17 @@ async def trio_proc( | |||
|             portal, | ||||
|         ) | ||||
| 
 | ||||
|         # send additional init params | ||||
|         await chan.send({ | ||||
|             '_parent_main_data': subactor._parent_main_data, | ||||
|             'enable_modules': subactor.enable_modules, | ||||
|             'reg_addrs': subactor.reg_addrs, | ||||
|             'bind_addrs': bind_addrs, | ||||
|             '_runtime_vars': _runtime_vars, | ||||
|         }) | ||||
|         # send a "spawning specification" which configures the | ||||
|         # initial runtime state of the child. | ||||
|         await chan.send( | ||||
|             SpawnSpec( | ||||
|                 _parent_main_data=subactor._parent_main_data, | ||||
|                 enable_modules=subactor.enable_modules, | ||||
|                 reg_addrs=subactor.reg_addrs, | ||||
|                 bind_addrs=bind_addrs, | ||||
|                 _runtime_vars=_runtime_vars, | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         # track subactor in current nursery | ||||
|         curr_actor: Actor = current_actor() | ||||
|  | @ -534,8 +558,9 @@ async def trio_proc( | |||
|             # cancel result waiter that may have been spawned in | ||||
|             # tandem if not done already | ||||
|             log.cancel( | ||||
|                 'Cancelling existing result waiter task for ' | ||||
|                 f'{subactor.uid}' | ||||
|                 'Cancelling portal result reaper task\n' | ||||
|                 f'>c)\n' | ||||
|                 f' |_{subactor.uid}\n' | ||||
|             ) | ||||
|             nursery.cancel_scope.cancel() | ||||
| 
 | ||||
|  | @ -544,9 +569,13 @@ async def trio_proc( | |||
|         # allowed! Do this **after** cancellation/teardown to avoid | ||||
|         # killing the process too early. | ||||
|         if proc: | ||||
|             log.cancel(f'Hard reap sequence starting for {subactor.uid}') | ||||
|             with trio.CancelScope(shield=True): | ||||
|             log.cancel( | ||||
|                 f'Hard reap sequence starting for subactor\n' | ||||
|                 f'>x)\n' | ||||
|                 f' |_{subactor}@{subactor.uid}\n' | ||||
|             ) | ||||
| 
 | ||||
|             with trio.CancelScope(shield=True): | ||||
|                 # don't clobber an ongoing pdb | ||||
|                 if cancelled_during_spawn: | ||||
|                     # Try again to avoid TTY clobbering. | ||||
|  |  | |||
|  | @ -19,13 +19,19 @@ Per process state | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| from trio.lowlevel import current_task | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
| 
 | ||||
| 
 | ||||
| _current_actor: Actor|None = None  # type: ignore # noqa | ||||
|  | @ -38,7 +44,9 @@ _runtime_vars: dict[str, Any] = { | |||
|     '_root_mailbox': (None, None), | ||||
|     '_registry_addrs': [], | ||||
| 
 | ||||
|     # for `breakpoint()` support | ||||
|     '_is_infected_aio': False, | ||||
| 
 | ||||
|     # for `tractor.pause_from_sync()` & `breakpoint()` support | ||||
|     'use_greenback': False, | ||||
| } | ||||
| 
 | ||||
|  | @ -64,9 +72,10 @@ def current_actor( | |||
|     ''' | ||||
|     if ( | ||||
|         err_on_no_runtime | ||||
|         and _current_actor is None | ||||
|         and | ||||
|         _current_actor is None | ||||
|     ): | ||||
|         msg: str = 'No local actor has been initialized yet' | ||||
|         msg: str = 'No local actor has been initialized yet?\n' | ||||
|         from ._exceptions import NoRuntime | ||||
| 
 | ||||
|         if last := last_actor(): | ||||
|  | @ -79,8 +88,8 @@ def current_actor( | |||
|         # this process. | ||||
|         else: | ||||
|             msg += ( | ||||
|                 'No last actor found?\n' | ||||
|                 'Did you forget to open one of:\n\n' | ||||
|                 # 'No last actor found?\n' | ||||
|                 '\nDid you forget to call one of,\n' | ||||
|                 '- `tractor.open_root_actor()`\n' | ||||
|                 '- `tractor.open_nursery()`\n' | ||||
|             ) | ||||
|  | @ -110,3 +119,26 @@ def debug_mode() -> bool: | |||
| 
 | ||||
| def is_root_process() -> bool: | ||||
|     return _runtime_vars['_is_root'] | ||||
| 
 | ||||
| 
 | ||||
| _ctxvar_Context: ContextVar[Context] = ContextVar( | ||||
|     'ipc_context', | ||||
|     default=None, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def current_ipc_ctx( | ||||
|     error_on_not_set: bool = False, | ||||
| ) -> Context|None: | ||||
|     ctx: Context = _ctxvar_Context.get() | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx | ||||
|         and error_on_not_set | ||||
|     ): | ||||
|         from ._exceptions import InternalError | ||||
|         raise InternalError( | ||||
|             'No IPC context has been allocated for this task yet?\n' | ||||
|             f'|_{current_task()}\n' | ||||
|         ) | ||||
|     return ctx | ||||
|  |  | |||
|  | @ -26,6 +26,7 @@ import inspect | |||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncGenerator, | ||||
|     Callable, | ||||
|     AsyncIterator, | ||||
|     TYPE_CHECKING, | ||||
|  | @ -35,17 +36,25 @@ import warnings | |||
| import trio | ||||
| 
 | ||||
| from ._exceptions import ( | ||||
|     _raise_from_no_key_in_msg, | ||||
|     ContextCancelled, | ||||
|     RemoteActorError, | ||||
| ) | ||||
| from .log import get_logger | ||||
| from .trionics import ( | ||||
|     broadcast_receiver, | ||||
|     BroadcastReceiver, | ||||
| ) | ||||
| from tractor.msg import ( | ||||
|     # Return, | ||||
|     # Stop, | ||||
|     MsgType, | ||||
|     Yield, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._runtime import Actor | ||||
|     from ._context import Context | ||||
|     from ._ipc import Channel | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -59,10 +68,10 @@ log = get_logger(__name__) | |||
| class MsgStream(trio.abc.Channel): | ||||
|     ''' | ||||
|     A bidirectional message stream for receiving logically sequenced | ||||
|     values over an inter-actor IPC ``Channel``. | ||||
|     values over an inter-actor IPC `Channel`. | ||||
| 
 | ||||
|     This is the type returned to a local task which entered either | ||||
|     ``Portal.open_stream_from()`` or ``Context.open_stream()``. | ||||
|     `Portal.open_stream_from()` or `Context.open_stream()`. | ||||
| 
 | ||||
|     Termination rules: | ||||
| 
 | ||||
|  | @ -89,35 +98,44 @@ class MsgStream(trio.abc.Channel): | |||
|         self._eoc: bool|trio.EndOfChannel = False | ||||
|         self._closed: bool|trio.ClosedResourceError = False | ||||
| 
 | ||||
|     @property | ||||
|     def ctx(self) -> Context: | ||||
|         ''' | ||||
|         A read-only ref to this stream's inter-actor-task `Context`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx | ||||
| 
 | ||||
|     @property | ||||
|     def chan(self) -> Channel: | ||||
|         ''' | ||||
|         Ref to the containing `Context`'s transport `Channel`. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._ctx.chan | ||||
| 
 | ||||
|     # TODO: could we make this a direct method bind to `PldRx`? | ||||
|     # -> receive_nowait = PldRx.recv_pld | ||||
|     # |_ means latter would have to accept `MsgStream`-as-`self`? | ||||
|     #  => should be fine as long as, | ||||
|     #  -[ ] both define `._rx_chan` | ||||
|     #  -[ ] .ctx is bound into `PldRx` using a `@cm`? | ||||
|     # | ||||
|     # delegate directly to underlying mem channel | ||||
|     def receive_nowait( | ||||
|         self, | ||||
|         allow_msg_keys: list[str] = ['yield'], | ||||
|         expect_msg: MsgType = Yield, | ||||
|     ): | ||||
|         msg: dict = self._rx_chan.receive_nowait() | ||||
|         for ( | ||||
|             i, | ||||
|             key, | ||||
|         ) in enumerate(allow_msg_keys): | ||||
|             try: | ||||
|                 return msg[key] | ||||
|             except KeyError as kerr: | ||||
|                 if i < (len(allow_msg_keys) - 1): | ||||
|                     continue | ||||
| 
 | ||||
|                 _raise_from_no_key_in_msg( | ||||
|                     ctx=self._ctx, | ||||
|                     msg=msg, | ||||
|                     src_err=kerr, | ||||
|                     log=log, | ||||
|                     expect_key=key, | ||||
|                     stream=self, | ||||
|         ctx: Context = self._ctx | ||||
|         return ctx._pld_rx.recv_pld_nowait( | ||||
|             ipc=self, | ||||
|             expect_msg=expect_msg, | ||||
|         ) | ||||
| 
 | ||||
|     async def receive( | ||||
|         self, | ||||
| 
 | ||||
|         hide_tb: bool = True, | ||||
|         hide_tb: bool = False, | ||||
|     ): | ||||
|         ''' | ||||
|         Receive a single msg from the IPC transport, the next in | ||||
|  | @ -127,9 +145,8 @@ class MsgStream(trio.abc.Channel): | |||
|         ''' | ||||
|         __tracebackhide__: bool = hide_tb | ||||
| 
 | ||||
|         # NOTE: `trio.ReceiveChannel` implements | ||||
|         # EOC handling as follows (aka uses it | ||||
|         # to gracefully exit async for loops): | ||||
|         # NOTE FYI: `trio.ReceiveChannel` implements EOC handling as | ||||
|         # follows (aka uses it to gracefully exit async for loops): | ||||
|         # | ||||
|         # async def __anext__(self) -> ReceiveType: | ||||
|         #     try: | ||||
|  | @ -147,62 +164,29 @@ class MsgStream(trio.abc.Channel): | |||
| 
 | ||||
|         src_err: Exception|None = None  # orig tb | ||||
|         try: | ||||
|             try: | ||||
|                 msg = await self._rx_chan.receive() | ||||
|                 return msg['yield'] | ||||
| 
 | ||||
|             except KeyError as kerr: | ||||
|                 src_err = kerr | ||||
| 
 | ||||
|                 # NOTE: may raise any of the below error types | ||||
|                 # includg EoC when a 'stop' msg is found. | ||||
|                 _raise_from_no_key_in_msg( | ||||
|                     ctx=self._ctx, | ||||
|                     msg=msg, | ||||
|                     src_err=kerr, | ||||
|                     log=log, | ||||
|                     expect_key='yield', | ||||
|                     stream=self, | ||||
|                 ) | ||||
|             ctx: Context = self._ctx | ||||
|             return await ctx._pld_rx.recv_pld(ipc=self) | ||||
| 
 | ||||
|         # XXX: the stream terminates on either of: | ||||
|         # - via `self._rx_chan.receive()` raising  after manual closure | ||||
|         #   by the rpc-runtime OR, | ||||
|         # - via a received `{'stop': ...}` msg from remote side. | ||||
|         #   |_ NOTE: previously this was triggered by calling | ||||
|         #   ``._rx_chan.aclose()`` on the send side of the channel inside | ||||
|         #   `Actor._push_result()`, but now the 'stop' message handling | ||||
|         #   has been put just above inside `_raise_from_no_key_in_msg()`. | ||||
|         except ( | ||||
|             trio.EndOfChannel, | ||||
|         ) as eoc: | ||||
|             src_err = eoc | ||||
|         # - `self._rx_chan.receive()` raising  after manual closure | ||||
|         #   by the rpc-runtime, | ||||
|         #   OR | ||||
|         # - via a `Stop`-msg received from remote peer task. | ||||
|         #   NOTE | ||||
|         #   |_ previously this was triggered by calling | ||||
|         #   ``._rx_chan.aclose()`` on the send side of the channel | ||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' | ||||
|         #   message handling gets delegated to `PldRFx.recv_pld()` | ||||
|         #   internals. | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             # a graceful stream finished signal | ||||
|             self._eoc = eoc | ||||
|             src_err = eoc | ||||
| 
 | ||||
|             # TODO: Locally, we want to close this stream gracefully, by | ||||
|             # terminating any local consumers tasks deterministically. | ||||
|             # Once we have broadcast support, we **don't** want to be | ||||
|             # closing this stream and not flushing a final value to | ||||
|             # remaining (clone) consumers who may not have been | ||||
|             # scheduled to receive it yet. | ||||
|             # try: | ||||
|             #     maybe_err_msg_or_res: dict = self._rx_chan.receive_nowait() | ||||
|             #     if maybe_err_msg_or_res: | ||||
|             #         log.warning( | ||||
|             #             'Discarding un-processed msg:\n' | ||||
|             #             f'{maybe_err_msg_or_res}' | ||||
|             #         ) | ||||
|             # except trio.WouldBlock: | ||||
|             #     # no queued msgs that might be another remote | ||||
|             #     # error, so just raise the original EoC | ||||
|             #     pass | ||||
| 
 | ||||
|             # raise eoc | ||||
| 
 | ||||
|         # a ``ClosedResourceError`` indicates that the internal | ||||
|         # feeder memory receive channel was closed likely by the | ||||
|         # runtime after the associated transport-channel | ||||
|         # disconnected or broke. | ||||
|         # a `ClosedResourceError` indicates that the internal feeder | ||||
|         # memory receive channel was closed likely by the runtime | ||||
|         # after the associated transport-channel disconnected or | ||||
|         # broke. | ||||
|         except trio.ClosedResourceError as cre:  # by self._rx_chan.receive() | ||||
|             src_err = cre | ||||
|             log.warning( | ||||
|  | @ -214,47 +198,57 @@ class MsgStream(trio.abc.Channel): | |||
|         # terminated and signal this local iterator to stop | ||||
|         drained: list[Exception|dict] = await self.aclose() | ||||
|         if drained: | ||||
|             # ?TODO? pass these to the `._ctx._drained_msgs: deque` | ||||
|             # and then iterate them as part of any `.wait_for_result()` call? | ||||
|             # | ||||
|             # from .devx import pause | ||||
|             # await pause() | ||||
|             log.warning( | ||||
|                 'Drained context msgs during closure:\n' | ||||
|                 'Drained context msgs during closure\n\n' | ||||
|                 f'{drained}' | ||||
|             ) | ||||
|         # TODO: pass these to the `._ctx._drained_msgs: deque` | ||||
|         # and then iterate them as part of any `.result()` call? | ||||
| 
 | ||||
|         # NOTE XXX: if the context was cancelled or remote-errored | ||||
|         # but we received the stream close msg first, we | ||||
|         # probably want to instead raise the remote error | ||||
|         # over the end-of-stream connection error since likely | ||||
|         # the remote error was the source cause? | ||||
|         ctx: Context = self._ctx | ||||
|         # ctx: Context = self._ctx | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|             from_src_exc=src_err, | ||||
|         ) | ||||
| 
 | ||||
|         # propagate any error but hide low-level frame details | ||||
|         # from the caller by default for debug noise reduction. | ||||
|         # propagate any error but hide low-level frame details from | ||||
|         # the caller by default for console/debug-REPL noise | ||||
|         # reduction. | ||||
|         if ( | ||||
|             hide_tb | ||||
|             and ( | ||||
| 
 | ||||
|             # XXX NOTE XXX don't reraise on certain | ||||
|             # stream-specific internal error types like, | ||||
|                 # XXX NOTE special conditions: don't reraise on | ||||
|                 # certain stream-specific internal error types like, | ||||
|                 # | ||||
|                 # - `trio.EoC` since we want to use the exact instance | ||||
|                 #   to ensure that it is the error that bubbles upward | ||||
|                 #   for silent absorption by `Context.open_stream()`. | ||||
|             and not self._eoc | ||||
|                 not self._eoc | ||||
| 
 | ||||
|             # - `RemoteActorError` (or `ContextCancelled`) if it gets | ||||
|             #   raised from `_raise_from_no_key_in_msg()` since we | ||||
|             #   want the same (as the above bullet) for any | ||||
|             #   `.open_context()` block bubbled error raised by | ||||
|             #   any nearby ctx API remote-failures. | ||||
|             # and not isinstance(src_err, RemoteActorError) | ||||
|                 # - `RemoteActorError` (or subtypes like ctxc) | ||||
|                 #    since we want to present the error as though it is | ||||
|                 #    "sourced" directly from this `.receive()` call and | ||||
|                 #    generally NOT include the stack frames raised from | ||||
|                 #    inside the `PldRx` and/or the transport stack | ||||
|                 #    layers. | ||||
|                 or isinstance(src_err, RemoteActorError) | ||||
|             ) | ||||
|         ): | ||||
|             raise type(src_err)(*src_err.args) from src_err | ||||
|         else: | ||||
|             # for any non-graceful-EOC we want to NOT hide this frame | ||||
|             if not self._eoc: | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|             raise src_err | ||||
| 
 | ||||
|     async def aclose(self) -> list[Exception|dict]: | ||||
|  | @ -292,7 +286,8 @@ class MsgStream(trio.abc.Channel): | |||
|         while not drained: | ||||
|             try: | ||||
|                 maybe_final_msg = self.receive_nowait( | ||||
|                     allow_msg_keys=['yield', 'return'], | ||||
|                     # allow_msgs=[Yield, Return], | ||||
|                     expect_msg=Yield, | ||||
|                 ) | ||||
|                 if maybe_final_msg: | ||||
|                     log.debug( | ||||
|  | @ -377,14 +372,15 @@ class MsgStream(trio.abc.Channel): | |||
|         #         await rx_chan.aclose() | ||||
| 
 | ||||
|         if not self._eoc: | ||||
|             log.cancel( | ||||
|                 'Stream closed before it received an EoC?\n' | ||||
|                 'Setting eoc manually..\n..' | ||||
|             ) | ||||
|             self._eoc: bool = trio.EndOfChannel( | ||||
|                 f'Context stream closed by {self._ctx.side}\n' | ||||
|             message: str = ( | ||||
|                 f'Stream self-closed by {self._ctx.side!r}-side before EoC\n' | ||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC | ||||
|                 f'x}}>\n' | ||||
|                 f'|_{self}\n' | ||||
|             ) | ||||
|             log.cancel(message) | ||||
|             self._eoc = trio.EndOfChannel(message) | ||||
| 
 | ||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||
|         # => NO, DEFINITELY NOT! <= | ||||
|         # if we're a bi-dir ``MsgStream`` BECAUSE this same | ||||
|  | @ -469,6 +465,9 @@ class MsgStream(trio.abc.Channel): | |||
|                 self, | ||||
|                 # use memory channel size by default | ||||
|                 self._rx_chan._state.max_buffer_size,  # type: ignore | ||||
| 
 | ||||
|                 # TODO: can remove this kwarg right since | ||||
|                 # by default behaviour is to do this anyway? | ||||
|                 receive_afunc=self.receive, | ||||
|             ) | ||||
| 
 | ||||
|  | @ -515,11 +514,10 @@ class MsgStream(trio.abc.Channel): | |||
| 
 | ||||
|         try: | ||||
|             await self._ctx.chan.send( | ||||
|                 payload={ | ||||
|                     'yield': data, | ||||
|                     'cid': self._ctx.cid, | ||||
|                 }, | ||||
|                 # hide_tb=hide_tb, | ||||
|                 payload=Yield( | ||||
|                     cid=self._ctx.cid, | ||||
|                     pld=data, | ||||
|                 ), | ||||
|             ) | ||||
|         except ( | ||||
|             trio.ClosedResourceError, | ||||
|  | @ -533,6 +531,224 @@ class MsgStream(trio.abc.Channel): | |||
|             else: | ||||
|                 raise | ||||
| 
 | ||||
|     # TODO: msg capability context api1 | ||||
|     # @acm | ||||
|     # async def enable_msg_caps( | ||||
|     #     self, | ||||
|     #     msg_subtypes: Union[ | ||||
|     #         list[list[Struct]], | ||||
|     #         Protocol,   # hypothetical type that wraps a msg set | ||||
|     #     ], | ||||
|     # ) -> tuple[Callable, Callable]:  # payload enc, dec pair | ||||
|     #     ... | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_stream_from_ctx( | ||||
|     ctx: Context, | ||||
|     allow_overruns: bool|None = False, | ||||
|     msg_buffer_size: int|None = None, | ||||
| 
 | ||||
| ) -> AsyncGenerator[MsgStream, None]: | ||||
|     ''' | ||||
|     Open a `MsgStream`, a bi-directional msg transport dialog | ||||
|     connected to the cross-actor peer task for an IPC `Context`. | ||||
| 
 | ||||
|     This context manager must be entered in both the "parent" (task | ||||
|     which entered `Portal.open_context()`) and "child" (RPC task | ||||
|     which is decorated by `@context`) tasks for the stream to | ||||
|     logically be considered "open"; if one side begins sending to an | ||||
|     un-opened peer, depending on policy config, msgs will either be | ||||
|     queued until the other side opens and/or a `StreamOverrun` will | ||||
|     (eventually) be raised. | ||||
| 
 | ||||
|                          ------ - ------ | ||||
| 
 | ||||
|     Runtime semantics design: | ||||
| 
 | ||||
|     A `MsgStream` session adheres to "one-shot use" semantics, | ||||
|     meaning if you close the scope it **can not** be "re-opened". | ||||
| 
 | ||||
|     Instead you must re-establish a new surrounding RPC `Context` | ||||
|     (RTC: remote task context?) using `Portal.open_context()`. | ||||
| 
 | ||||
|     In the future this *design choice* may need to be changed but | ||||
|     currently there seems to be no obvious reason to support such | ||||
|     semantics.. | ||||
| 
 | ||||
|     - "pausing a stream" can be supported with a message implemented | ||||
|       by the `tractor` application dev. | ||||
| 
 | ||||
|     - any remote error will normally require a restart of the entire | ||||
|       `trio.Task`'s scope due to the nature of `trio`'s cancellation | ||||
|       (`CancelScope`) system and semantics (level triggered). | ||||
| 
 | ||||
|     ''' | ||||
|     actor: Actor = ctx._actor | ||||
| 
 | ||||
|     # If the surrounding context has been cancelled by some | ||||
|     # task with a handle to THIS, we error here immediately | ||||
|     # since it likely means the surrounding lexical-scope has | ||||
|     # errored, been `trio.Cancelled` or at the least | ||||
|     # `Context.cancel()` was called by some task. | ||||
|     if ctx._cancel_called: | ||||
| 
 | ||||
|         # XXX NOTE: ALWAYS RAISE any remote error here even if | ||||
|         # it's an expected `ContextCancelled` due to a local | ||||
|         # task having called `.cancel()`! | ||||
|         # | ||||
|         # WHY: we expect the error to always bubble up to the | ||||
|         # surrounding `Portal.open_context()` call and be | ||||
|         # absorbed there (silently) and we DO NOT want to | ||||
|         # actually try to stream - a cancel msg was already | ||||
|         # sent to the other side! | ||||
|         ctx.maybe_raise( | ||||
|             raise_ctxc_from_self_call=True, | ||||
|         ) | ||||
|         # NOTE: this is diff then calling | ||||
|         # `._maybe_raise_remote_err()` specifically | ||||
|         # because we want to raise a ctxc on any task entering this `.open_stream()` | ||||
|         # AFTER cancellation was already been requested, | ||||
|         # we DO NOT want to absorb any ctxc ACK silently! | ||||
|         # if ctx._remote_error: | ||||
|         #     raise ctx._remote_error | ||||
| 
 | ||||
|         # XXX NOTE: if no `ContextCancelled` has been responded | ||||
|         # back from the other side (yet), we raise a different | ||||
|         # runtime error indicating that this task's usage of | ||||
|         # `Context.cancel()` and then `.open_stream()` is WRONG! | ||||
|         task: str = trio.lowlevel.current_task().name | ||||
|         raise RuntimeError( | ||||
|             'Stream opened after `Context.cancel()` called..?\n' | ||||
|             f'task: {actor.uid[0]}:{task}\n' | ||||
|             f'{ctx}' | ||||
|         ) | ||||
| 
 | ||||
|     if ( | ||||
|         not ctx._portal | ||||
|         and not ctx._started_called | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             'Context.started()` must be called before opening a stream' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: in one way streaming this only happens on the | ||||
|     # parent-ctx-task side (on the side that calls | ||||
|     # `Actor.start_remote_task()`) so if you try to send | ||||
|     # a stop from the caller to the callee in the | ||||
|     # single-direction-stream case you'll get a lookup error | ||||
|     # currently. | ||||
|     ctx: Context = actor.get_context( | ||||
|         chan=ctx.chan, | ||||
|         cid=ctx.cid, | ||||
|         nsf=ctx._nsf, | ||||
|         # side=ctx.side, | ||||
| 
 | ||||
|         msg_buffer_size=msg_buffer_size, | ||||
|         allow_overruns=allow_overruns, | ||||
|     ) | ||||
|     ctx._allow_overruns: bool = allow_overruns | ||||
|     assert ctx is ctx | ||||
| 
 | ||||
|     # XXX: If the underlying channel feeder receive mem chan has | ||||
|     # been closed then likely client code has already exited | ||||
|     # a ``.open_stream()`` block prior or there was some other | ||||
|     # unanticipated error or cancellation from ``trio``. | ||||
| 
 | ||||
|     if ctx._rx_chan._closed: | ||||
|         raise trio.ClosedResourceError( | ||||
|             'The underlying channel for this stream was already closed!\n' | ||||
|         ) | ||||
| 
 | ||||
|     # NOTE: implicitly this will call `MsgStream.aclose()` on | ||||
|     # `.__aexit__()` due to stream's parent `Channel` type! | ||||
|     # | ||||
|     # XXX NOTE XXX: ensures the stream is "one-shot use", | ||||
|     # which specifically means that on exit, | ||||
|     # - signal ``trio.EndOfChannel``/``StopAsyncIteration`` to | ||||
|     #   the far end indicating that the caller exited | ||||
|     #   the streaming context purposefully by letting | ||||
|     #   the exit block exec. | ||||
|     # - this is diff from the cancel/error case where | ||||
|     #   a cancel request from this side or an error | ||||
|     #   should be sent to the far end indicating the | ||||
|     #   stream WAS NOT just closed normally/gracefully. | ||||
|     async with MsgStream( | ||||
|         ctx=ctx, | ||||
|         rx_chan=ctx._rx_chan, | ||||
|     ) as stream: | ||||
| 
 | ||||
|         # NOTE: we track all existing streams per portal for | ||||
|         # the purposes of attempting graceful closes on runtime | ||||
|         # cancel requests. | ||||
|         if ctx._portal: | ||||
|             ctx._portal._streams.add(stream) | ||||
| 
 | ||||
|         try: | ||||
|             ctx._stream_opened: bool = True | ||||
|             ctx._stream = stream | ||||
| 
 | ||||
|             # XXX: do we need this? | ||||
|             # ensure we aren't cancelled before yielding the stream | ||||
|             # await trio.lowlevel.checkpoint() | ||||
|             yield stream | ||||
| 
 | ||||
|             # XXX: (MEGA IMPORTANT) if this is a root opened process we | ||||
|             # wait for any immediate child in debug before popping the | ||||
|             # context from the runtime msg loop otherwise inside | ||||
|             # ``Actor._deliver_ctx_payload()`` the msg will be discarded and in | ||||
|             # the case where that msg is global debugger unlock (via | ||||
|             # a "stop" msg for a stream), this can result in a deadlock | ||||
|             # where the root is waiting on the lock to clear but the | ||||
|             # child has already cleared it and clobbered IPC. | ||||
|             # | ||||
|             # await maybe_wait_for_debugger() | ||||
| 
 | ||||
|             # XXX TODO: pretty sure this isn't needed (see | ||||
|             # note above this block) AND will result in | ||||
|             # a double `.send_stop()` call. The only reason to | ||||
|             # put it here would be to due with "order" in | ||||
|             # terms of raising any remote error (as per | ||||
|             # directly below) or bc the stream's | ||||
|             # `.__aexit__()` block might not get run | ||||
|             # (doubtful)? Either way if we did put this back | ||||
|             # in we also need a state var to avoid the double | ||||
|             # stop-msg send.. | ||||
|             # | ||||
|             # await stream.aclose() | ||||
| 
 | ||||
|         # NOTE: absorb and do not raise any | ||||
|         # EoC received from the other side such that | ||||
|         # it is not raised inside the surrounding | ||||
|         # context block's scope! | ||||
|         except trio.EndOfChannel as eoc: | ||||
|             if ( | ||||
|                 eoc | ||||
|                 and | ||||
|                 stream.closed | ||||
|             ): | ||||
|                 # sanity, can remove? | ||||
|                 assert eoc is stream._eoc | ||||
| 
 | ||||
|                 log.warning( | ||||
|                     'Stream was terminated by EoC\n\n' | ||||
|                     # NOTE: won't show the error <Type> but | ||||
|                     # does show txt followed by IPC msg. | ||||
|                     f'{str(eoc)}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         finally: | ||||
|             if ctx._portal: | ||||
|                 try: | ||||
|                     ctx._portal._streams.remove(stream) | ||||
|                 except KeyError: | ||||
|                     log.warning( | ||||
|                         f'Stream was already destroyed?\n' | ||||
|                         f'actor: {ctx.chan.uid}\n' | ||||
|                         f'ctx id: {ctx.cid}' | ||||
|                     ) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def stream(func: Callable) -> Callable: | ||||
|     ''' | ||||
|  | @ -541,7 +757,7 @@ def stream(func: Callable) -> Callable: | |||
|     ''' | ||||
|     # TODO: apply whatever solution ``mypy`` ends up picking for this: | ||||
|     # https://github.com/python/mypy/issues/2087#issuecomment-769266912 | ||||
|     func._tractor_stream_function = True  # type: ignore | ||||
|     func._tractor_stream_function: bool = True  # type: ignore | ||||
| 
 | ||||
|     sig = inspect.signature(func) | ||||
|     params = sig.parameters | ||||
|  |  | |||
|  | @ -80,15 +80,19 @@ class ActorNursery: | |||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         # TODO: maybe def these as fields of a struct looking type? | ||||
|         actor: Actor, | ||||
|         ria_nursery: trio.Nursery, | ||||
|         da_nursery: trio.Nursery, | ||||
|         errors: dict[tuple[str, str], BaseException], | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # self.supervisor = supervisor  # TODO | ||||
|         self._actor: Actor = actor | ||||
|         self._ria_nursery = ria_nursery | ||||
| 
 | ||||
|         # TODO: rename to `._tn` for our conventional "task-nursery" | ||||
|         self._da_nursery = da_nursery | ||||
| 
 | ||||
|         self._children: dict[ | ||||
|             tuple[str, str], | ||||
|             tuple[ | ||||
|  | @ -97,13 +101,12 @@ class ActorNursery: | |||
|                 Portal | None, | ||||
|             ] | ||||
|         ] = {} | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|         self.cancelled: bool = False | ||||
|         self._join_procs = trio.Event() | ||||
|         self._at_least_one_child_in_debug: bool = False | ||||
|         self.errors = errors | ||||
|         self._scope_error: BaseException|None = None | ||||
|         self.exited = trio.Event() | ||||
| 
 | ||||
|         # NOTE: when no explicit call is made to | ||||
|  | @ -114,28 +117,48 @@ class ActorNursery: | |||
|         # and syncing purposes to any actor opened nurseries. | ||||
|         self._implicit_runtime_started: bool = False | ||||
| 
 | ||||
|         # TODO: remove the `.run_in_actor()` API and thus this 2ndary | ||||
|         # nursery when that API get's moved outside this primitive! | ||||
|         self._ria_nursery = ria_nursery | ||||
|         # portals spawned with ``run_in_actor()`` are | ||||
|         # cancelled when their "main" result arrives | ||||
|         self._cancel_after_result_on_exit: set = set() | ||||
| 
 | ||||
|     async def start_actor( | ||||
|         self, | ||||
|         name: str, | ||||
| 
 | ||||
|         *, | ||||
| 
 | ||||
|         bind_addrs: list[tuple[str, int]] = [_default_bind_addr], | ||||
|         rpc_module_paths: list[str]|None = None, | ||||
|         enable_modules: list[str]|None = None, | ||||
|         loglevel: str|None = None,  # set log level per subactor | ||||
|         nursery: trio.Nursery|None = None, | ||||
|         debug_mode: bool|None = None, | ||||
|         infect_asyncio: bool = False, | ||||
| 
 | ||||
|         # TODO: ideally we can rm this once we no longer have | ||||
|         # a `._ria_nursery` since the dependent APIs have been | ||||
|         # removed! | ||||
|         nursery: trio.Nursery|None = None, | ||||
| 
 | ||||
|     ) -> Portal: | ||||
|         ''' | ||||
|         Start a (daemon) actor: an process that has no designated | ||||
|         "main task" besides the runtime. | ||||
| 
 | ||||
|         ''' | ||||
|         loglevel = loglevel or self._actor.loglevel or get_loglevel() | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         loglevel: str = ( | ||||
|             loglevel | ||||
|             or self._actor.loglevel | ||||
|             or get_loglevel() | ||||
|         ) | ||||
| 
 | ||||
|         # configure and pass runtime state | ||||
|         _rtv = _state._runtime_vars.copy() | ||||
|         _rtv['_is_root'] = False | ||||
|         _rtv['_is_infected_aio'] = infect_asyncio | ||||
| 
 | ||||
|         # allow setting debug policy per actor | ||||
|         if debug_mode is not None: | ||||
|  | @ -184,6 +207,14 @@ class ActorNursery: | |||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|     # TODO: DEPRECATE THIS: | ||||
|     # -[ ] impl instead as a hilevel wrapper on | ||||
|     #   top of a `@context` style invocation. | ||||
|     #  |_ dynamic @context decoration on child side | ||||
|     #  |_ implicit `Portal.open_context() as (ctx, first):` | ||||
|     #    and `return first` on parent side. | ||||
|     #  |_ mention how it's similar to `trio-parallel` API? | ||||
|     # -[ ] use @api_frame on the wrapper | ||||
|     async def run_in_actor( | ||||
|         self, | ||||
| 
 | ||||
|  | @ -209,13 +240,14 @@ class ActorNursery: | |||
|         the actor is terminated. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         mod_path: str = fn.__module__ | ||||
| 
 | ||||
|         if name is None: | ||||
|             # use the explicit function name if not provided | ||||
|             name = fn.__name__ | ||||
| 
 | ||||
|         portal = await self.start_actor( | ||||
|         portal: Portal = await self.start_actor( | ||||
|             name, | ||||
|             enable_modules=[mod_path] + ( | ||||
|                 enable_modules or rpc_module_paths or [] | ||||
|  | @ -244,19 +276,24 @@ class ActorNursery: | |||
|         ) | ||||
|         return portal | ||||
| 
 | ||||
|     # @api_frame | ||||
|     async def cancel( | ||||
|         self, | ||||
|         hard_kill: bool = False, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cancel this nursery by instructing each subactor to cancel | ||||
|         itself and wait for all subactors to terminate. | ||||
|         Cancel this actor-nursery by instructing each subactor's | ||||
|         runtime to cancel and wait for all underlying sub-processes | ||||
|         to terminate. | ||||
| 
 | ||||
|         If ``hard_killl`` is set to ``True`` then kill the processes | ||||
|         directly without any far end graceful ``trio`` cancellation. | ||||
|         If `hard_kill` is set then kill the processes directly using | ||||
|         the spawning-backend's API/OS-machinery without any attempt | ||||
|         at (graceful) `trio`-style cancellation using our | ||||
|         `Actor.cancel()`. | ||||
| 
 | ||||
|         ''' | ||||
|         __runtimeframe__: int = 1  # noqa | ||||
|         self.cancelled = True | ||||
| 
 | ||||
|         # TODO: impl a repr for spawn more compact | ||||
|  | @ -337,11 +374,15 @@ class ActorNursery: | |||
| @acm | ||||
| async def _open_and_supervise_one_cancels_all_nursery( | ||||
|     actor: Actor, | ||||
|     tb_hide: bool = False, | ||||
| 
 | ||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||
| 
 | ||||
|     # TODO: yay or nay? | ||||
|     __tracebackhide__ = True | ||||
|     # normally don't need to show user by default | ||||
|     __tracebackhide__: bool = tb_hide | ||||
| 
 | ||||
|     outer_err: BaseException|None = None | ||||
|     inner_err: BaseException|None = None | ||||
| 
 | ||||
|     # the collection of errors retreived from spawned sub-actors | ||||
|     errors: dict[tuple[str, str], BaseException] = {} | ||||
|  | @ -351,7 +392,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|     # handling errors that are generated by the inner nursery in | ||||
|     # a supervisor strategy **before** blocking indefinitely to wait for | ||||
|     # actors spawned in "daemon mode" (aka started using | ||||
|     # ``ActorNursery.start_actor()``). | ||||
|     # `ActorNursery.start_actor()`). | ||||
| 
 | ||||
|     # errors from this daemon actor nursery bubble up to caller | ||||
|     async with trio.open_nursery() as da_nursery: | ||||
|  | @ -386,7 +427,8 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                     ) | ||||
|                     an._join_procs.set() | ||||
| 
 | ||||
|                 except BaseException as inner_err: | ||||
|                 except BaseException as _inner_err: | ||||
|                     inner_err = _inner_err | ||||
|                     errors[actor.uid] = inner_err | ||||
| 
 | ||||
|                     # If we error in the root but the debugger is | ||||
|  | @ -464,8 +506,10 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             Exception, | ||||
|             BaseExceptionGroup, | ||||
|             trio.Cancelled | ||||
|         ) as _outer_err: | ||||
|             outer_err = _outer_err | ||||
| 
 | ||||
|         ) as err: | ||||
|             an._scope_error = outer_err or inner_err | ||||
| 
 | ||||
|             # XXX: yet another guard before allowing the cancel | ||||
|             # sequence in case a (single) child is in debug. | ||||
|  | @ -480,7 +524,7 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|             if an._children: | ||||
|                 log.cancel( | ||||
|                     'Actor-nursery cancelling due error type:\n' | ||||
|                     f'{err}\n' | ||||
|                     f'{outer_err}\n' | ||||
|                 ) | ||||
|                 with trio.CancelScope(shield=True): | ||||
|                     await an.cancel() | ||||
|  | @ -507,11 +551,19 @@ async def _open_and_supervise_one_cancels_all_nursery( | |||
|                 else: | ||||
|                     raise list(errors.values())[0] | ||||
| 
 | ||||
|             # show frame on any (likely) internal error | ||||
|             if ( | ||||
|                 not an.cancelled | ||||
|                 and an._scope_error | ||||
|             ): | ||||
|                 __tracebackhide__: bool = False | ||||
| 
 | ||||
|         # da_nursery scope end - nursery checkpoint | ||||
|     # final exit | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| # @api_frame | ||||
| async def open_nursery( | ||||
|     **kwargs, | ||||
| 
 | ||||
|  | @ -531,6 +583,7 @@ async def open_nursery( | |||
|     which cancellation scopes correspond to each spawned subactor set. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = True | ||||
|     implicit_runtime: bool = False | ||||
|     actor: Actor = current_actor(err_on_no_runtime=False) | ||||
|     an: ActorNursery|None = None | ||||
|  | @ -581,13 +634,25 @@ async def open_nursery( | |||
|                 an.exited.set() | ||||
| 
 | ||||
|     finally: | ||||
|         # show frame on any internal runtime-scope error | ||||
|         if ( | ||||
|             an | ||||
|             and not an.cancelled | ||||
|             and an._scope_error | ||||
|         ): | ||||
|             __tracebackhide__: bool = False | ||||
| 
 | ||||
|         msg: str = ( | ||||
|             'Actor-nursery exited\n' | ||||
|             f'|_{an}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # shutdown runtime if it was started | ||||
|         if implicit_runtime: | ||||
|             # shutdown runtime if it was started and report noisly | ||||
|             # that we're did so. | ||||
|             msg += '=> Shutting down actor runtime <=\n' | ||||
| 
 | ||||
|             log.info(msg) | ||||
| 
 | ||||
|         else: | ||||
|             # keep noise low during std operation. | ||||
|             log.runtime(msg) | ||||
|  |  | |||
|  | @ -26,6 +26,9 @@ import tractor | |||
| from .pytest import ( | ||||
|     tractor_test as tractor_test | ||||
| ) | ||||
| from .fault_simulation import ( | ||||
|     break_ipc as break_ipc, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def repodir() -> pathlib.Path: | ||||
|  | @ -51,6 +54,25 @@ def examples_dir() -> pathlib.Path: | |||
|     return repodir() / 'examples' | ||||
| 
 | ||||
| 
 | ||||
| def mk_cmd( | ||||
|     ex_name: str, | ||||
|     exs_subpath: str = 'debugging', | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Generate a shell command suitable to pass to ``pexpect.spawn()``. | ||||
| 
 | ||||
|     ''' | ||||
|     script_path: pathlib.Path = ( | ||||
|         examples_dir() | ||||
|         / exs_subpath | ||||
|         / f'{ex_name}.py' | ||||
|     ) | ||||
|     return ' '.join([ | ||||
|         'python', | ||||
|         str(script_path) | ||||
|     ]) | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def expect_ctxc( | ||||
|     yay: bool, | ||||
|  |  | |||
|  | @ -0,0 +1,92 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| `pytest` utils helpers and plugins for testing `tractor`'s runtime | ||||
| and applications. | ||||
| 
 | ||||
| ''' | ||||
| 
 | ||||
| from tractor import ( | ||||
|     MsgStream, | ||||
| ) | ||||
| 
 | ||||
| async def break_ipc( | ||||
|     stream: MsgStream, | ||||
|     method: str|None = None, | ||||
|     pre_close: bool = False, | ||||
| 
 | ||||
|     def_method: str = 'socket_close', | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     XXX: close the channel right after an error is raised | ||||
|     purposely breaking the IPC transport to make sure the parent | ||||
|     doesn't get stuck in debug or hang on the connection join. | ||||
|     this more or less simulates an infinite msg-receive hang on | ||||
|     the other end. | ||||
| 
 | ||||
|     ''' | ||||
|     # close channel via IPC prot msging before | ||||
|     # any transport breakage | ||||
|     if pre_close: | ||||
|         await stream.aclose() | ||||
| 
 | ||||
|     method: str = method or def_method | ||||
|     print( | ||||
|         '#################################\n' | ||||
|         'Simulating CHILD-side IPC BREAK!\n' | ||||
|         f'method: {method}\n' | ||||
|         f'pre `.aclose()`: {pre_close}\n' | ||||
|         '#################################\n' | ||||
|     ) | ||||
| 
 | ||||
|     match method: | ||||
|         case 'socket_close': | ||||
|             await stream._ctx.chan.transport.stream.aclose() | ||||
| 
 | ||||
|         case 'socket_eof': | ||||
|             # NOTE: `trio` does the following underneath this | ||||
|             # call in `src/trio/_highlevel_socket.py`: | ||||
|             # `Stream.socket.shutdown(tsocket.SHUT_WR)` | ||||
|             await stream._ctx.chan.transport.stream.send_eof() | ||||
| 
 | ||||
|         # TODO: remove since now this will be invalid with our | ||||
|         # new typed msg spec? | ||||
|         # case 'msg': | ||||
|         #     await stream._ctx.chan.send(None) | ||||
| 
 | ||||
|         # TODO: the actual real-world simulated cases like | ||||
|         # transport layer hangs and/or lower layer 2-gens type | ||||
|         # scenarios.. | ||||
|         # | ||||
|         # -[ ] already have some issues for this general testing | ||||
|         # area: | ||||
|         #  - https://github.com/goodboy/tractor/issues/97 | ||||
|         #  - https://github.com/goodboy/tractor/issues/124 | ||||
|         #   - PR from @guille: | ||||
|         #     https://github.com/goodboy/tractor/pull/149 | ||||
|         # case 'hang': | ||||
|         # TODO: framework research: | ||||
|         # | ||||
|         # - https://github.com/GuoTengda1993/pynetem | ||||
|         # - https://github.com/shopify/toxiproxy | ||||
|         # - https://manpages.ubuntu.com/manpages/trusty/man1/wirefilter.1.html | ||||
| 
 | ||||
|         case _: | ||||
|             raise RuntimeError( | ||||
|                 f'IPC break method unsupported: {method}' | ||||
|             ) | ||||
|  | @ -26,22 +26,24 @@ from ._debug import ( | |||
|     breakpoint as breakpoint, | ||||
|     pause as pause, | ||||
|     pause_from_sync as pause_from_sync, | ||||
|     shield_sigint_handler as shield_sigint_handler, | ||||
|     MultiActorPdb as MultiActorPdb, | ||||
|     sigint_shield as sigint_shield, | ||||
|     open_crash_handler as open_crash_handler, | ||||
|     maybe_open_crash_handler as maybe_open_crash_handler, | ||||
|     maybe_init_greenback as maybe_init_greenback, | ||||
|     post_mortem as post_mortem, | ||||
|     mk_pdb as mk_pdb, | ||||
| ) | ||||
| from ._stackscope import ( | ||||
|     enable_stack_on_sig as enable_stack_on_sig, | ||||
| ) | ||||
| # from .pformat import ( | ||||
| #     add_div as add_div, | ||||
| #     pformat_caller_frame as pformat_caller_frame, | ||||
| #     pformat_boxed_tb as pformat_boxed_tb, | ||||
| # ) | ||||
| from .pformat import ( | ||||
|     add_div as add_div, | ||||
|     pformat_caller_frame as pformat_caller_frame, | ||||
|     pformat_boxed_tb as pformat_boxed_tb, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO, move this to a new `.devx._pdbp` mod? | ||||
| def _enable_readline_feats() -> str: | ||||
|     ''' | ||||
|     Handle `readline` when compiled with `libedit` to avoid breaking | ||||
|  | @ -73,5 +75,4 @@ def _enable_readline_feats() -> str: | |||
|         return 'readline' | ||||
| 
 | ||||
| 
 | ||||
| # TODO, move this to a new `.devx._pdbp` mod? | ||||
| _enable_readline_feats() | ||||
|  |  | |||
|  | @ -1,177 +0,0 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Tools for code-object annotation, introspection and mutation | ||||
| as it pertains to improving the grok-ability of our runtime! | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import inspect | ||||
| # import msgspec | ||||
| # from pprint import pformat | ||||
| from types import ( | ||||
|     FrameType, | ||||
|     FunctionType, | ||||
|     MethodType, | ||||
|     # CodeType, | ||||
| ) | ||||
| from typing import ( | ||||
|     # Any, | ||||
|     Callable, | ||||
|     # TYPE_CHECKING, | ||||
|     Type, | ||||
| ) | ||||
| 
 | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
|     NamespacePath, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: yeah, i don't love this and we should prolly just | ||||
| # write a decorator that actually keeps a stupid ref to the func | ||||
| # obj.. | ||||
| def get_class_from_frame(fr: FrameType) -> ( | ||||
|     FunctionType | ||||
|     |MethodType | ||||
| ): | ||||
|     ''' | ||||
|     Attempt to get the function (or method) reference | ||||
|     from a given `FrameType`. | ||||
| 
 | ||||
|     Verbatim from an SO: | ||||
|     https://stackoverflow.com/a/2220759 | ||||
| 
 | ||||
|     ''' | ||||
|     args, _, _, value_dict = inspect.getargvalues(fr) | ||||
| 
 | ||||
|     # we check the first parameter for the frame function is | ||||
|     # named 'self' | ||||
|     if ( | ||||
|         len(args) | ||||
|         and | ||||
|         # TODO: other cases for `@classmethod` etc..?) | ||||
|         args[0] == 'self' | ||||
|     ): | ||||
|         # in that case, 'self' will be referenced in value_dict | ||||
|         instance: object = value_dict.get('self') | ||||
|         if instance: | ||||
|           # return its class | ||||
|           return getattr( | ||||
|               instance, | ||||
|               '__class__', | ||||
|               None, | ||||
|           ) | ||||
| 
 | ||||
|     # return None otherwise | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| def func_ref_from_frame( | ||||
|     frame: FrameType, | ||||
| ) -> Callable: | ||||
|     func_name: str = frame.f_code.co_name | ||||
|     try: | ||||
|         return frame.f_globals[func_name] | ||||
|     except KeyError: | ||||
|         cls: Type|None = get_class_from_frame(frame) | ||||
|         if cls: | ||||
|             return getattr( | ||||
|                 cls, | ||||
|                 func_name, | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: move all this into new `.devx._code`! | ||||
| # -[ ] prolly create a `@runtime_api` dec? | ||||
| # -[ ] ^- make it capture and/or accept buncha optional | ||||
| #     meta-data like a fancier version of `@pdbp.hideframe`. | ||||
| # | ||||
| class CallerInfo(pretty_struct.Struct): | ||||
|     rt_fi: inspect.FrameInfo | ||||
|     call_frame: FrameType | ||||
| 
 | ||||
|     @property | ||||
|     def api_func_ref(self) -> Callable|None: | ||||
|         return func_ref_from_frame(self.rt_fi.frame) | ||||
| 
 | ||||
|     @property | ||||
|     def api_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = self.api_func_ref | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
|     @property | ||||
|     def caller_func_ref(self) -> Callable|None: | ||||
|         return func_ref_from_frame(self.call_frame) | ||||
| 
 | ||||
|     @property | ||||
|     def caller_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = self.caller_func_ref | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
| 
 | ||||
| def find_caller_info( | ||||
|     dunder_var: str = '__runtimeframe__', | ||||
|     iframes:int = 1, | ||||
|     check_frame_depth: bool = True, | ||||
| 
 | ||||
| ) -> CallerInfo|None: | ||||
|     ''' | ||||
|     Scan up the callstack for a frame with a `dunder_var: str` variable | ||||
|     and return the `iframes` frames above it. | ||||
| 
 | ||||
|     By default we scan for a `__runtimeframe__` scope var which | ||||
|     denotes a `tractor` API above which (one frame up) is "user | ||||
|     app code" which "called into" the `tractor` method or func. | ||||
| 
 | ||||
|     TODO: ex with `Portal.open_context()` | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: use this instead? | ||||
|     # https://docs.python.org/3/library/inspect.html#inspect.getouterframes | ||||
|     frames: list[inspect.FrameInfo] = inspect.stack() | ||||
|     for fi in frames: | ||||
|         assert ( | ||||
|             fi.function | ||||
|             == | ||||
|             fi.frame.f_code.co_name | ||||
|         ) | ||||
|         this_frame: FrameType = fi.frame | ||||
|         dunder_val: int|None = this_frame.f_locals.get(dunder_var) | ||||
|         if dunder_val: | ||||
|             go_up_iframes: int = ( | ||||
|                 dunder_val  # could be 0 or `True` i guess? | ||||
|                 or | ||||
|                 iframes | ||||
|             ) | ||||
|             rt_frame: FrameType = fi.frame | ||||
|             call_frame = rt_frame | ||||
|             for i in range(go_up_iframes): | ||||
|                 call_frame = call_frame.f_back | ||||
| 
 | ||||
|             return CallerInfo( | ||||
|                 rt_fi=fi, | ||||
|                 call_frame=call_frame, | ||||
|             ) | ||||
| 
 | ||||
|     return None | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,303 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Tools for code-object annotation, introspection and mutation | ||||
| as it pertains to improving the grok-ability of our runtime! | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| import inspect | ||||
| from types import ( | ||||
|     FrameType, | ||||
|     FunctionType, | ||||
|     MethodType, | ||||
|     # CodeType, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Type, | ||||
| ) | ||||
| 
 | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
|     NamespacePath, | ||||
| ) | ||||
| import wrapt | ||||
| 
 | ||||
| 
 | ||||
| # TODO: yeah, i don't love this and we should prolly just | ||||
| # write a decorator that actually keeps a stupid ref to the func | ||||
| # obj.. | ||||
| def get_class_from_frame(fr: FrameType) -> ( | ||||
|     FunctionType | ||||
|     |MethodType | ||||
| ): | ||||
|     ''' | ||||
|     Attempt to get the function (or method) reference | ||||
|     from a given `FrameType`. | ||||
| 
 | ||||
|     Verbatim from an SO: | ||||
|     https://stackoverflow.com/a/2220759 | ||||
| 
 | ||||
|     ''' | ||||
|     args, _, _, value_dict = inspect.getargvalues(fr) | ||||
| 
 | ||||
|     # we check the first parameter for the frame function is | ||||
|     # named 'self' | ||||
|     if ( | ||||
|         len(args) | ||||
|         and | ||||
|         # TODO: other cases for `@classmethod` etc..?) | ||||
|         args[0] == 'self' | ||||
|     ): | ||||
|         # in that case, 'self' will be referenced in value_dict | ||||
|         instance: object = value_dict.get('self') | ||||
|         if instance: | ||||
|           # return its class | ||||
|           return getattr( | ||||
|               instance, | ||||
|               '__class__', | ||||
|               None, | ||||
|           ) | ||||
| 
 | ||||
|     # return None otherwise | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| def get_ns_and_func_from_frame( | ||||
|     frame: FrameType, | ||||
| ) -> Callable: | ||||
|     ''' | ||||
|     Return the corresponding function object reference from | ||||
|     a `FrameType`, and return it and it's parent namespace `dict`. | ||||
| 
 | ||||
|     ''' | ||||
|     ns: dict[str, Any] | ||||
| 
 | ||||
|     # for a method, go up a frame and lookup the name in locals() | ||||
|     if '.' in (qualname := frame.f_code.co_qualname): | ||||
|         cls_name, _, func_name = qualname.partition('.') | ||||
|         ns = frame.f_back.f_locals[cls_name].__dict__ | ||||
| 
 | ||||
|     else: | ||||
|         func_name: str = frame.f_code.co_name | ||||
|         ns = frame.f_globals | ||||
| 
 | ||||
|     return ( | ||||
|         ns, | ||||
|         ns[func_name], | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def func_ref_from_frame( | ||||
|     frame: FrameType, | ||||
| ) -> Callable: | ||||
|     func_name: str = frame.f_code.co_name | ||||
|     try: | ||||
|         return frame.f_globals[func_name] | ||||
|     except KeyError: | ||||
|         cls: Type|None = get_class_from_frame(frame) | ||||
|         if cls: | ||||
|             return getattr( | ||||
|                 cls, | ||||
|                 func_name, | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| class CallerInfo(pretty_struct.Struct): | ||||
|     # https://docs.python.org/dev/reference/datamodel.html#frame-objects | ||||
|     # https://docs.python.org/dev/library/inspect.html#the-interpreter-stack | ||||
|     _api_frame: FrameType | ||||
| 
 | ||||
|     @property | ||||
|     def api_frame(self) -> FrameType: | ||||
|         try: | ||||
|             self._api_frame.clear() | ||||
|         except RuntimeError: | ||||
|             # log.warning( | ||||
|             print( | ||||
|                 f'Frame {self._api_frame} for {self.api_func} is still active!' | ||||
|             ) | ||||
| 
 | ||||
|         return self._api_frame | ||||
| 
 | ||||
|     _api_func: Callable | ||||
| 
 | ||||
|     @property | ||||
|     def api_func(self) -> Callable: | ||||
|         return self._api_func | ||||
| 
 | ||||
|     _caller_frames_up: int|None = 1 | ||||
|     _caller_frame: FrameType|None = None  # cached after first stack scan | ||||
| 
 | ||||
|     @property | ||||
|     def api_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = self.api_func | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
|     @property | ||||
|     def caller_frame(self) -> FrameType: | ||||
| 
 | ||||
|         # if not already cached, scan up stack explicitly by | ||||
|         # configured count. | ||||
|         if not self._caller_frame: | ||||
|             if self._caller_frames_up: | ||||
|                 for _ in range(self._caller_frames_up): | ||||
|                     caller_frame: FrameType|None = self.api_frame.f_back | ||||
| 
 | ||||
|                 if not caller_frame: | ||||
|                     raise ValueError( | ||||
|                         'No frame exists {self._caller_frames_up} up from\n' | ||||
|                         f'{self.api_frame} @ {self.api_nsp}\n' | ||||
|                     ) | ||||
| 
 | ||||
|             self._caller_frame = caller_frame | ||||
| 
 | ||||
|         return self._caller_frame | ||||
| 
 | ||||
|     @property | ||||
|     def caller_nsp(self) -> NamespacePath|None: | ||||
|         func: FunctionType = self.api_func | ||||
|         if func: | ||||
|             return NamespacePath.from_ref(func) | ||||
| 
 | ||||
|         return '<unknown>' | ||||
| 
 | ||||
| 
 | ||||
| def find_caller_info( | ||||
|     dunder_var: str = '__runtimeframe__', | ||||
|     iframes:int = 1, | ||||
|     check_frame_depth: bool = True, | ||||
| 
 | ||||
| ) -> CallerInfo|None: | ||||
|     ''' | ||||
|     Scan up the callstack for a frame with a `dunder_var: str` variable | ||||
|     and return the `iframes` frames above it. | ||||
| 
 | ||||
|     By default we scan for a `__runtimeframe__` scope var which | ||||
|     denotes a `tractor` API above which (one frame up) is "user | ||||
|     app code" which "called into" the `tractor` method or func. | ||||
| 
 | ||||
|     TODO: ex with `Portal.open_context()` | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: use this instead? | ||||
|     # https://docs.python.org/3/library/inspect.html#inspect.getouterframes | ||||
|     frames: list[inspect.FrameInfo] = inspect.stack() | ||||
|     for fi in frames: | ||||
|         assert ( | ||||
|             fi.function | ||||
|             == | ||||
|             fi.frame.f_code.co_name | ||||
|         ) | ||||
|         this_frame: FrameType = fi.frame | ||||
|         dunder_val: int|None = this_frame.f_locals.get(dunder_var) | ||||
|         if dunder_val: | ||||
|             go_up_iframes: int = ( | ||||
|                 dunder_val  # could be 0 or `True` i guess? | ||||
|                 or | ||||
|                 iframes | ||||
|             ) | ||||
|             rt_frame: FrameType = fi.frame | ||||
|             call_frame = rt_frame | ||||
|             for i in range(go_up_iframes): | ||||
|                 call_frame = call_frame.f_back | ||||
| 
 | ||||
|             return CallerInfo( | ||||
|                 _api_frame=rt_frame, | ||||
|                 _api_func=func_ref_from_frame(rt_frame), | ||||
|                 _caller_frames_up=go_up_iframes, | ||||
|             ) | ||||
| 
 | ||||
|     return None | ||||
| 
 | ||||
| 
 | ||||
| _frame2callerinfo_cache: dict[FrameType, CallerInfo] = {} | ||||
| 
 | ||||
| 
 | ||||
| # TODO: -[x] move all this into new `.devx._frame_stack`! | ||||
| # -[ ] consider rename to _callstack? | ||||
| # -[ ] prolly create a `@runtime_api` dec? | ||||
| #   |_ @api_frame seems better? | ||||
| # -[ ] ^- make it capture and/or accept buncha optional | ||||
| #     meta-data like a fancier version of `@pdbp.hideframe`. | ||||
| # | ||||
| def api_frame( | ||||
|     wrapped: Callable|None = None, | ||||
|     *, | ||||
|     caller_frames_up: int = 1, | ||||
| 
 | ||||
| ) -> Callable: | ||||
| 
 | ||||
|     # handle the decorator called WITHOUT () case, | ||||
|     # i.e. just @api_frame, NOT @api_frame(extra=<blah>) | ||||
|     if wrapped is None: | ||||
|         return partial( | ||||
|             api_frame, | ||||
|             caller_frames_up=caller_frames_up, | ||||
|         ) | ||||
| 
 | ||||
|     @wrapt.decorator | ||||
|     async def wrapper( | ||||
|         wrapped: Callable, | ||||
|         instance: object, | ||||
|         args: tuple, | ||||
|         kwargs: dict, | ||||
|     ): | ||||
|         # maybe cache the API frame for this call | ||||
|         global _frame2callerinfo_cache | ||||
|         this_frame: FrameType = inspect.currentframe() | ||||
|         api_frame: FrameType = this_frame.f_back | ||||
| 
 | ||||
|         if not _frame2callerinfo_cache.get(api_frame): | ||||
|             _frame2callerinfo_cache[api_frame] = CallerInfo( | ||||
|                 _api_frame=api_frame, | ||||
|                 _api_func=wrapped, | ||||
|                 _caller_frames_up=caller_frames_up, | ||||
|             ) | ||||
| 
 | ||||
|         return wrapped(*args, **kwargs) | ||||
| 
 | ||||
|     # annotate the function as a "api function", meaning it is | ||||
|     # a function for which the function above it in the call stack should be | ||||
|     # non-`tractor` code aka "user code". | ||||
|     # | ||||
|     # in the global frame cache for easy lookup from a given | ||||
|     # func-instance | ||||
|     wrapped._call_infos: dict[FrameType, CallerInfo] = _frame2callerinfo_cache | ||||
|     wrapped.__api_func__: bool = True | ||||
|     return wrapper(wrapped) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: something like this instead of the adhoc frame-unhiding | ||||
| # blocks all over the runtime!! XD | ||||
| # -[ ] ideally we can expect a certain error (set) and if something | ||||
| #     else is raised then all frames below the wrapped one will be | ||||
| #     un-hidden via `__tracebackhide__: bool = False`. | ||||
| # |_ might need to dynamically mutate the code objs like | ||||
| #    `pdbp.hideframe()` does? | ||||
| # -[ ] use this as a `@acm` decorator as introed in 3.10? | ||||
| # @acm | ||||
| # async def unhide_frame_when_not( | ||||
| #     error_set: set[BaseException], | ||||
| # ) -> TracebackType: | ||||
| #     ... | ||||
|  | @ -24,19 +24,32 @@ disjoint, parallel executing tasks in separate actors. | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| # from functools import partial | ||||
| from threading import ( | ||||
|     current_thread, | ||||
|     Thread, | ||||
|     RLock, | ||||
| ) | ||||
| import multiprocessing as mp | ||||
| from signal import ( | ||||
|     signal, | ||||
|     getsignal, | ||||
|     SIGUSR1, | ||||
|     SIGINT, | ||||
| ) | ||||
| # import traceback | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Callable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| import traceback | ||||
| from typing import TYPE_CHECKING | ||||
| 
 | ||||
| import trio | ||||
| from tractor import ( | ||||
|     _state, | ||||
|     log as logmod, | ||||
| ) | ||||
| from tractor.devx import _debug | ||||
| 
 | ||||
| log = logmod.get_logger(__name__) | ||||
| 
 | ||||
|  | @ -51,26 +64,68 @@ if TYPE_CHECKING: | |||
| 
 | ||||
| @trio.lowlevel.disable_ki_protection | ||||
| def dump_task_tree() -> None: | ||||
|     import stackscope | ||||
|     from tractor.log import get_console_log | ||||
|     ''' | ||||
|     Do a classic `stackscope.extract()` task-tree dump to console at | ||||
|     `.devx()` level. | ||||
| 
 | ||||
|     ''' | ||||
|     import stackscope | ||||
|     tree_str: str = str( | ||||
|         stackscope.extract( | ||||
|             trio.lowlevel.current_root_task(), | ||||
|             recurse_child_tasks=True | ||||
|         ) | ||||
|     ) | ||||
|     log = get_console_log( | ||||
|         name=__name__, | ||||
|         level='cancel', | ||||
|     ) | ||||
|     actor: Actor = _state.current_actor() | ||||
|     thr: Thread = current_thread() | ||||
|     current_sigint_handler: Callable = getsignal(SIGINT) | ||||
|     if ( | ||||
|         current_sigint_handler | ||||
|         is not | ||||
|         _debug.DebugStatus._trio_handler | ||||
|     ): | ||||
|         sigint_handler_report: str = ( | ||||
|             'The default `trio` SIGINT handler was replaced?!' | ||||
|         ) | ||||
|     else: | ||||
|         sigint_handler_report: str = ( | ||||
|             'The default `trio` SIGINT handler is in use?!' | ||||
|         ) | ||||
| 
 | ||||
|     # sclang symbology | ||||
|     # |_<object> | ||||
|     # |_(Task/Thread/Process/Actor | ||||
|     # |_{Supervisor/Scope | ||||
|     # |_[Storage/Memory/IPC-Stream/Data-Struct | ||||
| 
 | ||||
|     log.devx( | ||||
|         f'Dumping `stackscope` tree for actor\n' | ||||
|         f'{actor.name}: {actor}\n' | ||||
|         f' |_{mp.current_process()}\n\n' | ||||
|         f'{tree_str}\n' | ||||
|         f'(>: {actor.uid!r}\n' | ||||
|         f' |_{mp.current_process()}\n' | ||||
|         f'   |_{thr}\n' | ||||
|         f'     |_{actor}\n' | ||||
|         f'\n' | ||||
|         f'{sigint_handler_report}\n' | ||||
|         f'signal.getsignal(SIGINT) -> {current_sigint_handler!r}\n' | ||||
|         # f'\n' | ||||
|         # start-of-trace-tree delimiter (mostly for testing) | ||||
|         # f'------ {actor.uid!r} ------\n' | ||||
|         f'\n' | ||||
|         f'------ start-of-{actor.uid!r} ------\n' | ||||
|         f'|\n' | ||||
|         f'{tree_str}' | ||||
|         # end-of-trace-tree delimiter (mostly for testing) | ||||
|         f'|\n' | ||||
|         f'|_____ end-of-{actor.uid!r} ______\n' | ||||
|     ) | ||||
|     # TODO: can remove this right? | ||||
|     # -[ ] was original code from author | ||||
|     # | ||||
|     # print( | ||||
|     #     'DUMPING FROM PRINT\n' | ||||
|     #     + | ||||
|     #     content | ||||
|     # ) | ||||
|     # import logging | ||||
|     # try: | ||||
|     #     with open("/dev/tty", "w") as tty: | ||||
|  | @ -80,58 +135,130 @@ def dump_task_tree() -> None: | |||
|     #         "task_tree" | ||||
|     #     ).exception("Error printing task tree") | ||||
| 
 | ||||
| _handler_lock = RLock() | ||||
| _tree_dumped: bool = False | ||||
| 
 | ||||
| def signal_handler( | ||||
| 
 | ||||
| def dump_tree_on_sig( | ||||
|     sig: int, | ||||
|     frame: object, | ||||
| 
 | ||||
|     relay_to_subs: bool = True, | ||||
| 
 | ||||
| ) -> None: | ||||
|     global _tree_dumped, _handler_lock | ||||
|     with _handler_lock: | ||||
|         # if _tree_dumped: | ||||
|         #     log.warning( | ||||
|         #         'Already dumped for this actor...??' | ||||
|         #     ) | ||||
|         #     return | ||||
| 
 | ||||
|         _tree_dumped = True | ||||
| 
 | ||||
|         # actor: Actor = _state.current_actor() | ||||
|         log.devx( | ||||
|             'Trying to dump `stackscope` tree..\n' | ||||
|         ) | ||||
|         try: | ||||
|         trio.lowlevel.current_trio_token( | ||||
|         ).run_sync_soon(dump_task_tree) | ||||
|             dump_task_tree() | ||||
|             # await actor._service_n.start_soon( | ||||
|             #     partial( | ||||
|             #         trio.to_thread.run_sync, | ||||
|             #         dump_task_tree, | ||||
|             #     ) | ||||
|             # ) | ||||
|             # trio.lowlevel.current_trio_token().run_sync_soon( | ||||
|             #     dump_task_tree | ||||
|             # ) | ||||
| 
 | ||||
|         except RuntimeError: | ||||
|             log.exception( | ||||
|                 'Failed to dump `stackscope` tree..\n' | ||||
|             ) | ||||
|             # not in async context -- print a normal traceback | ||||
|         traceback.print_stack() | ||||
|             # traceback.print_stack() | ||||
|             raise | ||||
| 
 | ||||
|         except BaseException: | ||||
|             log.exception( | ||||
|                 'Failed to dump `stackscope` tree..\n' | ||||
|             ) | ||||
|             raise | ||||
| 
 | ||||
|         # log.devx( | ||||
|         #     'Supposedly we dumped just fine..?' | ||||
|         # ) | ||||
| 
 | ||||
|     if not relay_to_subs: | ||||
|         return | ||||
| 
 | ||||
|     an: ActorNursery | ||||
|     for an in _state.current_actor()._actoruid2nursery.values(): | ||||
| 
 | ||||
|         subproc: ProcessType | ||||
|         subactor: Actor | ||||
|         for subactor, subproc, _ in an._children.values(): | ||||
|             log.devx( | ||||
|             log.warning( | ||||
|                 f'Relaying `SIGUSR1`[{sig}] to sub-actor\n' | ||||
|                 f'{subactor}\n' | ||||
|                 f' |_{subproc}\n' | ||||
|             ) | ||||
| 
 | ||||
|             if isinstance(subproc, trio.Process): | ||||
|             # bc of course stdlib can't have a std API.. XD | ||||
|             match subproc: | ||||
|                 case trio.Process(): | ||||
|                     subproc.send_signal(sig) | ||||
| 
 | ||||
|             elif isinstance(subproc, mp.Process): | ||||
|                 case mp.Process(): | ||||
|                     subproc._send_signal(sig) | ||||
| 
 | ||||
| 
 | ||||
| def enable_stack_on_sig( | ||||
|     sig: int = SIGUSR1 | ||||
| ) -> None: | ||||
|     sig: int = SIGUSR1, | ||||
| ) -> ModuleType: | ||||
|     ''' | ||||
|     Enable `stackscope` tracing on reception of a signal; by | ||||
|     default this is SIGUSR1. | ||||
| 
 | ||||
|     HOT TIP: a task/ctx-tree dump can be triggered from a shell with | ||||
|     fancy cmds. | ||||
| 
 | ||||
|     For ex. from `bash` using `pgrep` and cmd-sustitution | ||||
|     (https://www.gnu.org/software/bash/manual/bash.html#Command-Substitution) | ||||
|     you could use: | ||||
| 
 | ||||
|     >> kill -SIGUSR1 $(pgrep -f <part-of-cmd: str>) | ||||
| 
 | ||||
|     OR without a sub-shell, | ||||
| 
 | ||||
|     >> pkill --signal SIGUSR1 -f <part-of-cmd: str> | ||||
| 
 | ||||
|     ''' | ||||
|     try: | ||||
|         import stackscope | ||||
|     except ImportError: | ||||
|         log.warning( | ||||
|             '`stackscope` not installed for use in debug mode!' | ||||
|         ) | ||||
|         return None | ||||
| 
 | ||||
|     handler: Callable|int = getsignal(sig) | ||||
|     if handler is dump_tree_on_sig: | ||||
|         log.devx( | ||||
|             'A `SIGUSR1` handler already exists?\n' | ||||
|             f'|_ {handler!r}\n' | ||||
|         ) | ||||
|         return | ||||
| 
 | ||||
|     signal( | ||||
|         sig, | ||||
|         signal_handler, | ||||
|         dump_tree_on_sig, | ||||
|     ) | ||||
|     # NOTE: not the above can be triggered from | ||||
|     # a (xonsh) shell using: | ||||
|     # kill -SIGUSR1 @$(pgrep -f '<cmd>') | ||||
|     # | ||||
|     # for example if you were looking to trace a `pytest` run | ||||
|     # kill -SIGUSR1 @$(pgrep -f 'pytest') | ||||
|     log.devx( | ||||
|         'Enabling trace-trees on `SIGUSR1` ' | ||||
|         'since `stackscope` is installed @ \n' | ||||
|         f'{stackscope!r}\n\n' | ||||
|         f'With `SIGUSR1` handler\n' | ||||
|         f'|_{dump_tree_on_sig}\n' | ||||
|     ) | ||||
|     return stackscope | ||||
|  |  | |||
|  | @ -0,0 +1,169 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Pretty formatters for use throughout the code base. | ||||
| Mostly handy for logging and exception message content. | ||||
| 
 | ||||
| ''' | ||||
| import textwrap | ||||
| import traceback | ||||
| 
 | ||||
| from trio import CancelScope | ||||
| 
 | ||||
| 
 | ||||
def add_div(
    message: str,
    div_str: str = '------ - ------',

) -> str:
    '''
    Render a "divider string" horizontally padded so it lands
    roughly centered underneath the input `message`.

    NOTE: only the newline-wrapped divider line is returned; the
    `message` itself is not included in the output (presumably the
    caller concatenates it — confirm at call sites).

    '''
    # half-width difference (+2 skew) between message and divider;
    # a negative pad simply multiplies out to the empty string.
    pad_width: int = (
        round(len(message) / 2)
        -
        round(len(div_str) / 2)
        + 2
    )
    return '\n' + (' ' * pad_width) + div_str + '\n'
| 
 | ||||
| 
 | ||||
| def pformat_boxed_tb( | ||||
|     tb_str: str, | ||||
|     fields_str: str|None = None, | ||||
|     field_prefix: str = ' |_', | ||||
| 
 | ||||
|     tb_box_indent: int|None = None, | ||||
|     tb_body_indent: int = 1, | ||||
|     boxer_header: str = '-' | ||||
| 
 | ||||
| ) -> str: | ||||
|     ''' | ||||
|     Create a "boxed" looking traceback string. | ||||
| 
 | ||||
|     Useful for emphasizing traceback text content as being an | ||||
|     embedded attribute of some other object (like | ||||
|     a `RemoteActorError` or other boxing remote error shuttle | ||||
|     container). | ||||
| 
 | ||||
|     Any other parent/container "fields" can be passed in the | ||||
|     `fields_str` input along with other prefix/indent settings. | ||||
| 
 | ||||
|     ''' | ||||
|     if ( | ||||
|         fields_str | ||||
|         and | ||||
|         field_prefix | ||||
|     ): | ||||
|         fields: str = textwrap.indent( | ||||
|             fields_str, | ||||
|             prefix=field_prefix, | ||||
|         ) | ||||
|     else: | ||||
|         fields = fields_str or '' | ||||
| 
 | ||||
|     tb_body = tb_str | ||||
|     if tb_body_indent: | ||||
|         tb_body: str = textwrap.indent( | ||||
|             tb_str, | ||||
|             prefix=tb_body_indent * ' ', | ||||
|         ) | ||||
| 
 | ||||
|     tb_box: str = ( | ||||
|         f'|\n' | ||||
|         f' ------ {boxer_header} ------\n' | ||||
|         f'{tb_body}' | ||||
|         f' ------ {boxer_header}- ------\n' | ||||
|         f'_|' | ||||
|     ) | ||||
|     tb_box_indent: str = ( | ||||
|         tb_box_indent | ||||
|         or | ||||
|         1 | ||||
| 
 | ||||
|         # (len(field_prefix)) | ||||
|         # ? ^-TODO-^ ? if you wanted another indent level | ||||
|     ) | ||||
|     if tb_box_indent > 0: | ||||
|         tb_box: str = textwrap.indent( | ||||
|             tb_box, | ||||
|             prefix=tb_box_indent * ' ', | ||||
|         ) | ||||
| 
 | ||||
|     return ( | ||||
|         fields | ||||
|         + | ||||
|         tb_box | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
def pformat_caller_frame(
    stack_limit: int = 1,
    box_tb: bool = True,
) -> str:
    '''
    Capture and return the traceback text content from
    `stack_limit` call frames up, optionally (by default)
    wrapped in an ascii box via `pformat_boxed_tb()`.

    '''
    tb_str: str = (
        '\n'.join(
            traceback.format_stack(limit=stack_limit)
        )
    )
    if box_tb:
        # FIX: the previous call passed a nonexistent `indent=''`
        # kwarg to `pformat_boxed_tb()` which raised a `TypeError`
        # on this (default) code path; it has been removed.
        tb_str: str = pformat_boxed_tb(
            tb_str=tb_str,
            field_prefix='  ',
        )
    return tb_str
| 
 | ||||
| 
 | ||||
def pformat_cs(
    cs: CancelScope,
    var_name: str = 'cs',
    field_prefix: str = ' |_',
) -> str:
    '''
    Pretty format info about a `trio.CancelScope` including most
    of its public state and `._cancel_status`.

    The output can be modified to show a "var name" for the
    instance as a field prefix, just a simple str before each
    line more or less.

    '''
    # one line per scope attr, each carrying the field prefix
    state_report: str = (
        f'cancel_called = {cs.cancel_called}\n'
        f'cancelled_caught = {cs.cancelled_caught}\n'
        f'_cancel_status = {cs._cancel_status}\n'
        f'shield = {cs.shield}\n'
    )
    header: str = f'{var_name}: {cs}\n'
    return header + textwrap.indent(
        state_report,
        prefix=field_prefix,
    )
|  | @ -54,11 +54,12 @@ LOG_FORMAT = ( | |||
| DATE_FORMAT = '%b %d %H:%M:%S' | ||||
| 
 | ||||
| # FYI, ERROR is 40 | ||||
| # TODO: use a `bidict` to avoid the :155 check? | ||||
| CUSTOM_LEVELS: dict[str, int] = { | ||||
|     'TRANSPORT': 5, | ||||
|     'RUNTIME': 15, | ||||
|     'DEVX': 17, | ||||
|     'CANCEL': 18, | ||||
|     'CANCEL': 22, | ||||
|     'PDB': 500, | ||||
| } | ||||
| STD_PALETTE = { | ||||
|  | @ -147,6 +148,8 @@ class StackLevelAdapter(LoggerAdapter): | |||
|         Delegate a log call to the underlying logger, after adding | ||||
|         contextual information from this adapter instance. | ||||
| 
 | ||||
|         NOTE: all custom level methods (above) delegate to this! | ||||
| 
 | ||||
|         ''' | ||||
|         if self.isEnabledFor(level): | ||||
|             stacklevel: int = 3 | ||||
|  | @ -255,20 +258,28 @@ class ActorContextInfo(Mapping): | |||
| 
 | ||||
| 
 | ||||
| def get_logger( | ||||
| 
 | ||||
|     name: str|None = None, | ||||
|     _root_name: str = _proj_name, | ||||
| 
 | ||||
|     logger: Logger|None = None, | ||||
| 
 | ||||
|     # TODO, using `.config.dictConfig()` api? | ||||
|     # -[ ] SO answer with docs links | ||||
|     #  |_https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig | ||||
|     #  |_https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema | ||||
|     subsys_spec: str|None = None, | ||||
| 
 | ||||
| ) -> StackLevelAdapter: | ||||
|     '''Return the package log or a sub-logger for ``name`` if provided. | ||||
| 
 | ||||
|     ''' | ||||
|     log: Logger | ||||
|     log = rlog = logging.getLogger(_root_name) | ||||
|     log = rlog = logger or logging.getLogger(_root_name) | ||||
| 
 | ||||
|     if ( | ||||
|         name | ||||
|         and name != _proj_name | ||||
|         and | ||||
|         name != _proj_name | ||||
|     ): | ||||
| 
 | ||||
|         # NOTE: for handling for modules that use ``get_logger(__name__)`` | ||||
|  | @ -303,7 +314,10 @@ def get_logger( | |||
| 
 | ||||
|     # add our actor-task aware adapter which will dynamically look up | ||||
|     # the actor and task names at each log emit | ||||
|     logger = StackLevelAdapter(log, ActorContextInfo()) | ||||
|     logger = StackLevelAdapter( | ||||
|         log, | ||||
|         ActorContextInfo(), | ||||
|     ) | ||||
| 
 | ||||
|     # additional levels | ||||
|     for name, val in CUSTOM_LEVELS.items(): | ||||
|  | @ -317,14 +331,24 @@ def get_logger( | |||
| 
 | ||||
| def get_console_log( | ||||
|     level: str|None = None, | ||||
|     logger: Logger|None = None, | ||||
|     **kwargs, | ||||
| ) -> LoggerAdapter: | ||||
|     '''Get the package logger and enable a handler which writes to stderr. | ||||
| 
 | ||||
|     Yeah yeah, i know we can use ``DictConfig``. You do it. | ||||
| ) -> LoggerAdapter: | ||||
|     ''' | ||||
|     log = get_logger(**kwargs)  # our root logger | ||||
|     logger = log.logger | ||||
|     Get a `tractor`-style logging instance: a `Logger` wrapped in | ||||
|     a `StackLevelAdapter` which injects various concurrency-primitive | ||||
|     (process, thread, task) fields and enables a `StreamHandler` that | ||||
|     writes on stderr using `colorlog` formatting. | ||||
| 
 | ||||
|     Yeah yeah, i know we can use `logging.config.dictConfig()`. You do it. | ||||
| 
 | ||||
|     ''' | ||||
|     log = get_logger( | ||||
|         logger=logger, | ||||
|         **kwargs | ||||
|     )  # set a root logger | ||||
|     logger: Logger = log.logger | ||||
| 
 | ||||
|     if not level: | ||||
|         return log | ||||
|  | @ -343,9 +367,13 @@ def get_console_log( | |||
|             None, | ||||
|         ) | ||||
|     ): | ||||
|         fmt = LOG_FORMAT | ||||
|         # if logger: | ||||
|         #     fmt = None | ||||
| 
 | ||||
|         handler = StreamHandler() | ||||
|         formatter = colorlog.ColoredFormatter( | ||||
|             LOG_FORMAT, | ||||
|             fmt=fmt, | ||||
|             datefmt=DATE_FORMAT, | ||||
|             log_colors=STD_PALETTE, | ||||
|             secondary_log_colors=BOLD_PALETTE, | ||||
|  | @ -362,7 +390,7 @@ def get_loglevel() -> str: | |||
| 
 | ||||
| 
 | ||||
| # global module logger for tractor itself | ||||
| log = get_logger('tractor') | ||||
| log: StackLevelAdapter = get_logger('tractor') | ||||
| 
 | ||||
| 
 | ||||
| def at_least_level( | ||||
|  |  | |||
|  | @ -18,9 +18,56 @@ | |||
| Built-in messaging patterns, types, APIs and helpers. | ||||
| 
 | ||||
| ''' | ||||
| from typing import ( | ||||
|     TypeAlias, | ||||
| ) | ||||
| from .ptr import ( | ||||
|     NamespacePath as NamespacePath, | ||||
| ) | ||||
| from .types import ( | ||||
| from .pretty_struct import ( | ||||
|     Struct as Struct, | ||||
| ) | ||||
| from ._codec import ( | ||||
|     _def_msgspec_codec as _def_msgspec_codec, | ||||
|     _ctxvar_MsgCodec as _ctxvar_MsgCodec, | ||||
| 
 | ||||
|     apply_codec as apply_codec, | ||||
|     mk_codec as mk_codec, | ||||
|     MsgCodec as MsgCodec, | ||||
|     MsgDec as MsgDec, | ||||
|     current_codec as current_codec, | ||||
| ) | ||||
| # currently can't bc circular with `._context` | ||||
| # from ._ops import ( | ||||
| #     PldRx as PldRx, | ||||
| #     _drain_to_final_msg as _drain_to_final_msg, | ||||
| # ) | ||||
| 
 | ||||
| from .types import ( | ||||
|     PayloadMsg as PayloadMsg, | ||||
| 
 | ||||
|     Aid as Aid, | ||||
|     SpawnSpec as SpawnSpec, | ||||
| 
 | ||||
|     Start as Start, | ||||
|     StartAck as StartAck, | ||||
| 
 | ||||
|     Started as Started, | ||||
|     Yield as Yield, | ||||
|     Stop as Stop, | ||||
|     Return as Return, | ||||
|     CancelAck as CancelAck, | ||||
| 
 | ||||
|     Error as Error, | ||||
| 
 | ||||
|     # type-var for `.pld` field | ||||
|     PayloadT as PayloadT, | ||||
| 
 | ||||
|     # full msg class set from above as list | ||||
|     __msg_types__ as __msg_types__, | ||||
| 
 | ||||
|     # type-alias for union of all msgs | ||||
|     MsgType as MsgType, | ||||
| ) | ||||
| 
 | ||||
| __msg_spec__: TypeAlias = MsgType | ||||
|  |  | |||
|  | @ -0,0 +1,699 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| IPC msg interchange codec management. | ||||
| 
 | ||||
| Supported backend libs: | ||||
| - `msgspec.msgpack` | ||||
| 
 | ||||
| ToDo: backends we prolly should offer: | ||||
| 
 | ||||
| - see project/lib list throughout GH issue discussion comments: | ||||
|   https://github.com/goodboy/tractor/issues/196 | ||||
| 
 | ||||
| - `capnproto`: https://capnproto.org/rpc.html | ||||
|    - https://capnproto.org/language.html#language-reference | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| from contextvars import ( | ||||
|     ContextVar, | ||||
|     Token, | ||||
| ) | ||||
| import textwrap | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Protocol, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
|     TypeVar, | ||||
|     Union, | ||||
| ) | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import msgspec | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Raw, | ||||
| ) | ||||
| # TODO: see notes below from @mikenerone.. | ||||
| # from tricycle import TreeVar | ||||
| 
 | ||||
| from tractor.msg.pretty_struct import Struct | ||||
| from tractor.msg.types import ( | ||||
|     mk_msg_spec, | ||||
|     MsgType, | ||||
| ) | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._context import Context | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: unify with `MsgCodec` by making `._dec` part this? | ||||
class MsgDec(Struct):
    '''
    An IPC msg (payload) decoder.

    Normally used to decode only a payload: `MsgType.pld:
    PayloadT` field before delivery to IPC consumer code.

    '''
    # the wrapped `msgspec` decoder instance doing the real work
    _dec: msgpack.Decoder

    @property
    def dec(self) -> msgpack.Decoder:
        # read-only public accessor for the wrapped decoder
        return self._dec

    def __repr__(self) -> str:
        '''
        Multi-line pretty repr showing the decoder's `dec_hook`
        and its (possibly multi-typed union) payload spec.

        '''
        speclines: str = self.spec_str

        # in multi-typed spec case we stick the list
        # all on newlines after the |__pld_spec__:,
        # OW it's prolly single type spec-value
        # so just leave it on same line.
        if '\n' in speclines:
            speclines: str = '\n' + textwrap.indent(
                speclines,
                prefix=' '*3,
            )

        body: str = textwrap.indent(
            f'|_dec_hook: {self.dec.dec_hook}\n'
            f'|__pld_spec__: {speclines}\n',
            prefix=' '*2,
        )
        return (
            f'<{type(self).__name__}(\n'
            f'{body}'
            ')>'
        )

    # struct type unions
    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    #
    # ^-TODO-^: make a wrapper type for this such that alt
    # backends can be represented easily without a `Union` needed,
    # AND so that we have better support for wire transport.
    #
    # -[ ] maybe `FieldSpec` is a good name since msg-spec
    #   better applies to a `MsgType[FieldSpec]`?
    #
    # -[ ] both as part of the `.open_context()` call AND as part of the
    #     immediate ack-reponse (see similar below)
    #     we should do spec matching and fail if anything is awry?
    #
    # -[ ] eventually spec should be generated/parsed from the
    #     type-annots as # desired in GH issue:
    #     https://github.com/goodboy/tractor/issues/365
    #
    # -[ ] semantics of the mismatch case
    #   - when caller-callee specs we should raise
    #    a `MsgTypeError` or `MsgSpecError` or similar?
    #
    # -[ ] wrapper types for both spec types such that we can easily
    #     IPC transport them?
    #     - `TypeSpec: Union[Type]`
    #      * also a `.__contains__()` for doing `None in
    #      TypeSpec[None|int]` since rn you need to do it on
    #      `.__args__` for unions..
    #     - `MsgSpec: Union[MsgType]
    #
    # -[ ] auto-genning this from new (in 3.12) type parameter lists Bo
    # |_ https://docs.python.org/3/reference/compound_stmts.html#type-params
    # |_ historical pep 695: https://peps.python.org/pep-0695/
    # |_ full lang spec: https://typing.readthedocs.io/en/latest/spec/
    # |_ on annotation scopes:
    #    https://docs.python.org/3/reference/executionmodel.html#annotation-scopes
    # |_ 3.13 will have subscriptable funcs Bo
    #    https://peps.python.org/pep-0718/
    @property
    def spec(self) -> Union[Type[Struct]]:
        '''
        The decoder's type-spec, i.e. whatever was passed as
        `msgpack.Decoder(type=...)`.

        '''
        # NOTE: defined and applied inside `mk_codec()`
        return self._dec.type

    # no difference, as compared to a `MsgCodec` which defines the
    # `MsgType.pld: PayloadT` part of its spec separately
    pld_spec = spec

    # TODO: would get moved into `FieldSpec.__str__()` right?
    @property
    def spec_str(self) -> str:
        # pretty `|`-joined str listing of all spec'ed (sub)types
        return pformat_msgspec(
            codec=self,
            join_char='|',
        )

    # alias: for a bare `MsgDec` the payload-spec str and the full
    # spec str are one and the same (see `pld_spec` note above)
    pld_spec_str = spec_str

    def decode(
        self,
        raw: Raw|bytes,
    ) -> Any:
        '''
        Decode input `msgpack` bytes (or a `msgspec.Raw` view)
        to a python object as per `self.spec`.

        '''
        return self._dec.decode(raw)

    @property
    def hook(self) -> Callable|None:
        # the optional user-provided `dec_hook()` for custom types
        return self._dec.dec_hook
| 
 | ||||
| 
 | ||||
def mk_dec(
    spec: Union[Type[Struct]]|Any = Any,
    dec_hook: Callable|None = None,

) -> MsgDec:
    '''
    Build a new `MsgDec` wrapping a `msgspec.msgpack.Decoder`
    configured with the given (payload) type-`spec` and optional
    custom-type `dec_hook`.

    Normally used as the `PayloadMsg.pld: PayloadT` field decoder
    inside a `PldRx`.

    '''
    decoder = msgpack.Decoder(
        type=spec,  # like `MsgType[Any]`
        dec_hook=dec_hook,
    )
    return MsgDec(_dec=decoder)
| 
 | ||||
| 
 | ||||
def mk_msgspec_table(
    dec: msgpack.Decoder,
    msg: MsgType|None = None,

) -> dict[MsgType, str]:
    '''
    Fill out a `dict` mapping each `MsgType` to its `str` name
    for a given input `msgspec.msgpack.Decoder` as defined by
    its `.type: Union[Type]` setting.

    If `msg` is provided, only deliver a `dict` with a single
    entry for that msg's type.

    FIX(s): the return annotation previously read
    `dict[str, MsgType]|str` but the table is keyed by *type*
    with *name* values and a bare `str` is never returned; also
    the local spec var shadowed the module-level `msgspec` import.

    '''
    msg_spec: Union[Type]|Type = dec.type

    # a non-union spec is a single type; normalize to a list
    if not (msgtypes := getattr(msg_spec, '__args__', False)):
        msgtypes = [msg_spec]

    msgt_table: dict[MsgType, str] = {
        msgt: msgt.__name__
        for msgt in msgtypes
    }
    if msg:
        # NOTE: truthiness (not `is not None`) check here matches
        # prior behavior; falsy msgs fall through to the full table.
        msgt: MsgType = type(msg)
        str_repr: str = msgt_table[msgt]
        return {msgt: str_repr}

    return msgt_table
| 
 | ||||
| 
 | ||||
def pformat_msgspec(
    codec: MsgCodec|MsgDec,
    msg: MsgType|None = None,
    join_char: str = '\n',

) -> str:
    '''
    Pretty `str` format the `msgspec.msgpack.Decoder.type` attribute
    for display in (console) log messages as a nice (maybe multiline)
    presentation of all supported `Struct`s (subtypes) available for
    typed decoding.

    '''
    # a `MsgCodec`/`MsgDec` exposes its decoder via `.dec`;
    # a raw decoder instance is passed through unchanged.
    dec: msgpack.Decoder = getattr(codec, 'dec', codec)
    name_table = mk_msgspec_table(
        dec=dec,
        msg=msg,
    )
    return join_char.join(name_table.values())
| 
 | ||||
| # TODO: overall IPC msg-spec features (i.e. in this mod)! | ||||
| # | ||||
| # -[ ] API changes towards being interchange lib agnostic! | ||||
| #   -[ ] capnproto has pre-compiled schema for eg.. | ||||
| #    * https://capnproto.org/language.html | ||||
| #    * http://capnproto.github.io/pycapnp/quickstart.html | ||||
| #     * https://github.com/capnproto/pycapnp/blob/master/examples/addressbook.capnp | ||||
| # | ||||
| # -[ ] struct aware messaging coders as per: | ||||
| #   -[x] https://github.com/goodboy/tractor/issues/36 | ||||
| #   -[ ] https://github.com/goodboy/tractor/issues/196 | ||||
| #   -[ ] https://github.com/goodboy/tractor/issues/365 | ||||
| # | ||||
class MsgCodec(Struct):
    '''
    A IPC msg interchange format lib's encoder + decoder pair.

    Pretty much nothing more then delegation to underlying
    `msgspec.<interchange-protocol>.Encoder/Decoder`s for now.

    '''
    _enc: msgpack.Encoder
    _dec: msgpack.Decoder
    # the payload-only (`.pld` field) spec this codec was built with
    _pld_spec: Type[Struct]|Raw|Any

    def __repr__(self) -> str:
        '''
        Multi-line pretty repr showing the backend lib, hooks and
        both the payload and full msg specs.

        '''
        speclines: str = textwrap.indent(
            pformat_msgspec(codec=self),
            prefix=' '*3,
        )
        body: str = textwrap.indent(
            f'|_lib = {self.lib.__name__!r}\n'
            f'|_enc_hook: {self.enc.enc_hook}\n'
            f'|_dec_hook: {self.dec.dec_hook}\n'
            f'|_pld_spec: {self.pld_spec_str}\n'
            # f'|\n'
            f'|__msg_spec__:\n'
            f'{speclines}\n',
            prefix=' '*2,
        )
        return (
            f'<{type(self).__name__}(\n'
            f'{body}'
            ')>'
        )

    @property
    def pld_spec(self) -> Type[Struct]|Raw|Any:
        return self._pld_spec

    @property
    def pld_spec_str(self) -> str:
        '''
        Short `str` form of the payload spec: the `Union` repr
        for multi-typed specs, else just the single type's name.

        '''
        # TODO: could also use match: instead?
        spec: Union[Type]|Type = self.pld_spec

        # `typing.Union` case
        if getattr(spec, '__args__', False):
            return str(spec)

        # just a single type
        else:
            return spec.__name__

    # struct type unions
    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    @property
    def msg_spec(self) -> Union[Type[Struct]]:
        # NOTE: defined and applied inside `mk_codec()`
        return self._dec.type

    # TODO: some way to make `pretty_struct.Struct` use this
    # wrapped field over the `.msg_spec` one?
    @property
    def msg_spec_str(self) -> str:
        # FIX: previously called `pformat_msgspec(self.msg_spec)`
        # which passed a bare union type as the `codec` arg; since
        # a union has no `.dec`/`.type`, `mk_msgspec_table()` would
        # raise an `AttributeError`. Pass `self` so the lookup
        # resolves via `self.dec.type` as intended.
        return pformat_msgspec(codec=self)

    lib: ModuleType = msgspec

    # TODO: use `functools.cached_property` for these ?
    # https://docs.python.org/3/library/functools.html#functools.cached_property
    @property
    def enc(self) -> msgpack.Encoder:
        return self._enc

    # TODO: reusing encode buffer for perf?
    # https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer
    # NOTE(review): this is a *class-level* bytearray, so it is
    # shared across all instances — presumably fine while
    # `use_buf=False` is the norm; confirm before enabling.
    _buf: bytearray = bytearray()

    def encode(
        self,
        py_obj: Any,

        use_buf: bool = False,
        # ^-XXX-^ uhh why am i getting this?
        # |_BufferError: Existing exports of data: object cannot be re-sized

    ) -> bytes:
        '''
        Encode input python objects to `msgpack` bytes for
        transfer on a transport protocol connection.

        When `use_buf == True` use the output buffer optimization:
        https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer

        '''
        if use_buf:
            self._enc.encode_into(py_obj, self._buf)
            return self._buf
        else:
            return self._enc.encode(py_obj)

    @property
    def dec(self) -> msgpack.Decoder:
        return self._dec

    def decode(
        self,
        msg: bytes,
    ) -> Any:
        '''
        Decode received `msgpack` bytes into a local python object
        with special `msgspec.Struct` (or other type) handling
        determined by the decoder's configured `.type` spec (as
        set inside `mk_codec()`).

        '''
        # https://jcristharif.com/msgspec/usage.html#typed-decoding
        return self._dec.decode(msg)
| 
 | ||||
| 
 | ||||
| # [x] TODO: a sub-decoder system as well? => No! | ||||
| # | ||||
| # -[x] do we still want to try and support the sub-decoder with | ||||
| # `.Raw` technique in the case that the `Generic` approach gives | ||||
| # future grief? | ||||
| # => NO, since we went with the `PldRx` approach instead B) | ||||
| # | ||||
| # IF however you want to see the code that was staged for this | ||||
| # from wayyy back, see the pure removal commit. | ||||
| 
 | ||||
| 
 | ||||
def mk_codec(
    # struct type unions set for `Decoder`
    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    ipc_pld_spec: Union[Type[Struct]]|Any = Any,

    # TODO: offering a per-msg(-field) type-spec such that
    # the fields can be dynamically NOT decoded and left as `Raw`
    # values which are later loaded by a sub-decoder specified
    # by `tag_field: str` value key?
    # payload_msg_specs: dict[
    #     str,  # tag_field value as sub-decoder key
    #     Union[Type[Struct]]  # `MsgType.pld` type spec
    # ]|None = None,

    libname: str = 'msgspec',

    # proxy as `Struct(**kwargs)` for ad-hoc type extensions
    # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types
    # ------ - ------
    dec_hook: Callable|None = None,
    enc_hook: Callable|None = None,
    # ------ - ------
    #
    # Encoder:
    # write_buffer_size=write_buffer_size,
    #
    # Decoder:
    # ext_hook: ext_hook_sig

) -> MsgCodec:
    '''
    Convenience factory for creating codecs eventually meant
    to be interchange lib agnostic (i.e. once we support more then just
    `msgspec` ;).

    '''
    # (manually) generate the full shuttle-msg set with every
    # relevant god-boxing-msg subtype's `PayloadMsg.pld: PayloadT`
    # parameterized by the input payload spec, such that all
    # sub-type msgs decode to a type-"limited" payload object set.
    ipc_msg_spec, msg_types = mk_msg_spec(
        payload_type_union=ipc_pld_spec,
    )
    # sanity: one spec entry per generated msg type, non-empty
    assert len(ipc_msg_spec.__args__) == len(msg_types)
    assert ipc_msg_spec

    # TODO: use the `mk_dec()` shim instead?
    # bc.. unification, err somethin?

    codec = MsgCodec(
        _enc=msgpack.Encoder(
            enc_hook=enc_hook,
        ),
        _dec=msgpack.Decoder(
            type=ipc_msg_spec,
            dec_hook=dec_hook,
        ),
        _pld_spec=ipc_pld_spec,
    )

    # sanity on expected backend support
    assert codec.lib.__name__ == libname

    return codec
| 
 | ||||
| 
 | ||||
# instance of the default `msgspec.msgpack` codec settings, i.e.
# no custom structs, hooks or other special types.
_def_msgspec_codec: MsgCodec = mk_codec(ipc_pld_spec=Any)

# The built-in IPC `Msg` spec.
# Our composing "shuttle" protocol which allows `tractor`-app code
# to use any `msgspec` supported type as the `PayloadMsg.pld` payload,
# https://jcristharif.com/msgspec/supported-types.html
#
_def_tractor_codec: MsgCodec = mk_codec(
    # TODO: use this for debug mode locking prot?
    # ipc_pld_spec=Any,
    ipc_pld_spec=Raw,
)
# TODO: IDEALLY provides for per-`trio.Task` specificity of the
# IPC msging codec used by the transport layer when doing
# `Channel.send()/.recv()` of wire data.

# ContextVar-TODO: DIDN'T WORK, kept resetting in every new task to default!?
# _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar(

# TreeVar-TODO: DIDN'T WORK, kept resetting in every new embedded nursery
# even though it's supposed to inherit from a parent context ???
#
# _ctxvar_MsgCodec: TreeVar[MsgCodec] = TreeVar(
#
# ^-NOTE-^: for this to work see the mods by @mikenerone from `trio` gitter:
#
# 22:02:54 <mikenerone> even for regular contextvars, all you have to do is:
#    `task: Task = trio.lowlevel.current_task()`
#    `task.parent_nursery.parent_task.context.run(my_ctx_var.set, new_value)`
#
# From a comment in his prop code he couldn't share outright:
# 1. For every TreeVar set in the current task (which covers what
#    we need from SynchronizerFacade), walk up the tree until the
#    root or finding one where the TreeVar is already set, setting
#    it in all of the contexts along the way.
# 2. For each of those, we also forcibly set the values that are
#    pending for child nurseries that have not yet accessed the
#    TreeVar.
# 3. We similarly set the pending values for the child nurseries
#    of the *current* task.
#
# task-context var holding the "current" codec; swapped via
# `.set()`/`.reset()` token pairs inside `apply_codec()` below.
_ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar(
    'msgspec_codec',
    default=_def_tractor_codec,
)
| 
 | ||||
| 
 | ||||
@cm
def apply_codec(
    codec: MsgCodec,

    ctx: Context|None = None,

) -> MsgCodec:
    '''
    Dynamically apply a `MsgCodec` to the current task's runtime
    context such that all (of a certain class of payload
    containing i.e. `MsgType.pld: PayloadT`) IPC msgs are
    processed with it for that task.

    Uses a `contextvars.ContextVar` to ensure the scope of any
    codec setting matches the current `Context` or
    `._rpc.process_messages()` feeder task's prior setting without
    mutating any surrounding scope.

    When a `ctx` is supplied, only mod its `Context.pld_codec`
    (via its private `._var_pld_codec` contextvar); otherwise the
    IPC-channel-"global" `_ctxvar_MsgCodec` is swapped for the
    duration of the `@cm` block.

    '''
    # hide this frame from tracebacks raised inside the block
    __tracebackhide__: bool = True

    if ctx is not None:
        var: ContextVar = ctx._var_pld_codec
    else:
        # use IPC channel-connection "global" codec
        var: ContextVar = _ctxvar_MsgCodec

    orig: MsgCodec = var.get()

    assert orig is not codec
    if codec.pld_spec is None:
        # NOTE(review): debugger trap left in — presumably
        # a dev-time guard for an (invalid?) `None` pld-spec;
        # consider raising an explicit error instead. TODO confirm.
        breakpoint()

    log.info(
        'Applying new msg-spec codec\n\n'
        f'{codec}\n'
    )
    # swap in the new codec; the token is what lets us restore
    # the exact prior value (not just the default) on exit.
    token: Token = var.set(codec)

    # ?TODO? for TreeVar approach which copies from the
    # cancel-scope of the prior value, NOT the prior task
    # See the docs:
    # - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables
    # - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py
    #   ^- see docs for @cm `.being()` API
    # with _ctxvar_MsgCodec.being(codec):
    #     new = _ctxvar_MsgCodec.get()
    #     assert new is codec
    #     yield codec

    try:
        yield var.get()
    finally:
        # always restore the prior codec, even on error/cancel
        var.reset(token)
        log.info(
            'Reverted to last msg-spec codec\n\n'
            f'{orig}\n'
        )
        assert var.get() is orig
| 
 | ||||
| 
 | ||||
def current_codec() -> MsgCodec:
    '''
    Look up and deliver the `msgspec_codec` value currently set
    in this `trio.Task.context`, i.e. the codec used by
    `Channel.send/.recv()` for wire serialization.

    '''
    codec: MsgCodec = _ctxvar_MsgCodec.get()
    return codec
| 
 | ||||
| 
 | ||||
@cm
def limit_msg_spec(
    payload_spec: Union[Type[Struct]],

    # TODO: don't need this approach right?
    # -> related to the `MsgCodec._payload_decs` stuff above..
    # tagged_structs: list[Struct]|None = None,

    **codec_kwargs,

) -> MsgCodec:
    '''
    Install a (payload-spec limited) `MsgCodec` for the duration
    of the `with` block such that the SC-msg set's
    `PayloadMsg.pld: Union[Type[Struct]]` fields are natively
    decoded as tagged-unions of `msgspec.Struct`s (from
    `payload_spec`) for all IPC contexts in use by the current
    `trio.Task`.

    '''
    __tracebackhide__: bool = True
    prior: MsgCodec = current_codec()
    limited: MsgCodec = mk_codec(
        ipc_pld_spec=payload_spec,
        **codec_kwargs,
    )
    with apply_codec(limited) as in_effect:
        assert in_effect is limited
        yield limited

    # verify the prior codec was restored on un-wind
    assert current_codec() is prior
| 
 | ||||
| 
 | ||||
| # XXX: msgspec won't allow this with non-struct custom types | ||||
| # like `NamespacePath`!@! | ||||
| # @cm | ||||
| # def extend_msg_spec( | ||||
| #     payload_spec: Union[Type[Struct]], | ||||
| 
 | ||||
| # ) -> MsgCodec: | ||||
| #     ''' | ||||
| #     Extend the current `MsgCodec.pld_spec` (type set) by extending | ||||
| #     the payload spec to **include** the types specified by | ||||
| #     `payload_spec`. | ||||
| 
 | ||||
| #     ''' | ||||
| #     codec: MsgCodec = current_codec() | ||||
| #     pld_spec: Union[Type] = codec.pld_spec | ||||
| #     extended_spec: Union[Type] = pld_spec|payload_spec | ||||
| 
 | ||||
| #     with limit_msg_spec(payload_types=extended_spec) as ext_codec: | ||||
| #         # import pdbp; pdbp.set_trace() | ||||
| #         assert ext_codec.pld_spec == extended_spec | ||||
| #         yield ext_codec | ||||
| # | ||||
| # ^-TODO-^ is it impossible to make something like this orr!? | ||||
| 
 | ||||
| # TODO: make an auto-custom hook generator from a set of input custom | ||||
| # types? | ||||
| # -[ ] below is a proto design using a `TypeCodec` idea? | ||||
| # | ||||
| # type var for the expected interchange-lib's | ||||
| # IPC-transport type when not available as a built-in | ||||
| # serialization output. | ||||
| WireT = TypeVar('WireT') | ||||
| 
 | ||||
| 
 | ||||
| # TODO: some kinda (decorator) API for built-in subtypes | ||||
| # that builds this implicitly by inspecting the `mro()`? | ||||
# TODO: some kinda (decorator) API for built-in subtypes
# that builds this implicitly by inspecting the `mro()`?
class TypeCodec(Protocol):
    '''
    A per-custom-type wire-transport serialization translator
    description type.

    Implementations declare the native `src_type` and the
    wire-level `wire_type` it is converted to/from, plus the
    `encode()`/`decode()` conversion endpoints.

    '''
    src_type: Type
    wire_type: WireT

    # NOTE: protocol methods are instance methods so they must
    # take `self`; previously these omitted it which made `obj`
    # bind as the receiver and broke structural conformance for
    # any normal `def encode(self, obj)` implementation.
    def encode(self, obj: Type) -> WireT:
        ...

    def decode(
        self,
        obj_type: Type[WireT],
        obj: WireT,
    ) -> Type:
        ...
| 
 | ||||
| 
 | ||||
class MsgpackTypeCodec(TypeCodec):
    # TODO: placeholder for the msgpack-specific `TypeCodec`
    # impl; no custom-type conversion endpoints are defined yet.
    ...
| 
 | ||||
| 
 | ||||
def mk_codec_hooks(
    type_codecs: list[TypeCodec],

) -> tuple[Callable, Callable]:
    '''
    Deliver a `enc_hook()`/`dec_hook()` pair which handle
    manual conversion from an input `Type` set such that whenever
    the `TypeCodec.filter()` predicate matches the
    `TypeCodec.decode()` is called on the input native object by
    the `dec_hook()` and whenever the
    `isinstance(obj, TypeCodec.type)` matches against an
    `enc_hook(obj=obj)` the return value is taken from a
    `TypeCodec.encode(obj)` callback.

    '''
    # TODO: not yet implemented; stub for the auto-custom-hook
    # generator design sketched in the comments above.
    ...
|  | @ -0,0 +1,842 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Near-application abstractions for `MsgType.pld: PayloadT|Raw` | ||||
| delivery, filtering and type checking as well as generic | ||||
| operational helpers for processing transaction flows. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
|     contextmanager as cm, | ||||
| ) | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Callable, | ||||
|     Type, | ||||
|     TYPE_CHECKING, | ||||
|     Union, | ||||
| ) | ||||
| # ------ - ------ | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Raw, | ||||
|     Struct, | ||||
|     ValidationError, | ||||
| ) | ||||
| import trio | ||||
| # ------ - ------ | ||||
| from tractor.log import get_logger | ||||
| from tractor._exceptions import ( | ||||
|     MessagingError, | ||||
|     InternalError, | ||||
|     _raise_from_unexpected_msg, | ||||
|     MsgTypeError, | ||||
|     _mk_recv_mte, | ||||
|     pack_error, | ||||
| ) | ||||
| from tractor._state import current_ipc_ctx | ||||
| from ._codec import ( | ||||
|     mk_dec, | ||||
|     MsgDec, | ||||
|     MsgCodec, | ||||
|     current_codec, | ||||
| ) | ||||
| from .types import ( | ||||
|     CancelAck, | ||||
|     Error, | ||||
|     MsgType, | ||||
|     PayloadT, | ||||
|     Return, | ||||
|     Started, | ||||
|     Stop, | ||||
|     Yield, | ||||
|     pretty_struct, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._context import Context | ||||
|     from tractor._streaming import MsgStream | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| _def_any_pldec: MsgDec[Any] = mk_dec() | ||||
| 
 | ||||
| 
 | ||||
class PldRx(Struct):
    '''
    A "msg payload receiver".

    The pairing of a "feeder" `trio.abc.ReceiveChannel` and an
    interchange-specific (eg. msgpack) payload field decoder. The
    validation/type-filtering rules are runtime mutable and allow
    type constraining the set of `MsgType.pld: Raw|PayloadT`
    values at runtime, per IPC task-context.

    This abstraction, being just below "user application code",
    allows for the equivalent of our `MsgCodec` (used for
    typer-filtering IPC dialog protocol msgs against a msg-spec)
    but with granular control around payload delivery (i.e. the
    data-values user code actually sees and uses (the blobs that
    are "shuttled" by the wrapping dialog prot) such that invalid
    `.pld: Raw` can be decoded and handled by IPC-primitive user
    code (i.e. that operates on `Context` and `Msgstream` APIs)
    without knowledge of the lower level `Channel`/`MsgTransport`
    primitives nor the `MsgCodec` in use. Further, lazily decoding
    payload blobs allows for topical (and maybe intentionally
    "partial") encryption of msg field subsets.

    '''
    # TODO: better to bind it here?
    # _rx_mc: trio.MemoryReceiveChannel

    # currently-applied payload decoder; swapped at runtime via
    # `.limit_plds()`.
    _pld_dec: MsgDec
    _ctx: Context|None = None
    # IPC primitive this rx is (temporarily) bound to, set by
    # `.wraps_ipc()`.
    _ipc: Context|MsgStream|None = None

    @property
    def pld_dec(self) -> MsgDec:
        # the currently-applied payload decoder
        return self._pld_dec

    # TODO: a better name?
    # -[ ] when would this be used as it avoids needing to pass the
    #   ipc prim to every method
    @cm
    def wraps_ipc(
        self,
        ipc_prim: Context|MsgStream,

    ) -> PldRx:
        '''
        Apply this payload receiver to an IPC primitive type, one
        of `Context` or `MsgStream`.

        '''
        self._ipc = ipc_prim
        try:
            yield self
        finally:
            # always unbind on exit so this rx can be re-applied
            self._ipc = None

    @cm
    def limit_plds(
        self,
        spec: Union[Type[Struct]],
        **dec_kwargs,

    ) -> MsgDec:
        '''
        Type-limit the loadable msg payloads via an applied
        `MsgDec` given an input spec, revert to prior decoder on
        exit.

        '''
        orig_dec: MsgDec = self._pld_dec
        limit_dec: MsgDec = mk_dec(
            spec=spec,
            **dec_kwargs,
        )
        try:
            self._pld_dec = limit_dec
            yield limit_dec
        finally:
            # restore the prior decoder even on error in the block
            self._pld_dec = orig_dec

    @property
    def dec(self) -> msgpack.Decoder:
        # underlying interchange-lib decoder instance
        return self._pld_dec.dec

    def recv_pld_nowait(
        self,
        # TODO: make this `MsgStream` compat as well, see above^
        # ipc_prim: Context|MsgStream,
        ipc: Context|MsgStream,

        ipc_msg: MsgType|None = None,
        expect_msg: Type[MsgType]|None = None,
        hide_tb: bool = False,
        **dec_pld_kwargs,

    ) -> Any|Raw:
        '''
        Sync version of `.recv_pld()`: non-blocking receive of the
        next msg from the IPC feeder chan (unless an `ipc_msg` is
        passed directly) then decode and return its `.pld` field.

        '''
        __tracebackhide__: bool = hide_tb

        msg: MsgType = (
            ipc_msg
            or

            # sync-rx msg from underlying IPC feeder (mem-)chan
            ipc._rx_chan.receive_nowait()
        )
        return self.decode_pld(
            msg,
            ipc=ipc,
            expect_msg=expect_msg,
            hide_tb=hide_tb,
            **dec_pld_kwargs,
        )

    async def recv_pld(
        self,
        ipc: Context|MsgStream,
        ipc_msg: MsgType|None = None,
        expect_msg: Type[MsgType]|None = None,
        hide_tb: bool = True,

        **dec_pld_kwargs,

    ) -> Any|Raw:
        '''
        Receive a `MsgType`, then decode and return its `.pld` field.

        '''
        __tracebackhide__: bool = hide_tb
        msg: MsgType = (
            ipc_msg
            or
            # async-rx msg from underlying IPC feeder (mem-)chan
            await ipc._rx_chan.receive()
        )
        return self.decode_pld(
            msg=msg,
            ipc=ipc,
            expect_msg=expect_msg,
            **dec_pld_kwargs,
        )

    def decode_pld(
        self,
        msg: MsgType,
        ipc: Context|MsgStream,
        expect_msg: Type[MsgType]|None,

        raise_error: bool = True,
        hide_tb: bool = True,

        # XXX for special (default?) case of send side call with
        # `Context.started(validate_pld_spec=True)`
        is_started_send_side: bool = False,

    ) -> PayloadT|Raw:
        '''
        Decode a msg's payload field: `MsgType.pld: PayloadT|Raw` and
        return the value or raise an appropriate error.

        '''
        __tracebackhide__: bool = hide_tb
        src_err: BaseException|None = None
        match msg:
            # payload-data shuttle msg; deliver the `.pld` value
            # directly to IPC (primitive) client-consumer code.
            case (
                Started(pld=pld)  # sync phase
                |Yield(pld=pld)  # streaming phase
                |Return(pld=pld)  # termination phase
            ):
                try:
                    pld: PayloadT = self._pld_dec.decode(pld)
                    log.runtime(
                        'Decoded msg payload\n\n'
                        f'{msg}\n'
                        f'where payload decoded as\n'
                        f'|_pld={pld!r}\n'
                    )
                    return pld

                # XXX pld-value type failure
                except ValidationError as valerr:
                    # pack mgterr into error-msg for
                    # reraise below; ensure remote-actor-err
                    # info is displayed nicely?
                    mte: MsgTypeError = _mk_recv_mte(
                        msg=msg,
                        codec=self.pld_dec,
                        src_validation_error=valerr,
                        is_invalid_payload=True,
                        expected_msg=expect_msg,
                    )
                    # NOTE: just raise the MTE inline instead of all
                    # the pack-unpack-repack non-sense when this is
                    # a "send side" validation error.
                    if is_started_send_side:
                        raise mte

                    # NOTE: the `.message` is automatically
                    # transferred into the message as long as we
                    # define it as a `Error.message` field.
                    err_msg: Error = pack_error(
                        exc=mte,
                        cid=msg.cid,
                        src_uid=(
                            ipc.chan.uid
                            if not is_started_send_side
                            else ipc._actor.uid
                        ),
                    )
                    mte._ipc_msg = err_msg

                    # XXX override the `msg` passed to
                    # `_raise_from_unexpected_msg()` (below) so
                    # that we're effectively able to use that same
                    # func to unpack and raise an "emulated remote
                    # `Error`" of this local MTE.
                    msg = err_msg
                    # XXX NOTE: so when the `_raise_from_unexpected_msg()`
                    # raises the boxed `err_msg` from above it raises
                    # it from the above caught interchange-lib
                    # validation error.
                    src_err = valerr

            # a runtime-internal RPC endpoint response.
            # always passthrough since (internal) runtime
            # responses are generally never exposed to consumer
            # code.
            case CancelAck(
                pld=bool(cancelled)
            ):
                return cancelled

            case Error():
                src_err = MessagingError(
                    'IPC ctx dialog terminated without `Return`-ing a result\n'
                    f'Instead it raised {msg.boxed_type_str!r}!'
                )
                # XXX NOTE XXX another super subtle runtime-y thing..
                #
                # - when user code (transitively) calls into this
                #   func (usually via a `Context/MsgStream` API) we
                #   generally want errors to propagate immediately
                #   and directly so that the user can define how it
                #   wants to handle them.
                #
                #  HOWEVER,
                #
                # - for certain runtime calling cases, we don't want to
                #   directly raise since the calling code might have
                #   special logic around whether to raise the error
                #   or supress it silently (eg. a `ContextCancelled`
                #   received from the far end which was requested by
                #   this side, aka a self-cancel).
                #
                # SO, we offer a flag to control this.
                if not raise_error:
                    return src_err

            case Stop(cid=cid):
                # NOTE(review): a `MsgStream` is expected to expose
                # a `.ctx` attr here; a `Context` is used as-is.
                ctx: Context = getattr(ipc, 'ctx', ipc)
                message: str = (
                    f'{ctx.side!r}-side of ctx received stream-`Stop` from '
                    f'{ctx.peer_side!r} peer ?\n'
                    f'|_cid: {cid}\n\n'

                    f'{pretty_struct.pformat(msg)}\n'
                )
                if ctx._stream is None:
                    explain: str = (
                        f'BUT, no `MsgStream` (was) open(ed) on this '
                        f'{ctx.side!r}-side of the IPC ctx?\n'
                        f'Maybe check your code for streaming phase race conditions?\n'
                    )
                    log.warning(
                        message
                        +
                        explain
                    )
                    # let caller decide what to do when only one
                    # side opened a stream, don't raise.
                    return msg

                else:
                    explain: str = (
                        'Received a `Stop` when it should NEVER be possible!?!?\n'
                    )
                    # TODO: this is constructed inside
                    # `_raise_from_unexpected_msg()` but maybe we
                    # should pass it in?
                    # src_err = trio.EndOfChannel(explain)
                    src_err = None

            case _:
                src_err = InternalError(
                    'Invalid IPC msg ??\n\n'
                    f'{msg}\n'
                )

        # TODO: maybe use the new `.add_note()` from 3.11?
        # |_https://docs.python.org/3.11/library/exceptions.html#BaseException.add_note
        #
        # fallthrough and raise from `src_err`
        try:
            _raise_from_unexpected_msg(
                ctx=getattr(ipc, 'ctx', ipc),
                msg=msg,
                src_err=src_err,
                log=log,
                expect_msg=expect_msg,
                hide_tb=hide_tb,
            )
        except UnboundLocalError:
            # XXX if there's an internal lookup error in the above
            # code (prolly on `src_err`) we want to show this frame
            # in the tb!
            __tracebackhide__: bool = False
            raise

    # alias; both names reference the same method
    dec_msg = decode_pld

    async def recv_msg_w_pld(
        self,
        ipc: Context|MsgStream,
        expect_msg: MsgType,

        # NOTE: generally speaking only for handling `Stop`-msgs that
        # arrive during a call to `drain_to_final_msg()` above!
        passthrough_non_pld_msgs: bool = True,
        hide_tb: bool = True,
        **kwargs,

    ) -> tuple[MsgType, PayloadT]:
        '''
        Retrieve the next avail IPC msg, decode its payload, and return
        the pair of refs.

        '''
        __tracebackhide__: bool = hide_tb
        msg: MsgType = await ipc._rx_chan.receive()

        if passthrough_non_pld_msgs:
            match msg:
                case Stop():
                    # no payload to decode; caller handles the raw
                    # `Stop` msg (pld slot is `None`).
                    return msg, None

        # TODO: is there some way we can inject the decoded
        # payload into an existing output buffer for the original
        # msg instance?
        pld: PayloadT = self.decode_pld(
            msg,
            ipc=ipc,
            expect_msg=expect_msg,
            hide_tb=hide_tb,
            **kwargs,
        )
        return msg, pld
| 
 | ||||
| 
 | ||||
@cm
def limit_plds(
    spec: Union[Type[Struct]],
    **dec_kwargs,

) -> MsgDec:
    '''
    Apply a `MsgCodec` that will natively decode the SC-msg set's
    `PayloadMsg.pld: Union[Type[Struct]]` payload fields using
    tagged-unions of `msgspec.Struct`s from the `payload_types`
    for all IPC contexts in use by the current `trio.Task`.

    The prior payload-decoder is restored on block exit.

    '''
    __tracebackhide__: bool = True

    # NOTE: setup MUST happen *before* the `try` block; previously
    # these were assigned inside it which meant a raise from
    # `current_ipc_ctx()` (or `mk_dec()`) caused the `finally` to
    # reference the unbound `orig_pldec`/`rx` names, masking the
    # real error with an `UnboundLocalError`.
    curr_ctx: Context = current_ipc_ctx()
    rx: PldRx = curr_ctx._pld_rx
    orig_pldec: MsgDec = rx.pld_dec

    try:
        with rx.limit_plds(
            spec=spec,
            **dec_kwargs,
        ) as pldec:
            log.runtime(
                'Applying payload-decoder\n\n'
                f'{pldec}\n'
            )
            yield pldec
    finally:
        log.runtime(
            'Reverted to previous payload-decoder\n\n'
            f'{orig_pldec}\n'
        )
        # sanity on orig settings
        assert rx.pld_dec is orig_pldec
| 
 | ||||
| 
 | ||||
@acm
async def maybe_limit_plds(
    ctx: Context,
    spec: Union[Type[Struct]]|None = None,
    dec_hook: Callable|None = None,
    **kwargs,

) -> MsgDec|None:
    '''
    Async compat maybe-payload type limiter.

    Mostly for use inside other internal `@acm`s such that a separate
    indent block isn't needed when an async one is already being
    used.

    Yields `None` (a no-op) when neither a `spec` nor a `dec_hook`
    is provided.

    '''
    no_limiting: bool = (
        spec is None
        and
        dec_hook is None
    )
    if no_limiting:
        yield None
        return

    # sanity check on IPC scoping
    assert ctx is current_ipc_ctx()

    limiter = ctx._pld_rx.limit_plds(
        spec=spec,
        dec_hook=dec_hook,
        **kwargs,
    )
    with limiter as applied_dec:
        yield applied_dec

    # when the applied spec is unwound/removed, the same IPC-ctx
    # should still be in scope.
    assert ctx is current_ipc_ctx()
| 
 | ||||
| 
 | ||||
| async def drain_to_final_msg( | ||||
|     ctx: Context, | ||||
| 
 | ||||
|     hide_tb: bool = True, | ||||
|     msg_limit: int = 6, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     Return|None, | ||||
|     list[MsgType] | ||||
| ]: | ||||
|     ''' | ||||
|     Drain IPC msgs delivered to the underlying IPC context's | ||||
|     rx-mem-chan (i.e. from `Context._rx_chan`) in search for a final | ||||
|     `Return` or `Error` msg. | ||||
| 
 | ||||
|     Deliver the `Return` + preceding drained msgs (`list[MsgType]`) | ||||
|     as a pair unless an `Error` is found, in which unpack and raise | ||||
|     it. | ||||
| 
 | ||||
|     The motivation here is to always capture any remote error relayed | ||||
|     by the remote peer task during a ctxc condition. | ||||
| 
 | ||||
|     For eg. a ctxc-request may be sent to the peer as part of the | ||||
|     local task's (request for) cancellation but then that same task | ||||
|     **also errors** before executing the teardown in the | ||||
|     `Portal.open_context().__aexit__()` block. In such error-on-exit | ||||
|     cases we want to always capture and raise any delivered remote | ||||
|     error (like an expected ctxc-ACK) as part of the final | ||||
|     `ctx.wait_for_result()` teardown sequence such that the | ||||
|     `Context.outcome` related state always reflect what transpired | ||||
|     even after ctx closure and the `.open_context()` block exit. | ||||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     raise_overrun: bool = not ctx._allow_overruns | ||||
| 
 | ||||
|     # wait for a final context result by collecting (but | ||||
|     # basically ignoring) any bi-dir-stream msgs still in transit | ||||
|     # from the far end. | ||||
|     pre_result_drained: list[MsgType] = [] | ||||
|     result_msg: Return|Error|None = None | ||||
|     while not ( | ||||
|         ctx.maybe_error | ||||
|         and not ctx._final_result_is_set() | ||||
|     ): | ||||
|         try: | ||||
|             # receive all msgs, scanning for either a final result | ||||
|             # or error; the underlying call should never raise any | ||||
|             # remote error directly! | ||||
|             msg, pld = await ctx._pld_rx.recv_msg_w_pld( | ||||
|                 ipc=ctx, | ||||
|                 expect_msg=Return, | ||||
|                 raise_error=False, | ||||
|                 hide_tb=hide_tb, | ||||
|             ) | ||||
|             # ^-TODO-^ some bad ideas? | ||||
|             # -[ ] wrap final outcome .receive() in a scope so | ||||
|             #     it can be cancelled out of band if needed? | ||||
|             # |_with trio.CancelScope() as res_cs: | ||||
|             #       ctx._res_scope = res_cs | ||||
|             #       msg: dict = await ctx._rx_chan.receive() | ||||
|             #   if res_cs.cancelled_caught: | ||||
|             # | ||||
|             # -[ ] make sure pause points work here for REPLing | ||||
|             #   the runtime itself; i.e. ensure there's no hangs! | ||||
|             # |_from tractor.devx._debug import pause | ||||
|             #   await pause() | ||||
| 
 | ||||
|         # NOTE: we get here if the far end was | ||||
|         # `ContextCancelled` in 2 cases: | ||||
|         # 1. we requested the cancellation and thus | ||||
|         #    SHOULD NOT raise that far end error, | ||||
|         # 2. WE DID NOT REQUEST that cancel and thus | ||||
|         #    SHOULD RAISE HERE! | ||||
|         except trio.Cancelled as _taskc: | ||||
|             taskc: trio.Cancelled = _taskc | ||||
| 
 | ||||
|             # report when the cancellation wasn't (ostensibly) due to | ||||
|             # RPC operation, some surrounding parent cancel-scope. | ||||
|             if not ctx._scope.cancel_called: | ||||
|                 task: trio.lowlevel.Task = trio.lowlevel.current_task() | ||||
|                 rent_n: trio.Nursery = task.parent_nursery | ||||
|                 if ( | ||||
|                     (local_cs := rent_n.cancel_scope).cancel_called | ||||
|                 ): | ||||
|                     log.cancel( | ||||
|                         'RPC-ctx cancelled by local-parent scope during drain!\n\n' | ||||
|                         f'c}}>\n' | ||||
|                         f' |_{rent_n}\n' | ||||
|                         f'   |_.cancel_scope = {local_cs}\n' | ||||
|                         f'   |_>c}}\n' | ||||
|                         f'      |_{ctx.pformat(indent=" "*9)}' | ||||
|                         # ^TODO, some (other) simpler repr here? | ||||
|                     ) | ||||
|                     __tracebackhide__: bool = False | ||||
| 
 | ||||
|             # CASE 2: mask the local cancelled-error(s) | ||||
|             # only when we are sure the remote error is | ||||
|             # the source cause of this local task's | ||||
|             # cancellation. | ||||
|             ctx.maybe_raise( | ||||
|                 hide_tb=hide_tb, | ||||
|                 from_src_exc=taskc, | ||||
|                 # ?TODO? when *should* we use this? | ||||
|             ) | ||||
| 
 | ||||
|             # CASE 1: we DID request the cancel we simply | ||||
|             # continue to bubble up as normal. | ||||
|             raise taskc | ||||
| 
 | ||||
|         match msg: | ||||
| 
 | ||||
|             # final result arrived! | ||||
|             case Return(): | ||||
|                 log.runtime( | ||||
|                     'Context delivered final draining msg:\n' | ||||
|                     f'{pretty_struct.pformat(msg)}' | ||||
|                 ) | ||||
|                 ctx._result: Any = pld | ||||
|                 result_msg = msg | ||||
|                 break | ||||
| 
 | ||||
|             # far end task is still streaming to us so discard | ||||
|             # and report depending on local ctx state. | ||||
|             case Yield(): | ||||
|                 pre_result_drained.append(msg) | ||||
|                 if ( | ||||
|                     (ctx._stream.closed | ||||
|                      and (reason := 'stream was already closed') | ||||
|                     ) | ||||
|                     or (ctx.cancel_acked | ||||
|                         and (reason := 'ctx cancelled other side') | ||||
|                     ) | ||||
|                     or (ctx._cancel_called | ||||
|                         and (reason := 'ctx called `.cancel()`') | ||||
|                     ) | ||||
|                     or (len(pre_result_drained) > msg_limit | ||||
|                         and (reason := f'"yield" limit={msg_limit}') | ||||
|                     ) | ||||
|                 ): | ||||
|                     log.cancel( | ||||
|                         'Cancelling `MsgStream` drain since ' | ||||
|                         f'{reason}\n\n' | ||||
|                         f'<= {ctx.chan.uid}\n' | ||||
|                         f'  |_{ctx._nsf}()\n\n' | ||||
|                         f'=> {ctx._task}\n' | ||||
|                         f'  |_{ctx._stream}\n\n' | ||||
| 
 | ||||
|                         f'{pretty_struct.pformat(msg)}\n' | ||||
|                     ) | ||||
|                     break | ||||
| 
 | ||||
|                 # drain up to the `msg_limit` hoping to get | ||||
|                 # a final result or error/ctxc. | ||||
|                 else: | ||||
|                     log.warning( | ||||
|                         'Ignoring "yield" msg during `ctx.result()` drain..\n' | ||||
|                         f'<= {ctx.chan.uid}\n' | ||||
|                         f'  |_{ctx._nsf}()\n\n' | ||||
|                         f'=> {ctx._task}\n' | ||||
|                         f'  |_{ctx._stream}\n\n' | ||||
| 
 | ||||
|                         f'{pretty_struct.pformat(msg)}\n' | ||||
|                     ) | ||||
|                     continue | ||||
| 
 | ||||
|             # stream terminated, but no result yet.. | ||||
|             # | ||||
|             # TODO: work out edge cases here where | ||||
|             # a stream is open but the task also calls | ||||
|             # this? | ||||
|             # -[ ] should be a runtime error if a stream is open right? | ||||
|             # Stop() | ||||
|             case Stop(): | ||||
|                 pre_result_drained.append(msg) | ||||
|                 log.runtime(  # normal/expected shutdown transaction | ||||
|                     'Remote stream terminated due to "stop" msg:\n\n' | ||||
|                     f'{pretty_struct.pformat(msg)}\n' | ||||
|                 ) | ||||
|                 continue | ||||
| 
 | ||||
|             # remote error msg, likely already handled inside | ||||
|             # `Context._deliver_msg()` | ||||
|             case Error(): | ||||
|                 # TODO: can we replace this with `ctx.maybe_raise()`? | ||||
|                 # -[ ]  would this be handier for this case maybe? | ||||
|                 # |_async with maybe_raise_on_exit() as raises: | ||||
|                 #       if raises: | ||||
|                 #           log.error('some msg about raising..') | ||||
|                 # | ||||
|                 re: Exception|None = ctx._remote_error | ||||
|                 if re: | ||||
|                     assert msg is ctx._cancel_msg | ||||
|                     # NOTE: this solved a super duper edge case XD | ||||
|                     # this was THE super duper edge case of: | ||||
|                     # - local task opens a remote task, | ||||
|                     # - requests remote cancellation of far end | ||||
|                     #   ctx/tasks, | ||||
|                     # - needs to wait for the cancel ack msg | ||||
|                     #   (ctxc) or some result in the race case | ||||
|                     #   where the other side's task returns | ||||
|                     #   before the cancel request msg is ever | ||||
|                     #   rxed and processed, | ||||
|                     # - here this surrounding drain loop (which | ||||
|                     #   iterates all ipc msgs until the ack or | ||||
|                     #   an early result arrives) was NOT exiting | ||||
|                     #   since we are the edge case: local task | ||||
|                     #   does not re-raise any ctxc it receives | ||||
|                     #   IFF **it** was the cancellation | ||||
|                     #   requester.. | ||||
|                     # | ||||
|                     # XXX will raise if necessary but ow break | ||||
|                     # from loop presuming any suppressed error | ||||
|                     # (ctxc) should terminate the context! | ||||
|                     ctx._maybe_raise_remote_err( | ||||
|                         re, | ||||
|                         # NOTE: obvi we don't care if we | ||||
|                         # overran the far end if we're already | ||||
|                         # waiting on a final result (msg). | ||||
|                         # raise_overrun_from_self=False, | ||||
|                         raise_overrun_from_self=raise_overrun, | ||||
|                     ) | ||||
|                     result_msg = msg | ||||
|                     break  # OOOOOF, yeah obvi we need this.. | ||||
| 
 | ||||
|                 else: | ||||
|                     # bubble the original src key error | ||||
|                     raise | ||||
| 
 | ||||
|             # XXX should pretty much never get here unless someone | ||||
|             # overrides the default `MsgType` spec. | ||||
|             case _: | ||||
|                 pre_result_drained.append(msg) | ||||
|                 # It's definitely an internal error if any other | ||||
|                 # msg type without a`'cid'` field arrives here! | ||||
|                 report: str = ( | ||||
|                     f'Invalid or unknown msg type {type(msg)!r}!?\n' | ||||
|                 ) | ||||
|                 if not msg.cid: | ||||
|                     report += ( | ||||
|                         '\nWhich also has no `.cid` field?\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 raise MessagingError( | ||||
|                     report | ||||
|                     + | ||||
|                     f'\n{msg}\n' | ||||
|                 ) | ||||
| 
 | ||||
|     else: | ||||
|         log.cancel( | ||||
|             'Skipping `MsgStream` drain since final outcome is set\n\n' | ||||
|             f'{ctx.outcome}\n' | ||||
|         ) | ||||
| 
 | ||||
|     return ( | ||||
|         result_msg, | ||||
|         pre_result_drained, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
def validate_payload_msg(
    pld_msg: Started|Yield|Return,
    pld_value: PayloadT,
    ipc: Context|MsgStream,

    raise_mte: bool = True,
    strict_pld_parity: bool = False,
    hide_tb: bool = True,

) -> MsgTypeError|None:
    '''
    Validate a `PayloadMsg.pld` value with the current
    IPC ctx's `PldRx` and raise an appropriate `MsgTypeError`
    on failure.

    The check round-trips `pld_msg` through the currently applied
    codec (encode then decode) and runs the boxed `.pld` through the
    ctx's `PldRx.decode_pld()`; any `ValidationError` raised along
    the way is boxed into a `MsgTypeError`.

    Parameters:
    - pld_msg: the payload-carrying msg whose `.pld` is validated.
    - pld_value: the original (pre-boxing) payload value; only
      consulted when `strict_pld_parity=True`.
    - ipc: the IPC primitive (a `Context` or `MsgStream`) whose
      `PldRx` spec applies.
    - raise_mte: when `True` (default) raise the generated
      `MsgTypeError`, otherwise return it to the caller.
    - strict_pld_parity: additionally require the round-tripped
      payload to compare equal to `pld_value`.
    - hide_tb: hide this frame from user-facing tracebacks.

    Returns `None` on success, or the `MsgTypeError` when
    `raise_mte=False` and validation failed.

    '''
    __tracebackhide__: bool = hide_tb
    codec: MsgCodec = current_codec()
    msg_bytes: bytes = codec.encode(pld_msg)
    try:
        # NOTE(review): `roundtripped` is annotated `Started` and
        # `expect_msg=Started` is hard-coded below even though the
        # signature also accepts `Yield|Return`; presumably only the
        # `Started` send-side path actually calls this — confirm
        # before passing other msg types.
        roundtripped: Started = codec.decode(msg_bytes)
        ctx: Context = getattr(ipc, 'ctx', ipc)
        pld: PayloadT = ctx.pld_rx.decode_pld(
            msg=roundtripped,
            ipc=ipc,
            expect_msg=Started,
            hide_tb=hide_tb,
            is_started_send_side=True,
        )
        if (
            strict_pld_parity
            and
            pld != pld_value
        ):
            # TODO: make that one a mod func too..
            diff = pretty_struct.Struct.__sub__(
                roundtripped,
                pld_msg,
            )
            complaint: str = (
                'Started value does not match after roundtrip?\n\n'
                f'{diff}'
            )
            raise ValidationError(complaint)

    # raise any msg type error NO MATTER WHAT!
    except ValidationError as verr:
        try:
            # box the (src) validation error into a `MsgTypeError`
            # carrying a send-side verb header for the report text.
            mte: MsgTypeError = _mk_recv_mte(
                msg=roundtripped,
                codec=codec,
                src_validation_error=verr,
                verb_header='Trying to send ',
                is_invalid_payload=True,
            )
        except BaseException:
            # building the error report itself failed; un-hide this
            # frame so the real crash site is visible.
            __tracebackhide__: bool = False
            raise

        if not raise_mte:
            return mte

        raise mte from verr
|  | @ -0,0 +1,342 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Prettified version of `msgspec.Struct` for easier console grokin. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections import UserList | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Iterator, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Struct as _Struct, | ||||
|     structs, | ||||
| ) | ||||
| # from pprint import ( | ||||
| #     saferepr, | ||||
| # ) | ||||
| 
 | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| log = get_logger() | ||||
| # TODO: auto-gen type sig for input func both for | ||||
| # type-msgs and logging of RPC tasks? | ||||
| # taken and modified from: | ||||
| # https://stackoverflow.com/a/57110117 | ||||
| # import inspect | ||||
| # from typing import List | ||||
| 
 | ||||
| # def my_function(input_1: str, input_2: int) -> list[int]: | ||||
| #     pass | ||||
| 
 | ||||
| # def types_of(func): | ||||
| #     specs = inspect.getfullargspec(func) | ||||
| #     return_type = specs.annotations['return'] | ||||
| #     input_types = [t.__name__ for s, t in specs.annotations.items() if s != 'return'] | ||||
| #     return f'{func.__name__}({": ".join(input_types)}) -> {return_type}' | ||||
| 
 | ||||
| # types_of(my_function) | ||||
| 
 | ||||
| 
 | ||||
class DiffDump(UserList):
    '''
    A thin `list` wrapper whose `repr()` renders each (presumed)
    `tuple[str, Any, Any]` element across several indented lines,
    making `Struct` field diffs easy to eyeball in a console.

    '''
    def __repr__(self) -> str:
        # an empty diff just shows as a plain empty list.
        if not self:
            return super().__repr__()

        # render one parenthesized, pipe-prefixed entry per
        # (key, left, right) triple so the two sides line up
        # vertically for visual comparison.
        chunks: list[str] = ['[\n']
        for key, lhs, rhs in self:
            chunks.append(
                f'({key},\n'
                f' |_{repr(lhs)},\n'
                f' |_{repr(rhs)},\n'
                ')\n'
            )
        chunks.append(']\n')
        return ''.join(chunks)
| 
 | ||||
| 
 | ||||
def iter_fields(struct: Struct) -> Iterator[
    tuple[
        structs.FieldInfo,
        str,
        Any,
    ]
]:
    '''
    Iterate over all non-@property fields of this struct.

    Yields a `(field_info, name, value)` triple per declared struct
    field, where `value` is read live off the instance via
    `getattr()`.

    '''
    # NOTE: fixed the return annotation typo `structs.FieldIinfo`
    # -> `structs.FieldInfo`; the old (nonexistent) attr name would
    # break any runtime annotation introspection of this func.
    fi: structs.FieldInfo
    for fi in structs.fields(struct):
        key: str = fi.name
        val: Any = getattr(struct, key)
        yield (
            fi,
            key,
            val,
        )
| 
 | ||||
| 
 | ||||
def pformat(
    struct: Struct,
    field_indent: int = 2,
    indent: int = 0,

) -> str:
    '''
    Recursion-safe `pprint.pformat()` style formatting of
    a `msgspec.Struct` for sane reading by a human using a REPL.

    Renders as `QualName(\n  field: Type = value,\n  ...)` with
    nested `Struct` values recursively pretty-formatted at a
    deeper indent.

    '''
    # whitespace for the closing paren vs. each field line.
    outer_ws: str = ' ' * indent
    inner_ws: str = ' ' * (field_indent + indent)

    # qtn: str = outer_ws + struct.__class__.__qualname__
    type_name: str = struct.__class__.__qualname__

    field_lines: list[str] = []
    finfo: structs.FieldInfo
    key: str
    value: Any
    for finfo, key, value in iter_fields(struct):

        # TODO: how can we prefer `Literal['option1',  'option2,
        # ..]` over .__name__ == `Literal` but still get only the
        # latter for simple types like `str | int | None` etc..?
        ftype: type = finfo.type
        type_str: str = getattr(ftype, '__name__', str(ftype))

        # nested structs recurse into their own `.pformat()` so the
        # whole tree renders with consistent per-level indent.
        if isinstance(value, Struct):
            val_repr: str = value.pformat(
                indent=field_indent + indent,
                field_indent=indent + field_indent,
            )
        else:
            val_repr: str = repr(value)

            # XXX the `pprint.saferepr()` alternative was dropped
            # since it was causing recursion errors; see:
            # https://docs.python.org/3.11/library/pprint.html#pprint.saferepr

        # TODO: LOLOL use `textwrap.indent()` instead dawwwwwg!
        field_lines.append(
            f'{inner_ws}{key}: {type_str} = {val_repr},\n'
        )

    body: str = ''.join(field_lines)
    return (
        f'{type_name}(\n'
        f'{body}'
        f'{outer_ws})'
    )
| 
 | ||||
| 
 | ||||
class Struct(
    _Struct,

    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    # tag='pikerstruct',
    # tag=True,
):
    '''
    A "human friendlier" (aka repl buddy) struct subtype.

    Extends `msgspec.Struct` with a multi-line `pformat()`-based
    `repr()`, dict conversion, validated copy/typecast helpers and
    field-wise diffing via `__sub__()`/`fields_diff()`.

    '''
    def to_dict(
        self,
        include_non_members: bool = True,

    ) -> dict:
        '''
        Like it sounds.. direct delegation to:
        https://jcristharif.com/msgspec/api.html#msgspec.structs.asdict

        BUT, by default we pop all non-member (aka not defined as
        struct fields) fields by default.

        '''
        asdict: dict = structs.asdict(self)
        if include_non_members:
            return asdict

        # only return a dict of the struct members
        # which were provided as input, NOT anything
        # added as type-defined `@property` methods!
        sin_props: dict = {}
        fi: structs.FieldInfo
        for fi, k, v in iter_fields(self):
            sin_props[k] = asdict[k]

        return sin_props

    # expose the module-level formatter as a bound method.
    pformat = pformat

    def __repr__(self) -> str:
        # fall back to the plain msgspec repr if pretty formatting
        # blows up (e.g. on a recursive/odd field value) rather than
        # crashing the caller's log/print.
        try:
            return pformat(self)
        except Exception:
            log.exception(
                f'Failed to `pformat({type(self)})` !?\n'
            )
            return _Struct.__repr__(self)

    # __repr__ = pformat
    # __str__ = __repr__ = pformat
    # TODO: use a pprint.PrettyPrinter instance around ONLY rendering
    # inside a known tty?
    # def __repr__(self) -> str:
    #     ...

    def copy(
        self,
        update: dict | None = None,

    ) -> Struct:
        '''
        Validate-typecast all self defined fields, return a copy of
        us with all such fields.

        NOTE: This is kinda like the default behaviour in
        `pydantic.BaseModel` except a copy of the object is
        returned making it compat with `frozen=True`.

        '''
        # NOTE(review): `update` mutates `self` in place BEFORE the
        # copy is made — so the source instance is modified too;
        # confirm callers expect that.
        if update:
            for k, v in update.items():
                setattr(self, k, v)

        # NOTE: roundtrip serialize to validate
        # - encode to msgpack binary format,
        # - decode that back to a struct.
        return msgpack.Decoder(type=type(self)).decode(
            msgpack.Encoder().encode(self)
        )

    def typecast(
        self,

        # TODO: allow only casting a named subset?
        # fields: set[str] | None = None,

    ) -> None:
        '''
        Cast all fields using their declared type annotations
        (kinda like what `pydantic` does by default).

        NOTE: this of course won't work on frozen types, use
        ``.copy()`` above in such cases.

        '''
        # https://jcristharif.com/msgspec/api.html#msgspec.structs.fields
        fi: structs.FieldInfo
        for fi in structs.fields(self):
            # NOTE(review): assumes every declared `fi.type` is
            # directly callable on the current value (plain classes
            # only); parameterized generics would raise here.
            setattr(
                self,
                fi.name,
                fi.type(getattr(self, fi.name)),
            )

    # TODO: make a mod func instead and just point to it here for
    # method impl?
    def __sub__(
        self,
        other: Struct,

    ) -> DiffDump[tuple[str, Any, Any]]:
        '''
        Compare fields/items key-wise and return a `DiffDump`
        for easy visual REPL comparison B)

        Each differing field contributes a
        `(name, ours, theirs)` triple to the returned dump.

        '''
        diffs: DiffDump[tuple[str, Any, Any]] = DiffDump()
        for fi in structs.fields(self):
            attr_name: str = fi.name
            ours: Any = getattr(self, attr_name)
            theirs: Any = getattr(other, attr_name)
            if ours != theirs:
                diffs.append((
                    attr_name,
                    ours,
                    theirs,
                ))

        return diffs

    @classmethod
    def fields_diff(
        cls,
        other: dict|Struct,

    ) -> DiffDump[tuple[str, Any, Any]]:
        '''
        Very similar to `PrettyStruct.__sub__()` except accepts an
        input `other: dict` (presumably that would normally be called
        like `Struct(**other)`) which returns a `DiffDump` of the
        fields of the struct and the `dict`'s fields.

        '''
        # NOTE(review): despite the `dict|Struct` annotation the body
        # assumes a `dict` — for a `Struct` input, `.copy()` (the
        # roundtrip-copy above) has no `.pop()`/`.items()`; confirm
        # whether `Struct` inputs are actually supported.
        nullish = object()
        consumed: dict = other.copy()
        diffs: DiffDump[tuple[str, Any, Any]] = DiffDump()
        for fi in structs.fields(cls):
            field_name: str = fi.name
            # ours: Any = getattr(self, field_name)
            theirs: Any = consumed.pop(field_name, nullish)
            if theirs is nullish:
                diffs.append((
                    field_name,
                    f'{fi.type!r}',
                    'NOT-DEFINED in `other: dict`',
                ))

        # when there are lingering fields in `other` that this struct
        # DOES NOT define we also append those.
        if consumed:
            for k, v in consumed.items():
                diffs.append((
                    k,
                    f'NOT-DEFINED for `{cls.__name__}`',
                    f'`other: dict` has value = {v!r}',
                ))

        return diffs
|  | @ -76,9 +76,11 @@ class NamespacePath(str): | |||
|         return self._ref | ||||
| 
 | ||||
|     @staticmethod | ||||
|     def _mk_fqnp(ref: type | object) -> tuple[str, str]: | ||||
|     def _mk_fqnp( | ||||
|         ref: type|object, | ||||
|     ) -> tuple[str, str]: | ||||
|         ''' | ||||
|         Generate a minimal ``str`` pair which describes a python | ||||
|         Generate a minimal `str` pair which describes a python | ||||
|         object's namespace path and object/type name. | ||||
| 
 | ||||
|         In more precise terms something like: | ||||
|  | @ -87,10 +89,9 @@ class NamespacePath(str): | |||
|             of THIS type XD | ||||
| 
 | ||||
|         ''' | ||||
|         if ( | ||||
|             isfunction(ref) | ||||
|         ): | ||||
|         if isfunction(ref): | ||||
|             name: str = getattr(ref, '__name__') | ||||
|             mod_name: str = ref.__module__ | ||||
| 
 | ||||
|         elif ismethod(ref): | ||||
|             # build out the path manually i guess..? | ||||
|  | @ -99,15 +100,19 @@ class NamespacePath(str): | |||
|                 type(ref.__self__).__name__, | ||||
|                 ref.__func__.__name__, | ||||
|             ]) | ||||
|             mod_name: str = ref.__self__.__module__ | ||||
| 
 | ||||
|         else:  # object or other? | ||||
|             # isinstance(ref, object) | ||||
|             # and not isfunction(ref) | ||||
|             name: str = type(ref).__name__ | ||||
|             mod_name: str = ref.__module__ | ||||
| 
 | ||||
|         # TODO: return static value directly? | ||||
|         # | ||||
|         # fully qualified namespace path, tuple. | ||||
|         fqnp: tuple[str, str] = ( | ||||
|             ref.__module__, | ||||
|             mod_name, | ||||
|             name, | ||||
|         ) | ||||
|         return fqnp | ||||
|  |  | |||
|  | @ -15,256 +15,716 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| Extensions to built-in or (heavily used but 3rd party) friend-lib | ||||
| types. | ||||
| Define our strictly typed IPC message spec for the SCIPP: | ||||
| 
 | ||||
| that is, | ||||
| 
 | ||||
| the "Structured-Concurrency-Inter-Process-(dialog)-(un)Protocol". | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from collections import UserList | ||||
| from pprint import ( | ||||
|     saferepr, | ||||
| ) | ||||
| import types | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Iterator, | ||||
|     Generic, | ||||
|     Literal, | ||||
|     Type, | ||||
|     TypeVar, | ||||
|     TypeAlias, | ||||
|     Union, | ||||
| ) | ||||
| 
 | ||||
| from msgspec import ( | ||||
|     msgpack, | ||||
|     Struct as _Struct, | ||||
|     structs, | ||||
|     defstruct, | ||||
|     # field, | ||||
|     Raw, | ||||
|     Struct, | ||||
|     # UNSET, | ||||
|     # UnsetType, | ||||
| ) | ||||
| 
 | ||||
| # TODO: auto-gen type sig for input func both for | ||||
| # type-msgs and logging of RPC tasks? | ||||
| # taken and modified from: | ||||
| # https://stackoverflow.com/a/57110117 | ||||
| # import inspect | ||||
| # from typing import List | ||||
| 
 | ||||
| # def my_function(input_1: str, input_2: int) -> list[int]: | ||||
| #     pass | ||||
| 
 | ||||
| # def types_of(func): | ||||
| #     specs = inspect.getfullargspec(func) | ||||
| #     return_type = specs.annotations['return'] | ||||
| #     input_types = [t.__name__ for s, t in specs.annotations.items() if s != 'return'] | ||||
| #     return f'{func.__name__}({": ".join(input_types)}) -> {return_type}' | ||||
| 
 | ||||
| # types_of(my_function) | ||||
| 
 | ||||
| 
 | ||||
| class DiffDump(UserList): | ||||
|     ''' | ||||
|     Very simple list delegator that repr() dumps (presumed) tuple | ||||
|     elements of the form `tuple[str, Any, Any]` in a nice | ||||
|     multi-line readable form for analyzing `Struct` diffs. | ||||
| 
 | ||||
|     ''' | ||||
|     def __repr__(self) -> str: | ||||
|         if not len(self): | ||||
|             return super().__repr__() | ||||
| 
 | ||||
|         # format by displaying item pair's ``repr()`` on multiple, | ||||
|         # indented lines such that they are more easily visually | ||||
|         # comparable when printed to console when printed to | ||||
|         # console. | ||||
|         repstr: str = '[\n' | ||||
|         for k, left, right in self: | ||||
|             repstr += ( | ||||
|                 f'({k},\n' | ||||
|                 f'\t{repr(left)},\n' | ||||
|                 f'\t{repr(right)},\n' | ||||
|                 ')\n' | ||||
| from tractor.msg import ( | ||||
|     pretty_struct, | ||||
| ) | ||||
|         repstr += ']\n' | ||||
|         return repstr | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| class Struct( | ||||
|     _Struct, | ||||
| log = get_logger('tractor.msgspec') | ||||
| 
 | ||||
| # type variable for the boxed payload field `.pld` | ||||
| PayloadT = TypeVar('PayloadT') | ||||
| 
 | ||||
| 
 | ||||
| class PayloadMsg( | ||||
|     Struct, | ||||
|     Generic[PayloadT], | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||
|     # tag='pikerstruct', | ||||
|     # tag=True, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#field-ordering | ||||
|     # kw_only=True, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#equality-and-order | ||||
|     # order=True, | ||||
| 
 | ||||
|     # https://jcristharif.com/msgspec/structs.html#encoding-decoding-as-arrays | ||||
|     # as_array=True, | ||||
| ): | ||||
|     ''' | ||||
|     A "human friendlier" (aka repl buddy) struct subtype. | ||||
|     An abstract payload boxing/shuttling IPC msg type. | ||||
| 
 | ||||
|     Boxes data-values passed to/from user code | ||||
| 
 | ||||
|     (i.e. any values passed by `tractor` application code using any of | ||||
| 
 | ||||
|       |_ `._streaming.MsgStream.send/receive()` | ||||
|       |_ `._context.Context.started/result()` | ||||
|       |_ `._ipc.Channel.send/recv()` | ||||
| 
 | ||||
|      aka our "IPC primitive APIs") | ||||
| 
 | ||||
|     as message "payloads" set to the `.pld` field and uses | ||||
|     `msgspec`'s "tagged unions" feature to support a subset of our | ||||
|     "SC-transitive shuttle protocol" specification with | ||||
|     a `msgspec.Struct` inheritance tree. | ||||
| 
 | ||||
|     ''' | ||||
|     def _sin_props(self) -> Iterator[ | ||||
|         tuple[ | ||||
|             structs.FieldIinfo, | ||||
|             str, | ||||
|             Any, | ||||
|     cid: str  # call/context-id | ||||
|     # ^-TODO-^: more explicit type? | ||||
|     # -[ ] use UNSET here? | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#unset | ||||
|     # | ||||
|     # -[ ] `uuid.UUID` which has multi-protocol support | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#uuid | ||||
| 
 | ||||
|     # The msg's "payload" (spelled without vowels): | ||||
|     # https://en.wikipedia.org/wiki/Payload_(computing) | ||||
|     pld: Raw | ||||
| 
 | ||||
|     # ^-NOTE-^ inherited from any `PayloadMsg` (and maybe type | ||||
|     # overridden via the `._ops.limit_plds()` API), but by default is | ||||
|     # parameterized to be `Any`. | ||||
|     # | ||||
|     # XXX this `Union` must strictly NOT contain `Any` if | ||||
|     # a limited msg-type-spec is intended, such that when | ||||
|     # creating and applying a new `MsgCodec` its  | ||||
|     # `.decoder: Decoder` is configured with a `Union[Type[Struct]]` which | ||||
|     # restricts the allowed payload content (this `.pld` field)  | ||||
|     # by type system defined loading constraints B) | ||||
|     # | ||||
|     # TODO: could also be set to `msgspec.Raw` if the sub-decoders | ||||
|     # approach is preferred over the generic parameterization  | ||||
|     # approach as take by `mk_msg_spec()` below. | ||||
| 
 | ||||
| 
 | ||||
| # TODO: complete rename | ||||
| Msg = PayloadMsg | ||||
| 
 | ||||
| 
 | ||||
| class Aid( | ||||
|     Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Actor-identity msg. | ||||
| 
 | ||||
|     Initial contact exchange enabling an actor "mailbox handshake" | ||||
|     delivering the peer identity (and maybe eventually contact) | ||||
|     info. | ||||
| 
 | ||||
|     Used by discovery protocol to register actors as well as | ||||
|     conduct the initial comms (capability) filtering. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str | ||||
|     uuid: str | ||||
|     # TODO: use built-in support for UUIDs? | ||||
|     # -[ ] `uuid.UUID` which has multi-protocol support | ||||
|     #  https://jcristharif.com/msgspec/supported-types.html#uuid | ||||
| 
 | ||||
| 
 | ||||
| class SpawnSpec( | ||||
|     pretty_struct.Struct, | ||||
|     tag=True, | ||||
|     tag_field='msg_type', | ||||
| ): | ||||
|     ''' | ||||
|     Initial runtime spec handed down from a spawning parent to its | ||||
|     child subactor immediately following first contact via an | ||||
|     `Aid` msg. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: similar to the `Start` kwargs spec needed below, we need | ||||
|     # a hard `Struct` def for all of these fields! | ||||
|     _parent_main_data: dict | ||||
|     _runtime_vars: dict[str, Any] | ||||
| 
 | ||||
|     # module import capability | ||||
|     enable_modules: dict[str, str] | ||||
| 
 | ||||
|     # TODO: not just sockaddr pairs? | ||||
|     # -[ ] abstract into a `TransportAddr` type? | ||||
|     reg_addrs: list[tuple[str, int]] | ||||
|     bind_addrs: list[tuple[str, int]] | ||||
| 
 | ||||
| 
 | ||||
| # TODO: caps based RPC support in the payload? | ||||
| # | ||||
| # -[ ] integration with our ``enable_modules: list[str]`` caps sys. | ||||
| #   ``pkgutil.resolve_name()`` internally uses | ||||
| #   ``importlib.import_module()`` which can be filtered by | ||||
| #   inserting a ``MetaPathFinder`` into ``sys.meta_path`` (which | ||||
| #   we could do before entering the ``Actor._process_messages()`` | ||||
| #   loop)? | ||||
| #   - https://github.com/python/cpython/blob/main/Lib/pkgutil.py#L645 | ||||
| #   - https://stackoverflow.com/questions/1350466/preventing-python-code-from-importing-certain-modules | ||||
| #   - https://stackoverflow.com/a/63320902 | ||||
| #   - https://docs.python.org/3/library/sys.html#sys.meta_path | ||||
| # | ||||
| # -[ ] can we combine .ns + .func into a native `NamespacePath` field? | ||||
| # | ||||
| # -[ ] better name, like `Call/TaskInput`? | ||||
| # | ||||
| # -[ ] XXX a debugger lock msg transaction with payloads like, | ||||
| #   child -> `.pld: DebugLock` -> root | ||||
| #   child <- `.pld: DebugLocked` <- root | ||||
| #   child -> `.pld: DebugRelease` -> root | ||||
| # | ||||
| #   WHY => when a pld spec is provided it might not allow for | ||||
| #   debug mode msgs as they currently are (using plain old `pld. | ||||
| #   str` payloads) so we only when debug_mode=True we need to | ||||
| #   union in this debugger payload set? | ||||
| # | ||||
| #   mk_msg_spec( | ||||
| #       MyPldSpec, | ||||
| #       debug_mode=True, | ||||
| #   ) -> ( | ||||
| #       Union[MyPldSpec] | ||||
| #      | Union[DebugLock, DebugLocked, DebugRelease] | ||||
| #   ) | ||||
| 
 | ||||
| # class Params( | ||||
| #     Struct, | ||||
| #     Generic[PayloadT], | ||||
| # ): | ||||
| #     spec: PayloadT|ParamSpec | ||||
| #     inputs: InputsT|dict[str, Any] | ||||
| 
 | ||||
|     # TODO: for eg. we could stringently check the target | ||||
|     # task-func's type sig and enforce it? | ||||
|     # as an example for an IPTC, | ||||
|     # @tractor.context | ||||
|     # async def send_back_nsp( | ||||
|     #     ctx: Context, | ||||
|     #     expect_debug: bool, | ||||
|     #     pld_spec_str: str, | ||||
|     #     add_hooks: bool, | ||||
|     #     started_msg_dict: dict, | ||||
|     # ) -> <WhatHere!>: | ||||
| 
 | ||||
|     # TODO: figure out which of the `typing` feats we want to | ||||
|     # support: | ||||
|     # - plain ol `ParamSpec`: | ||||
|     #   https://docs.python.org/3/library/typing.html#typing.ParamSpec | ||||
|     # - new in 3.12 type parameter lists Bo | ||||
|     # |_ https://docs.python.org/3/reference/compound_stmts.html#type-params | ||||
|     # |_ historical pep 695: https://peps.python.org/pep-0695/ | ||||
|     # |_ full lang spec: https://typing.readthedocs.io/en/latest/spec/ | ||||
|     # |_ on annotation scopes: | ||||
|     #    https://docs.python.org/3/reference/executionmodel.html#annotation-scopes | ||||
|     # spec: ParamSpec[ | ||||
|     #     expect_debug: bool, | ||||
|     #     pld_spec_str: str, | ||||
|     #     add_hooks: bool, | ||||
|     #     started_msg_dict: dict, | ||||
|     # ] | ||||
| 
 | ||||
| 
 | ||||
| # TODO: possibly sub-type for runtime method requests? | ||||
| # -[ ] `Runtime(Start)` with a `.ns: str = 'self' or | ||||
| #     we can just enforce any such method as having a strict | ||||
| #     ns for calling funcs, namely the `Actor` instance? | ||||
class Start(
    Struct,
    tag=True,
    tag_field='msg_type',
):
    '''
    Initial request to remotely schedule an RPC `trio.Task` via
    `Actor.start_remote_task()`.

    It is called by all the following public APIs:

    - `ActorNursery.run_in_actor()`

    - `Portal.run()`
          `|_.run_from_ns()`
          `|_.open_stream_from()`
          `|_._submit_for_result()`

    - `Context.open_context()`

    '''
    # the caller-chosen id for this inter-actor-context dialog.
    cid: str

    # namespace (module) path and function name of the remote
    # endpoint to be scheduled as a task.
    ns: str
    func: str

    # TODO: make this a sub-struct which can be further
    # type-limited, maybe `Inputs`?
    # => SEE ABOVE <=
    # keyword inputs passed through to the remote endpoint func.
    kwargs: dict[str, Any]
    uid: tuple[str, str]  # (calling) actor-id

    # TODO: enforcing a msg-spec in terms `Msg.pld`
    # parameterizable msgs to be used in the appls IPC dialog.
    # => SEE `._codec.MsgDec` for more <=
    # stringified payload-type-spec; defaults to the str-form of
    # `typing.Any` (i.e. no payload restriction).
    pld_spec: str = str(Any)
| 
 | ||||
| 
 | ||||
class StartAck(
    Struct,
    tag=True,
    tag_field='msg_type',
):
    '''
    Init response to a `Cmd` request indicating the far
    end's RPC spec, namely its callable "type".

    '''
    # context-id matching the `Start` request being acked.
    cid: str
    # TODO: maybe better names for all these?
    # -[ ] obvi ^ would need sync with `._rpc`
    # the "shape" of the scheduled callable: a plain async func,
    # an async generator, or a `@context` endpoint.
    functype: Literal[
        'asyncfunc',
        'asyncgen',
        'context',  # TODO: the only one eventually?
    ]

    # import typing
    # eval(str(Any), {}, {'typing': typing})
    # started_spec: str = str(Any)
    # return_spec
|     # return_spec | ||||
| 
 | ||||
| 
 | ||||
class Started(
    PayloadMsg,
    Generic[PayloadT],
):
    '''
    Packet to shuttle the "first value" delivered by
    `Context.started(value: Any)` from a `@tractor.context`
    decorated IPC endpoint.

    '''
    # the app-level payload; either decoded to the parameterized
    # `PayloadT` or left as raw (undecoded) bytes.
    pld: PayloadT|Raw
| 
 | ||||
| 
 | ||||
| # TODO: cancel request dedicated msg? | ||||
| # -[ ] instead of using our existing `Start`? | ||||
| # | ||||
| # class Cancel: | ||||
| #     cid: str | ||||
| 
 | ||||
| 
 | ||||
class Yield(
    PayloadMsg,
    Generic[PayloadT],
):
    '''
    Per IPC transmission of a value from `await MsgStream.send(<value>)`.

    '''
    # the app-level payload; either decoded to the parameterized
    # `PayloadT` or left as raw (undecoded) bytes.
    pld: PayloadT|Raw
| 
 | ||||
| 
 | ||||
class Stop(
    Struct,
    tag=True,
    tag_field='msg_type',
):
    '''
    Stream termination signal, much like an IPC version of
    `StopAsyncIteration`.

    '''
    # context-id of the stream dialog being terminated.
    cid: str
    # TODO: do we want to support a payload on stop?
    # pld: UnsetType = UNSET
| 
 | ||||
| 
 | ||||
| # TODO: is `Result` or `Out[come]` a better name? | ||||
# TODO: is `Result` or `Out[come]` a better name?
class Return(
    PayloadMsg,
    Generic[PayloadT],
):
    '''
    Final `return <value>` from a remotely scheduled
    func-as-`trio.Task`.

    '''
    # the app-level payload; either decoded to the parameterized
    # `PayloadT` or left as raw (undecoded) bytes.
    pld: PayloadT|Raw
| 
 | ||||
| 
 | ||||
class CancelAck(
    PayloadMsg,
    Generic[PayloadT],
):
    '''
    Deliver the `bool` return-value from a cancellation `Actor`
    method scheduled via a prior RPC request.

    - `Actor.cancel()`
       `|_.cancel_soon()`
       `|_.cancel_rpc_tasks()`
       `|_._cancel_task()`
       `|_.cancel_server()`

    RPCs to these methods must **always** be able to deliver a result
    despite the currently configured IPC msg spec such that graceful
    cancellation is always functional in the runtime.

    '''
    # the cancel-method's `bool` result; NOT parameterizable so it
    # always passes any applied payload-spec.
    pld: bool
| 
 | ||||
| 
 | ||||
| # TODO: unify this with `._exceptions.RemoteActorError` | ||||
| # such that we can have a msg which is both raisable and | ||||
| # IPC-wire ready? | ||||
| # B~o | ||||
# TODO: unify this with `._exceptions.RemoteActorError`
# such that we can have a msg which is both raisable and
# IPC-wire ready?
# B~o
class Error(
    Struct,
    tag=True,
    tag_field='msg_type',

    # TODO may omit defaults?
    # https://jcristharif.com/msgspec/structs.html#omitting-default-values
    # omit_defaults=True,
):
    '''
    A pkt that wraps `RemoteActorError`s for relay and raising.

    Fields are 1-to-1 meta-data as needed originally by
    `RemoteActorError.msgdata: dict` but now are defined here.

    Note: this msg shuttles `ContextCancelled` and `StreamOverrun`
    as well as is used to rewrap any `MsgTypeError` for
    relay-response to bad `Yield.pld` senders during an IPC ctx's
    streaming dialog phase.

    '''
    # the original erroring actor's id, the src and boxed exception
    # type names, and the actor-hop path the error was relayed over.
    src_uid: tuple[str, str]
    src_type_str: str
    boxed_type_str: str
    relay_path: list[tuple[str, str]]

    # normally either both are provided or just
    # a message for certain special cases where
    # we pack a message for a locally raised
    # mte or ctxc.
    message: str|None = None
    tb_str: str = ''

    # TODO: only optionally include sub-type specific fields?
    # -[ ] use UNSET or don't include them via `omit_defaults` (see
    #      inheritance-line options above)
    #
    # `ContextCancelled` reports the src cancelling `Actor.uid`
    canceller: tuple[str, str]|None = None

    # `StreamOverrun`-specific src `Actor.uid`
    sender: tuple[str, str]|None = None

    # `MsgTypeError` meta-data
    cid: str|None = None
    # when the receiver side fails to decode a delivered
    # `PayloadMsg`-subtype; one and/or both the msg-struct instance
    # and `Any`-decoded to `dict` of the msg are set and relayed
    # (back to the sender) for introspection.
    _bad_msg: Started|Yield|Return|None = None
    _bad_msg_as_dict: dict|None = None
| 
 | ||||
| 
 | ||||
def from_dict_msg(
    dict_msg: dict,

    msgT: MsgType|None = None,
    tag_field: str = 'msg_type',
    use_pretty: bool = False,

) -> MsgType:
    '''
    Helper to build a specific `MsgType` struct from a "vanilla"
    decoded `dict`-ified equivalent of the msg: i.e. if the
    `msgpack.Decoder.type == Any`, the default when using
    `msgspec.msgpack` and not "typed decoding" using
    `msgspec.Struct`.

    Raises `KeyError` if the tag field is missing from `dict_msg`
    or names an unknown msg type.

    '''
    # prefer the tag-field name declared on an explicitly passed
    # struct type, else fall back to the `tag_field` input.
    msg_type_tag_field: str = (
        msgT.__struct_config__.tag_field
        if msgT is not None
        else tag_field
    )
    # XXX ensure tag field is removed so it isn't passed as a
    # (bogus) field-kwarg to the struct ctor below; work on
    # a shallow copy so the caller's input `dict` is NOT mutated.
    fields: dict = dict_msg.copy()
    msgT_name: str = fields.pop(msg_type_tag_field)
    struct_type: MsgType = _msg_table[msgT_name]
    if use_pretty:
        # dynamically re-declare the struct type with
        # `pretty_struct.Struct` mixed in purely for its
        # prettified `repr()` rendering.
        struct_type = defstruct(
            name=msgT_name,
            fields=[
                (key, fi.type)
                for fi, key, _
                in pretty_struct.iter_fields(struct_type)
            ],
            bases=(
                pretty_struct.Struct,
                struct_type,
            ),
        )
    return struct_type(**fields)
| 
 | ||||
| # TODO: should be make a set of cancel msgs? | ||||
| # -[ ] a version of `ContextCancelled`? | ||||
| #     |_ and/or with a scope field? | ||||
| # -[ ] or, a full `ActorCancelled`? | ||||
| # | ||||
| # class Cancelled(MsgType): | ||||
| #     cid: str | ||||
| # | ||||
| # -[ ] what about overruns? | ||||
| # | ||||
| # class Overrun(MsgType): | ||||
| #     cid: str | ||||
| 
 | ||||
# the core runtime-protocol msg set; see also `_payload_msgs` and
# `__msg_types__` below.
_runtime_msgs: list[Struct] = [

    # identity handshake on first IPC `Channel` contact.
    Aid,

    # parent-to-child spawn specification passed as 2nd msg after
    # handshake ONLY after child connects back to parent.
    SpawnSpec,

    # inter-actor RPC initiation
    Start,  # schedule remote task-as-func
    StartAck,  # ack the schedule request

    # emission from `MsgStream.aclose()`
    Stop,

    # `Return` sub-type that we always accept from
    # runtime-internal cancel endpoints
    CancelAck,

    # box remote errors, normally subtypes
    # of `RemoteActorError`.
    Error,
]
| 
 | ||||
# the no-outcome-yet IAC (inter-actor-communication) sub-set which
# can be `PayloadMsg.pld` payload field type-limited by application code
# using `apply_codec()` and `limit_msg_spec()`.
_payload_msgs: list[PayloadMsg] = [
    # first <value> from `Context.started(<value>)`
    Started,

    # any <value> sent via `MsgStream.send(<value>)`
    Yield,

    # the final value returned from a `@context` decorated
    # IPC endpoint.
    Return,
]

# built-in SC shuttle protocol msg type set in
# approx order of the IPC txn-state spaces.
__msg_types__: list[MsgType] = (
    _runtime_msgs
    +
    _payload_msgs
)

# lookup table from msg-type-name to struct-type, used (for ex.)
# by `from_dict_msg()` above to rebuild a struct from a decoded
# `dict`.
_msg_table: dict[str, MsgType] = {
    msgT.__name__: msgT
    for msgT in __msg_types__
}

# TODO: use new type declaration syntax for msg-type-spec
# https://docs.python.org/3/library/typing.html#type-aliases
# https://docs.python.org/3/reference/simple_stmts.html#type
# NOTE: (re)binds the module-level `MsgType` alias to the `Union`
# of ALL the above msg struct types.
MsgType: TypeAlias = Union[*__msg_types__]
| 
 | ||||
| 
 | ||||
| def mk_msg_spec( | ||||
|     payload_type_union: Union[Type] = Any, | ||||
| 
 | ||||
|     spec_build_method: Literal[ | ||||
|         'indexed_generics',  # works | ||||
|         'defstruct', | ||||
|         'types_new_class', | ||||
| 
 | ||||
|     ] = 'indexed_generics', | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     Union[MsgType], | ||||
|     list[MsgType], | ||||
| ]: | ||||
|     ''' | ||||
|         Iterate over all non-@property fields of this struct. | ||||
|     Create a payload-(data-)type-parameterized IPC message specification. | ||||
| 
 | ||||
|     Allows generating IPC msg types from the above builtin set | ||||
|     with a payload (field) restricted data-type, the `Msg.pld: PayloadT`. | ||||
| 
 | ||||
|     This allows runtime-task contexts to use the python type system | ||||
|     to limit/filter payload values as determined by the input | ||||
|     `payload_type_union: Union[Type]`. | ||||
| 
 | ||||
|     Notes: originally multiple approaches for constructing the | ||||
|     type-union passed to `msgspec` were attempted as selected via the | ||||
|     `spec_build_method`, but it turns out only the defaul method | ||||
|     'indexed_generics' seems to work reliably in all use cases. As | ||||
|     such, the others will likely be removed in the near future. | ||||
| 
 | ||||
|     ''' | ||||
|         fi: structs.FieldInfo | ||||
|         for fi in structs.fields(self): | ||||
|             key: str = fi.name | ||||
|             val: Any = getattr(self, key) | ||||
|             yield fi, key, val | ||||
|     submsg_types: list[MsgType] = Msg.__subclasses__() | ||||
|     bases: tuple = ( | ||||
|         # XXX NOTE XXX the below generic-parameterization seems to | ||||
|         # be THE ONLY way to get this to work correctly in terms | ||||
|         # of getting ValidationError on a roundtrip? | ||||
|         Msg[payload_type_union], | ||||
|         Generic[PayloadT], | ||||
|     ) | ||||
|     defstruct_bases: tuple = ( | ||||
|         Msg, # [payload_type_union], | ||||
|         # Generic[PayloadT], | ||||
|         # ^-XXX-^: not allowed? lul.. | ||||
|     ) | ||||
|     ipc_msg_types: list[Msg] = [] | ||||
| 
 | ||||
|     def to_dict( | ||||
|         self, | ||||
|         include_non_members: bool = True, | ||||
|     idx_msg_types: list[Msg] = [] | ||||
|     defs_msg_types: list[Msg] = [] | ||||
|     nc_msg_types: list[Msg] = [] | ||||
| 
 | ||||
|     ) -> dict: | ||||
|         ''' | ||||
|         Like it sounds.. direct delegation to: | ||||
|         https://jcristharif.com/msgspec/api.html#msgspec.structs.asdict | ||||
|     for msgtype in __msg_types__: | ||||
| 
 | ||||
|         BUT, by default we pop all non-member (aka not defined as | ||||
|         struct fields) fields by default. | ||||
|         # for the NON-payload (user api) type specify-able | ||||
|         # msgs types, we simply aggregate the def as is | ||||
|         # for inclusion in the output type `Union`. | ||||
|         if msgtype not in _payload_msgs: | ||||
|             ipc_msg_types.append(msgtype) | ||||
|             continue | ||||
| 
 | ||||
|         ''' | ||||
|         asdict: dict = structs.asdict(self) | ||||
|         if include_non_members: | ||||
|             return asdict | ||||
|         # check inheritance sanity | ||||
|         assert msgtype in submsg_types | ||||
| 
 | ||||
|         # only return a dict of the struct members | ||||
|         # which were provided as input, NOT anything | ||||
|         # added as type-defined `@property` methods! | ||||
|         sin_props: dict = {} | ||||
|         fi: structs.FieldInfo | ||||
|         for fi, k, v in self._sin_props(): | ||||
|             sin_props[k] = asdict[k] | ||||
|         # TODO: wait why do we need the dynamic version here? | ||||
|         # XXX ANSWER XXX -> BC INHERITANCE.. don't work w generics.. | ||||
|         # | ||||
|         # NOTE previously bc msgtypes WERE NOT inheritting | ||||
|         # directly the `Generic[PayloadT]` type, the manual method | ||||
|         # of generic-paraming with `.__class_getitem__()` wasn't | ||||
|         # working.. | ||||
|         # | ||||
|         # XXX but bc i changed that to make every subtype inherit | ||||
|         # it, this manual "indexed parameterization" method seems | ||||
|         # to work? | ||||
|         # | ||||
|         # -[x] paraming the `PayloadT` values via `Generic[T]` | ||||
|         #   does work it seems but WITHOUT inheritance of generics | ||||
|         # | ||||
|         # -[-] is there a way to get it to work at module level | ||||
|         #   just using inheritance or maybe a metaclass? | ||||
|         #  => thot that `defstruct` might work, but NOPE, see | ||||
|         #   below.. | ||||
|         # | ||||
|         idxed_msg_type: Msg = msgtype[payload_type_union] | ||||
|         idx_msg_types.append(idxed_msg_type) | ||||
| 
 | ||||
|         return sin_props | ||||
| 
 | ||||
|     def pformat( | ||||
|         self, | ||||
|         field_indent: int = 2, | ||||
|         indent: int = 0, | ||||
| 
 | ||||
|     ) -> str: | ||||
|         ''' | ||||
|         Recursion-safe `pprint.pformat()` style formatting of | ||||
|         a `msgspec.Struct` for sane reading by a human using a REPL. | ||||
| 
 | ||||
|         ''' | ||||
|         # global whitespace indent | ||||
|         ws: str = ' '*indent | ||||
| 
 | ||||
|         # field whitespace indent | ||||
|         field_ws: str = ' '*(field_indent + indent) | ||||
| 
 | ||||
|         # qtn: str = ws + self.__class__.__qualname__ | ||||
|         qtn: str = self.__class__.__qualname__ | ||||
| 
 | ||||
|         obj_str: str = ''  # accumulator | ||||
|         fi: structs.FieldInfo | ||||
|         k: str | ||||
|         v: Any | ||||
|         for fi, k, v in self._sin_props(): | ||||
| 
 | ||||
|             # TODO: how can we prefer `Literal['option1',  'option2, | ||||
|             # ..]` over .__name__ == `Literal` but still get only the | ||||
|             # latter for simple types like `str | int | None` etc..? | ||||
|             ft: type = fi.type | ||||
|             typ_name: str = getattr(ft, '__name__', str(ft)) | ||||
| 
 | ||||
|             # recurse to get sub-struct's `.pformat()` output Bo | ||||
|             if isinstance(v, Struct): | ||||
|                 val_str: str =  v.pformat( | ||||
|                     indent=field_indent + indent, | ||||
|                     field_indent=indent + field_indent, | ||||
|         # TODO: WHY do we need to dynamically generate the | ||||
|         # subtype-msgs here to ensure the `.pld` parameterization | ||||
|         # propagates as well as works at all in terms of the | ||||
|         # `msgpack.Decoder()`..? | ||||
|         # | ||||
|         # dynamically create the payload type-spec-limited msg set. | ||||
|         newclass_msgtype: Type = types.new_class( | ||||
|             name=msgtype.__name__, | ||||
|             bases=bases, | ||||
|             kwds={}, | ||||
|         ) | ||||
|         nc_msg_types.append( | ||||
|             newclass_msgtype[payload_type_union] | ||||
|         ) | ||||
| 
 | ||||
|             else:  # the `pprint` recursion-safe format: | ||||
|                 # https://docs.python.org/3.11/library/pprint.html#pprint.saferepr | ||||
|                 val_str: str = saferepr(v) | ||||
|         # with `msgspec.structs.defstruct` | ||||
|         # XXX ALSO DOESN'T WORK | ||||
|         defstruct_msgtype = defstruct( | ||||
|             name=msgtype.__name__, | ||||
|             fields=[ | ||||
|                 ('cid', str), | ||||
| 
 | ||||
|             # TODO: LOLOL use `textwrap.indent()` instead dawwwwwg! | ||||
|             obj_str += (field_ws + f'{k}: {typ_name} = {val_str},\n') | ||||
|                 # XXX doesn't seem to work.. | ||||
|                 # ('pld', PayloadT), | ||||
| 
 | ||||
|                 ('pld', payload_type_union), | ||||
|             ], | ||||
|             bases=defstruct_bases, | ||||
|         ) | ||||
|         defs_msg_types.append(defstruct_msgtype) | ||||
| 
 | ||||
|         # assert index_paramed_msg_type == manual_paramed_msg_subtype | ||||
| 
 | ||||
|         # paramed_msg_type = manual_paramed_msg_subtype | ||||
| 
 | ||||
|         # ipc_payload_msgs_type_union |= index_paramed_msg_type | ||||
| 
 | ||||
|     idx_spec: Union[Type[Msg]] = Union[*idx_msg_types] | ||||
|     def_spec: Union[Type[Msg]] = Union[*defs_msg_types] | ||||
|     nc_spec: Union[Type[Msg]] = Union[*nc_msg_types] | ||||
| 
 | ||||
|     specs: dict[str, Union[Type[Msg]]] = { | ||||
|         'indexed_generics': idx_spec, | ||||
|         'defstruct': def_spec, | ||||
|         'types_new_class': nc_spec, | ||||
|     } | ||||
|     msgtypes_table: dict[str, list[Msg]] = { | ||||
|         'indexed_generics': idx_msg_types, | ||||
|         'defstruct': defs_msg_types, | ||||
|         'types_new_class': nc_msg_types, | ||||
|     } | ||||
| 
 | ||||
|     # XXX lol apparently type unions can't ever | ||||
|     # be equal eh? | ||||
|     # TODO: grok the diff here better.. | ||||
|     # | ||||
|     # assert ( | ||||
|     #     idx_spec | ||||
|     #     == | ||||
|     #     nc_spec | ||||
|     #     == | ||||
|     #     def_spec | ||||
|     # ) | ||||
|     # breakpoint() | ||||
| 
 | ||||
|     pld_spec: Union[Type] = specs[spec_build_method] | ||||
|     runtime_spec: Union[Type] = Union[*ipc_msg_types] | ||||
|     ipc_spec = pld_spec | runtime_spec | ||||
|     log.runtime( | ||||
|         'Generating new IPC msg-spec\n' | ||||
|         f'{ipc_spec}\n' | ||||
|     ) | ||||
|     assert ( | ||||
|         ipc_spec | ||||
|         and | ||||
|         ipc_spec is not Any | ||||
|     ) | ||||
|     return ( | ||||
|             f'{qtn}(\n' | ||||
|             f'{obj_str}' | ||||
|             f'{ws})' | ||||
|         ipc_spec, | ||||
|         msgtypes_table[spec_build_method] | ||||
|         + | ||||
|         ipc_msg_types, | ||||
|     ) | ||||
| 
 | ||||
    # TODO: use a pprint.PrettyPrinter instance around ONLY rendering
    # inside a known tty?
    # def __repr__(self) -> str:
    #     ...

    # __str__ = __repr__ = pformat
    # NOTE: alias the `pformat()` method (defined above on this
    # struct type) as the canonical `repr()` renderer.
    __repr__ = pformat
| 
 | ||||
|     def copy( | ||||
|         self, | ||||
|         update: dict | None = None, | ||||
| 
 | ||||
|     ) -> Struct: | ||||
|         ''' | ||||
|         Validate-typecast all self defined fields, return a copy of | ||||
|         us with all such fields. | ||||
| 
 | ||||
|         NOTE: This is kinda like the default behaviour in | ||||
|         `pydantic.BaseModel` except a copy of the object is | ||||
|         returned making it compat with `frozen=True`. | ||||
| 
 | ||||
|         ''' | ||||
|         if update: | ||||
|             for k, v in update.items(): | ||||
|                 setattr(self, k, v) | ||||
| 
 | ||||
|         # NOTE: roundtrip serialize to validate | ||||
|         # - enode to msgpack binary format, | ||||
|         # - decode that back to a struct. | ||||
|         return msgpack.Decoder(type=type(self)).decode( | ||||
|             msgpack.Encoder().encode(self) | ||||
|         ) | ||||
| 
 | ||||
|     def typecast( | ||||
|         self, | ||||
| 
 | ||||
|         # TODO: allow only casting a named subset? | ||||
|         # fields: set[str] | None = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Cast all fields using their declared type annotations | ||||
|         (kinda like what `pydantic` does by default). | ||||
| 
 | ||||
|         NOTE: this of course won't work on frozen types, use | ||||
|         ``.copy()`` above in such cases. | ||||
| 
 | ||||
|         ''' | ||||
|         # https://jcristharif.com/msgspec/api.html#msgspec.structs.fields | ||||
|         fi: structs.FieldInfo | ||||
|         for fi in structs.fields(self): | ||||
|             setattr( | ||||
|                 self, | ||||
|                 fi.name, | ||||
|                 fi.type(getattr(self, fi.name)), | ||||
|             ) | ||||
| 
 | ||||
|     def __sub__( | ||||
|         self, | ||||
|         other: Struct, | ||||
| 
 | ||||
|     ) -> DiffDump[tuple[str, Any, Any]]: | ||||
|         ''' | ||||
|         Compare fields/items key-wise and return a ``DiffDump`` | ||||
|         for easy visual REPL comparison B) | ||||
| 
 | ||||
|         ''' | ||||
|         diffs: DiffDump[tuple[str, Any, Any]] = DiffDump() | ||||
|         for fi in structs.fields(self): | ||||
|             attr_name: str = fi.name | ||||
|             ours: Any = getattr(self, attr_name) | ||||
|             theirs: Any = getattr(other, attr_name) | ||||
|             if ours != theirs: | ||||
|                 diffs.append(( | ||||
|                     attr_name, | ||||
|                     ours, | ||||
|                     theirs, | ||||
|                 )) | ||||
| 
 | ||||
|         return diffs | ||||
|  |  | |||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -156,11 +156,12 @@ class BroadcastState(Struct): | |||
| 
 | ||||
| class BroadcastReceiver(ReceiveChannel): | ||||
|     ''' | ||||
|     A memory receive channel broadcaster which is non-lossy for the | ||||
|     fastest consumer. | ||||
|     A memory receive channel broadcaster which is non-lossy for | ||||
|     the fastest consumer. | ||||
| 
 | ||||
|     Additional consumer tasks can receive all produced values by registering | ||||
|     with ``.subscribe()`` and receiving from the new instance it delivers. | ||||
|     Additional consumer tasks can receive all produced values by | ||||
|     registering with ``.subscribe()`` and receiving from the new | ||||
|     instance it delivers. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|  | @ -381,7 +382,7 @@ class BroadcastReceiver(ReceiveChannel): | |||
|                         # likely it makes sense to unwind back to the | ||||
|                         # underlying? | ||||
|                         # import tractor | ||||
|                         # await tractor.breakpoint() | ||||
|                         # await tractor.pause() | ||||
|                         log.warning( | ||||
|                             f'Only one sub left for {self}?\n' | ||||
|                             'We can probably unwind from breceiver?' | ||||
|  |  | |||
|  | @ -18,8 +18,12 @@ | |||
| Async context manager primitives with hard ``trio``-aware semantics | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from __future__ import annotations | ||||
| from contextlib import ( | ||||
|     asynccontextmanager as acm, | ||||
| ) | ||||
| import inspect | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncContextManager, | ||||
|  | @ -30,13 +34,16 @@ from typing import ( | |||
|     Optional, | ||||
|     Sequence, | ||||
|     TypeVar, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import trio | ||||
| 
 | ||||
| from tractor._state import current_actor | ||||
| from tractor.log import get_logger | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor import ActorNursery | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
|  | @ -46,8 +53,10 @@ T = TypeVar("T") | |||
| 
 | ||||
| @acm | ||||
| async def maybe_open_nursery( | ||||
|     nursery: trio.Nursery | None = None, | ||||
|     nursery: trio.Nursery|ActorNursery|None = None, | ||||
|     shield: bool = False, | ||||
|     lib: ModuleType = trio, | ||||
| 
 | ||||
| ) -> AsyncGenerator[trio.Nursery, Any]: | ||||
|     ''' | ||||
|     Create a new nursery if None provided. | ||||
|  | @ -58,13 +67,12 @@ async def maybe_open_nursery( | |||
|     if nursery is not None: | ||||
|         yield nursery | ||||
|     else: | ||||
|         async with trio.open_nursery() as nursery: | ||||
|         async with lib.open_nursery() as nursery: | ||||
|             nursery.cancel_scope.shield = shield | ||||
|             yield nursery | ||||
| 
 | ||||
| 
 | ||||
| async def _enter_and_wait( | ||||
| 
 | ||||
|     mngr: AsyncContextManager[T], | ||||
|     unwrapped: dict[int, T], | ||||
|     all_entered: trio.Event, | ||||
|  | @ -91,7 +99,6 @@ async def _enter_and_wait( | |||
| 
 | ||||
| @acm | ||||
| async def gather_contexts( | ||||
| 
 | ||||
|     mngrs: Sequence[AsyncContextManager[T]], | ||||
| 
 | ||||
| ) -> AsyncGenerator[ | ||||
|  | @ -102,15 +109,17 @@ async def gather_contexts( | |||
|     None, | ||||
| ]: | ||||
|     ''' | ||||
|     Concurrently enter a sequence of async context managers, each in | ||||
|     a separate ``trio`` task and deliver the unwrapped values in the | ||||
|     same order once all managers have entered. On exit all contexts are | ||||
|     subsequently and concurrently exited. | ||||
|     Concurrently enter a sequence of async context managers (acms), | ||||
|     each from a separate `trio` task and deliver the unwrapped | ||||
|     `yield`-ed values in the same order once all managers have entered. | ||||
| 
 | ||||
|     This function is somewhat similar to common usage of | ||||
|     ``contextlib.AsyncExitStack.enter_async_context()`` (in a loop) in | ||||
|     combo with ``asyncio.gather()`` except the managers are concurrently | ||||
|     entered and exited, and cancellation just works. | ||||
|     On exit, all acms are subsequently and concurrently exited. | ||||
| 
 | ||||
|     This function is somewhat similar to a batch of non-blocking | ||||
|     calls to `contextlib.AsyncExitStack.enter_async_context()` | ||||
|     (inside a loop) *in combo with* a `asyncio.gather()` to get the | ||||
|     `.__aenter__()`-ed values, except the managers are both | ||||
|     concurrently entered and exited and *cancellation just works*(R). | ||||
| 
 | ||||
|     ''' | ||||
|     seed: int = id(mngrs) | ||||
|  | @ -210,9 +219,10 @@ async def maybe_open_context( | |||
| 
 | ||||
| ) -> AsyncIterator[tuple[bool, T]]: | ||||
|     ''' | ||||
|     Maybe open a context manager if there is not already a _Cached | ||||
|     version for the provided ``key`` for *this* actor. Return the | ||||
|     _Cached instance on a _Cache hit. | ||||
|     Maybe open an async-context-manager (acm) if there is not already | ||||
|     a `_Cached` version for the provided (input) `key` for *this* actor. | ||||
| 
 | ||||
|     Return the `_Cached` instance on a _Cache hit. | ||||
| 
 | ||||
|     ''' | ||||
|     fid = id(acm_func) | ||||
|  | @ -271,8 +281,16 @@ async def maybe_open_context( | |||
|         yield False, yielded | ||||
| 
 | ||||
|     else: | ||||
|         log.info(f'Reusing _Cached resource for {ctx_key}') | ||||
|         _Cache.users += 1 | ||||
|         log.runtime( | ||||
|             f'Re-using cached resource for user {_Cache.users}\n\n' | ||||
|             f'{ctx_key!r} -> {type(yielded)}\n' | ||||
| 
 | ||||
|             # TODO: make this work with values but without | ||||
|             # `msgspec.Struct` causing frickin crashes on field-type | ||||
|             # lookups.. | ||||
|             # f'{ctx_key!r} -> {yielded!r}\n' | ||||
|         ) | ||||
|         lock.release() | ||||
|         yield True, yielded | ||||
| 
 | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue