Compare commits
	
		
			63 Commits 
		
	
	
		
			843fd96b5d
			...
			c595a88069
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | c595a88069 | |
|  | 7edd984c98 | |
|  | 7a11b731c9 | |
|  | 89ab768835 | |
|  | a57b335517 | |
|  | e964a33ba8 | |
|  | e4529f71bf | |
|  | b2f210a17a | |
|  | 9c66d140d0 | |
|  | 5b08d1444e | |
|  | 22a0e25a02 | |
|  | c6b8f809d6 | |
|  | dc19659956 | |
|  | 8681af6fa4 | |
|  | 87c19128ea | |
|  | 3c12d1ebf6 | |
|  | f87dd356d9 | |
|  | 5b9104070b | |
|  | dc35cd0923 | |
|  | a27615f15a | |
|  | 17fdb50781 | |
|  | 75cd9b84db | |
|  | be22f1c263 | |
|  | 0d0b450be8 | |
|  | a1e2abf97b | |
|  | 8500189690 | |
|  | 92b9fe0a32 | |
|  | ef1bfda8b6 | |
|  | 057935a98d | |
|  | 64ea64f73a | |
|  | 9bf80e1850 | |
|  | 66846c940d | |
|  | 25cfe890c4 | |
|  | a2820774d5 | |
|  | 973e43135d | |
|  | 5297c0b88b | |
|  | b87ca855be | |
|  | 9d11ffbc03 | |
|  | 59335524cf | |
|  | e0538549aa | |
|  | 26138e29e5 | |
|  | 7d435fa33b | |
|  | be0d65b5ee | |
|  | d1903ac437 | |
|  | 3ef1305c86 | |
|  | 61ddade2c2 | |
|  | c2f278eace | |
|  | c88ae9e556 | |
|  | e190f6c957 | |
|  | 2f8eeb0fb7 | |
|  | acbf4b287b | |
|  | 07bc63be8c | |
|  | 76e143d06a | |
|  | e0147563a7 | |
|  | c4419a0d7a | |
|  | 91d5ce5458 | |
|  | e7638b127c | |
|  | c7f21b9dd4 | |
|  | 82369773c5 | |
|  | 97ba5b595f | |
|  | d3f95b583c | |
|  | adf4723e18 | |
|  | 845a10b9b2 | 
|  | @ -62,7 +62,9 @@ async def recv_and_spawn_net_killers( | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|     async with ( |     async with ( | ||||||
|         ctx.open_stream() as stream, |         ctx.open_stream() as stream, | ||||||
|         trio.open_nursery() as n, |         trio.open_nursery( | ||||||
|  |             strict_exception_groups=False, | ||||||
|  |         ) as tn, | ||||||
|     ): |     ): | ||||||
|         async for i in stream: |         async for i in stream: | ||||||
|             print(f'child echoing {i}') |             print(f'child echoing {i}') | ||||||
|  | @ -77,11 +79,11 @@ async def recv_and_spawn_net_killers( | ||||||
|                 i >= break_ipc_after |                 i >= break_ipc_after | ||||||
|             ): |             ): | ||||||
|                 broke_ipc = True |                 broke_ipc = True | ||||||
|                 n.start_soon( |                 tn.start_soon( | ||||||
|                     iter_ipc_stream, |                     iter_ipc_stream, | ||||||
|                     stream, |                     stream, | ||||||
|                 ) |                 ) | ||||||
|                 n.start_soon( |                 tn.start_soon( | ||||||
|                     partial( |                     partial( | ||||||
|                         break_ipc_then_error, |                         break_ipc_then_error, | ||||||
|                         stream=stream, |                         stream=stream, | ||||||
|  |  | ||||||
|  | @ -25,7 +25,7 @@ async def bp_then_error( | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     # sync with ``trio``-side (caller) task |     # sync with `trio`-side (caller) task | ||||||
|     to_trio.send_nowait('start') |     to_trio.send_nowait('start') | ||||||
| 
 | 
 | ||||||
|     # NOTE: what happens here inside the hook needs some refinement.. |     # NOTE: what happens here inside the hook needs some refinement.. | ||||||
|  | @ -33,8 +33,7 @@ async def bp_then_error( | ||||||
|     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want |     #    we set `Lock.local_task_in_debug = 'sync'`, we probably want | ||||||
|     #    some further, at least, meta-data about the task/actor in debug |     #    some further, at least, meta-data about the task/actor in debug | ||||||
|     #    in terms of making it clear it's `asyncio` mucking about. |     #    in terms of making it clear it's `asyncio` mucking about. | ||||||
|     breakpoint() |     breakpoint()  # asyncio-side | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
|     # short checkpoint / delay |     # short checkpoint / delay | ||||||
|     await asyncio.sleep(0.5)  # asyncio-side |     await asyncio.sleep(0.5)  # asyncio-side | ||||||
|  | @ -58,7 +57,6 @@ async def trio_ctx( | ||||||
|     # this will block until the ``asyncio`` task sends a "first" |     # this will block until the ``asyncio`` task sends a "first" | ||||||
|     # message, see first line in above func. |     # message, see first line in above func. | ||||||
|     async with ( |     async with ( | ||||||
| 
 |  | ||||||
|         to_asyncio.open_channel_from( |         to_asyncio.open_channel_from( | ||||||
|             bp_then_error, |             bp_then_error, | ||||||
|             # raise_after_bp=not bp_before_started, |             # raise_after_bp=not bp_before_started, | ||||||
|  | @ -69,7 +67,7 @@ async def trio_ctx( | ||||||
|         assert first == 'start' |         assert first == 'start' | ||||||
| 
 | 
 | ||||||
|         if bp_before_started: |         if bp_before_started: | ||||||
|             await tractor.pause() |             await tractor.pause()  # trio-side | ||||||
| 
 | 
 | ||||||
|         await ctx.started(first)  # trio-side |         await ctx.started(first)  # trio-side | ||||||
| 
 | 
 | ||||||
|  | @ -111,7 +109,7 @@ async def main( | ||||||
| 
 | 
 | ||||||
|             # pause in parent to ensure no cross-actor |             # pause in parent to ensure no cross-actor | ||||||
|             # locking problems exist! |             # locking problems exist! | ||||||
|             await tractor.pause() |             await tractor.pause()  # trio-root | ||||||
| 
 | 
 | ||||||
|             if cancel_from_root: |             if cancel_from_root: | ||||||
|                 await ctx.cancel() |                 await ctx.cancel() | ||||||
|  |  | ||||||
|  | @ -21,11 +21,13 @@ async def name_error(): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     """Test breakpoint in a streaming actor. |     ''' | ||||||
|     """ |     Test breakpoint in a streaming actor. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='cancel', |         loglevel='cancel', | ||||||
|         # loglevel='devx', |         # loglevel='devx', | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -40,7 +40,7 @@ async def main(): | ||||||
|     """ |     """ | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=True, |         debug_mode=True, | ||||||
|         # loglevel='cancel', |         loglevel='devx', | ||||||
|     ) as n: |     ) as n: | ||||||
| 
 | 
 | ||||||
|         # spawn both actors |         # spawn both actors | ||||||
|  |  | ||||||
|  | @ -91,7 +91,7 @@ async def main() -> list[int]: | ||||||
|     an: ActorNursery |     an: ActorNursery | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         loglevel='cancel', |         loglevel='cancel', | ||||||
|         debug_mode=True, |         # debug_mode=True, | ||||||
|     ) as an: |     ) as an: | ||||||
| 
 | 
 | ||||||
|         seed = int(1e3) |         seed = int(1e3) | ||||||
|  |  | ||||||
|  | @ -3,20 +3,18 @@ import trio | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def sleepy_jane(): | async def sleepy_jane() -> None: | ||||||
|     uid = tractor.current_actor().uid |     uid: tuple = tractor.current_actor().uid | ||||||
|     print(f'Yo i am actor {uid}') |     print(f'Yo i am actor {uid}') | ||||||
|     await trio.sleep_forever() |     await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def main(): | async def main(): | ||||||
|     ''' |     ''' | ||||||
|     Spawn a flat actor cluster, with one process per |     Spawn a flat actor cluster, with one process per detected core. | ||||||
|     detected core. |  | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     portal_map: dict[str, tractor.Portal] |     portal_map: dict[str, tractor.Portal] | ||||||
|     results: dict[str, str] |  | ||||||
| 
 | 
 | ||||||
|     # look at this hip new syntax! |     # look at this hip new syntax! | ||||||
|     async with ( |     async with ( | ||||||
|  | @ -25,11 +23,16 @@ async def main(): | ||||||
|             modules=[__name__] |             modules=[__name__] | ||||||
|         ) as portal_map, |         ) as portal_map, | ||||||
| 
 | 
 | ||||||
|         trio.open_nursery() as n, |         trio.open_nursery( | ||||||
|  |             strict_exception_groups=False, | ||||||
|  |         ) as tn, | ||||||
|     ): |     ): | ||||||
| 
 | 
 | ||||||
|         for (name, portal) in portal_map.items(): |         for (name, portal) in portal_map.items(): | ||||||
|             n.start_soon(portal.run, sleepy_jane) |             tn.start_soon( | ||||||
|  |                 portal.run, | ||||||
|  |                 sleepy_jane, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         await trio.sleep(0.5) |         await trio.sleep(0.5) | ||||||
| 
 | 
 | ||||||
|  | @ -41,4 +44,4 @@ if __name__ == '__main__': | ||||||
|     try: |     try: | ||||||
|         trio.run(main) |         trio.run(main) | ||||||
|     except KeyboardInterrupt: |     except KeyboardInterrupt: | ||||||
|         pass |         print('trio cancelled by KBI') | ||||||
|  |  | ||||||
|  | @ -37,16 +37,14 @@ dependencies = [ | ||||||
|   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 |   # https://packaging.python.org/en/latest/discussions/install-requires-vs-requirements/#id5 | ||||||
|   # TODO, for 3.13 we must go go `0.27` which means we have to |   # TODO, for 3.13 we must go go `0.27` which means we have to | ||||||
|   # disable strict egs or port to handling them internally! |   # disable strict egs or port to handling them internally! | ||||||
|   # trio='^0.27' |   "trio>0.27", | ||||||
|   "trio>=0.24,<0.25", |  | ||||||
|   "tricycle>=0.4.1,<0.5", |   "tricycle>=0.4.1,<0.5", | ||||||
|   "wrapt>=1.16.0,<2", |   "wrapt>=1.16.0,<2", | ||||||
|   "colorlog>=6.8.2,<7", |   "colorlog>=6.8.2,<7", | ||||||
|   # built-in multi-actor `pdb` REPL |   # built-in multi-actor `pdb` REPL | ||||||
|   "pdbp>=1.5.0,<2", |   "pdbp>=1.6,<2", # windows only (from `pdbp`) | ||||||
|   # typed IPC msging |   # typed IPC msging | ||||||
|   # TODO, get back on release once 3.13 support is out! |   "msgspec>=0.19.0", | ||||||
|   "msgspec", |  | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| # ------ project ------ | # ------ project ------ | ||||||
|  | @ -56,18 +54,14 @@ dev = [ | ||||||
|   # test suite |   # test suite | ||||||
|   # TODO: maybe some of these layout choices? |   # TODO: maybe some of these layout choices? | ||||||
|   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules |   # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||||
|   "pytest>=8.2.0,<9", |   "pytest>=8.3.5", | ||||||
|   "pexpect>=4.9.0,<5", |   "pexpect>=4.9.0,<5", | ||||||
|   # `tractor.devx` tooling |   # `tractor.devx` tooling | ||||||
|   "greenback>=1.2.1,<2", |   "greenback>=1.2.1,<2", | ||||||
|   "stackscope>=0.2.2,<0.3", |   "stackscope>=0.2.2,<0.3", | ||||||
| 
 |  | ||||||
|   # xonsh usage/integration (namely as @goodboy's sh of choice Bp) |  | ||||||
|   "xonsh>=0.19.1", |  | ||||||
|   "xontrib-vox>=0.0.1,<0.0.2", |  | ||||||
|   "prompt-toolkit>=3.0.43,<4", |  | ||||||
|   "xonsh-vox-tabcomplete>=0.5,<0.6", |  | ||||||
|   "pyperclip>=1.9.0", |   "pyperclip>=1.9.0", | ||||||
|  |   "prompt-toolkit>=3.0.50", | ||||||
|  |   "xonsh>=0.19.2", | ||||||
| ] | ] | ||||||
| # TODO, add these with sane versions; were originally in | # TODO, add these with sane versions; were originally in | ||||||
| # `requirements-docs.txt`.. | # `requirements-docs.txt`.. | ||||||
|  | @ -78,21 +72,39 @@ dev = [ | ||||||
| 
 | 
 | ||||||
| # ------ dependency-groups ------ | # ------ dependency-groups ------ | ||||||
| 
 | 
 | ||||||
|  | # ------ dependency-groups ------ | ||||||
|  | 
 | ||||||
| [tool.uv.sources] | [tool.uv.sources] | ||||||
| msgspec = { git = "https://github.com/jcrist/msgspec.git" } | # XXX NOTE, only for @goodboy's hacking on `pprint(sort_dicts=False)` | ||||||
|  | # for the `pp` alias.. | ||||||
|  | # pdbp = { path = "../pdbp", editable = true } | ||||||
| 
 | 
 | ||||||
| # ------ tool.uv.sources ------ | # ------ tool.uv.sources ------ | ||||||
| # TODO, distributed (multi-host) extensions | # TODO, distributed (multi-host) extensions | ||||||
| # linux kernel networking | # linux kernel networking | ||||||
| # 'pyroute2 | # 'pyroute2 | ||||||
| 
 | 
 | ||||||
|  | # ------ tool.uv.sources ------ | ||||||
|  | 
 | ||||||
|  | [tool.uv] | ||||||
|  | # XXX NOTE, prefer the sys python bc apparently the distis from | ||||||
|  | # `astral` are built in a way that breaks `pdbp`+`tabcompleter`'s | ||||||
|  | # likely due to linking against `libedit` over `readline`.. | ||||||
|  | # |_https://docs.astral.sh/uv/concepts/python-versions/#managed-python-distributions | ||||||
|  | # |_https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html#use-of-libedit-on-linux | ||||||
|  | # | ||||||
|  | # https://docs.astral.sh/uv/reference/settings/#python-preference | ||||||
|  | python-preference = 'system' | ||||||
|  | 
 | ||||||
|  | # ------ tool.uv ------ | ||||||
|  | 
 | ||||||
| [tool.hatch.build.targets.sdist] | [tool.hatch.build.targets.sdist] | ||||||
| include = ["tractor"] | include = ["tractor"] | ||||||
| 
 | 
 | ||||||
| [tool.hatch.build.targets.wheel] | [tool.hatch.build.targets.wheel] | ||||||
| include = ["tractor"] | include = ["tractor"] | ||||||
| 
 | 
 | ||||||
| # ------ dependency-groups ------ | # ------ tool.hatch ------ | ||||||
| 
 | 
 | ||||||
| [tool.towncrier] | [tool.towncrier] | ||||||
| package = "tractor" | package = "tractor" | ||||||
|  | @ -142,3 +154,5 @@ log_cli = false | ||||||
| # TODO: maybe some of these layout choices? | # TODO: maybe some of these layout choices? | ||||||
| # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | # https://docs.pytest.org/en/8.0.x/explanation/goodpractices.html#choosing-a-test-layout-import-rules | ||||||
| # pythonpath = "src" | # pythonpath = "src" | ||||||
|  | 
 | ||||||
|  | # ------ tool.pytest ------ | ||||||
|  |  | ||||||
|  | @ -75,7 +75,10 @@ def pytest_configure(config): | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session') | @pytest.fixture(scope='session') | ||||||
| def debug_mode(request): | def debug_mode(request): | ||||||
|     return request.config.option.tractor_debug_mode |     debug_mode: bool = request.config.option.tractor_debug_mode | ||||||
|  |     # if debug_mode: | ||||||
|  |     #     breakpoint() | ||||||
|  |     return debug_mode | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.fixture(scope='session', autouse=True) | @pytest.fixture(scope='session', autouse=True) | ||||||
|  | @ -92,6 +95,12 @@ def spawn_backend(request) -> str: | ||||||
|     return request.config.option.spawn_backend |     return request.config.option.spawn_backend | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # @pytest.fixture(scope='function', autouse=True) | ||||||
|  | # def debug_enabled(request) -> str: | ||||||
|  | #     from tractor import _state | ||||||
|  | #     if _state._runtime_vars['_debug_mode']: | ||||||
|  | #         breakpoint() | ||||||
|  | 
 | ||||||
| _ci_env: bool = os.environ.get('CI', False) | _ci_env: bool = os.environ.get('CI', False) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -309,10 +309,13 @@ def test_subactor_breakpoint( | ||||||
|     child.expect(EOF) |     child.expect(EOF) | ||||||
| 
 | 
 | ||||||
|     assert in_prompt_msg( |     assert in_prompt_msg( | ||||||
|         child, |         child, [ | ||||||
|         ['RemoteActorError:', |         'MessagingError:', | ||||||
|  |         'RemoteActorError:', | ||||||
|          "('breakpoint_forever'", |          "('breakpoint_forever'", | ||||||
|          'bdb.BdbQuit',] |          'bdb.BdbQuit', | ||||||
|  |         ], | ||||||
|  |         pause_on_false=True, | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -3,7 +3,6 @@ Sketchy network blackoutz, ugly byzantine gens, puedes eschuchar la | ||||||
| cancelacion?.. | cancelacion?.. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| import itertools |  | ||||||
| from functools import partial | from functools import partial | ||||||
| from types import ModuleType | from types import ModuleType | ||||||
| 
 | 
 | ||||||
|  | @ -230,13 +229,10 @@ def test_ipc_channel_break_during_stream( | ||||||
|     # get raw instance from pytest wrapper |     # get raw instance from pytest wrapper | ||||||
|     value = excinfo.value |     value = excinfo.value | ||||||
|     if isinstance(value, ExceptionGroup): |     if isinstance(value, ExceptionGroup): | ||||||
|         value = next( |         excs = value.exceptions | ||||||
|             itertools.dropwhile( |         assert len(excs) == 1 | ||||||
|                 lambda exc: not isinstance(exc, expect_final_exc), |         final_exc = excs[0] | ||||||
|                 value.exceptions, |         assert isinstance(final_exc, expect_final_exc) | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         assert value |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -259,15 +255,16 @@ async def break_ipc_after_started( | ||||||
| 
 | 
 | ||||||
| def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | def test_stream_closed_right_after_ipc_break_and_zombie_lord_engages(): | ||||||
|     ''' |     ''' | ||||||
|     Verify that is a subactor's IPC goes down just after bringing up a stream |     Verify that is a subactor's IPC goes down just after bringing up | ||||||
|     the parent can trigger a SIGINT and the child will be reaped out-of-IPC by |     a stream the parent can trigger a SIGINT and the child will be | ||||||
|     the localhost process supervision machinery: aka "zombie lord". |     reaped out-of-IPC by the localhost process supervision machinery: | ||||||
|  |     aka "zombie lord". | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async def main(): |     async def main(): | ||||||
|         with trio.fail_after(3): |         with trio.fail_after(3): | ||||||
|             async with tractor.open_nursery() as n: |             async with tractor.open_nursery() as an: | ||||||
|                 portal = await n.start_actor( |                 portal = await an.start_actor( | ||||||
|                     'ipc_breaker', |                     'ipc_breaker', | ||||||
|                     enable_modules=[__name__], |                     enable_modules=[__name__], | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|  | @ -307,7 +307,15 @@ async def inf_streamer( | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         ctx.open_stream() as stream, |         ctx.open_stream() as stream, | ||||||
|         trio.open_nursery() as tn, | 
 | ||||||
|  |         # XXX TODO, INTERESTING CASE!! | ||||||
|  |         # - if we don't collapse the eg then the embedded | ||||||
|  |         # `trio.EndOfChannel` doesn't propagate directly to the above | ||||||
|  |         # .open_stream() parent, resulting in it also raising instead | ||||||
|  |         # of gracefully absorbing as normal.. so how to handle? | ||||||
|  |         trio.open_nursery( | ||||||
|  |             strict_exception_groups=False, | ||||||
|  |         ) as tn, | ||||||
|     ): |     ): | ||||||
|         async def close_stream_on_sentinel(): |         async def close_stream_on_sentinel(): | ||||||
|             async for msg in stream: |             async for msg in stream: | ||||||
|  |  | ||||||
|  | @ -519,7 +519,9 @@ def test_cancel_via_SIGINT_other_task( | ||||||
|     async def main(): |     async def main(): | ||||||
|         # should never timeout since SIGINT should cancel the current program |         # should never timeout since SIGINT should cancel the current program | ||||||
|         with trio.fail_after(timeout): |         with trio.fail_after(timeout): | ||||||
|             async with trio.open_nursery() as n: |             async with trio.open_nursery( | ||||||
|  |                 strict_exception_groups=False, | ||||||
|  |             ) as n: | ||||||
|                 await n.start(spawn_and_sleep_forever) |                 await n.start(spawn_and_sleep_forever) | ||||||
|                 if 'mp' in spawn_backend: |                 if 'mp' in spawn_backend: | ||||||
|                     time.sleep(0.1) |                     time.sleep(0.1) | ||||||
|  | @ -612,6 +614,12 @@ def test_fast_graceful_cancel_when_spawn_task_in_soft_proc_wait_for_daemon( | ||||||
|                     nurse.start_soon(delayed_kbi) |                     nurse.start_soon(delayed_kbi) | ||||||
| 
 | 
 | ||||||
|                     await p.run(do_nuthin) |                     await p.run(do_nuthin) | ||||||
|  | 
 | ||||||
|  |         # need to explicitly re-raise the lone kbi..now | ||||||
|  |         except* KeyboardInterrupt as kbi_eg: | ||||||
|  |             assert (len(excs := kbi_eg.exceptions) == 1) | ||||||
|  |             raise excs[0] | ||||||
|  | 
 | ||||||
|         finally: |         finally: | ||||||
|             duration = time.time() - start |             duration = time.time() - start | ||||||
|             if duration > timeout: |             if duration > timeout: | ||||||
|  |  | ||||||
|  | @ -1,917 +0,0 @@ | ||||||
| ''' |  | ||||||
| Low-level functional audits for our |  | ||||||
| "capability based messaging"-spec feats. |  | ||||||
| 
 |  | ||||||
| B~) |  | ||||||
| 
 |  | ||||||
| ''' |  | ||||||
| import typing |  | ||||||
| from typing import ( |  | ||||||
|     Any, |  | ||||||
|     Type, |  | ||||||
|     Union, |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| from msgspec import ( |  | ||||||
|     structs, |  | ||||||
|     msgpack, |  | ||||||
|     Struct, |  | ||||||
|     ValidationError, |  | ||||||
| ) |  | ||||||
| import pytest |  | ||||||
| 
 |  | ||||||
| import tractor |  | ||||||
| from tractor import ( |  | ||||||
|     _state, |  | ||||||
|     MsgTypeError, |  | ||||||
|     Context, |  | ||||||
| ) |  | ||||||
| from tractor.msg import ( |  | ||||||
|     _codec, |  | ||||||
|     _ctxvar_MsgCodec, |  | ||||||
| 
 |  | ||||||
|     NamespacePath, |  | ||||||
|     MsgCodec, |  | ||||||
|     mk_codec, |  | ||||||
|     apply_codec, |  | ||||||
|     current_codec, |  | ||||||
| ) |  | ||||||
| from tractor.msg.types import ( |  | ||||||
|     _payload_msgs, |  | ||||||
|     log, |  | ||||||
|     PayloadMsg, |  | ||||||
|     Started, |  | ||||||
|     mk_msg_spec, |  | ||||||
| ) |  | ||||||
| import trio |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def mk_custom_codec( |  | ||||||
|     pld_spec: Union[Type]|Any, |  | ||||||
|     add_hooks: bool, |  | ||||||
| 
 |  | ||||||
| ) -> MsgCodec: |  | ||||||
|     ''' |  | ||||||
|     Create custom `msgpack` enc/dec-hooks and set a `Decoder` |  | ||||||
|     which only loads `pld_spec` (like `NamespacePath`) types. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     uid: tuple[str, str] = tractor.current_actor().uid |  | ||||||
| 
 |  | ||||||
|     # XXX NOTE XXX: despite defining `NamespacePath` as a type |  | ||||||
|     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair |  | ||||||
|     # to cast to/from that type on the wire. See the docs: |  | ||||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types |  | ||||||
| 
 |  | ||||||
|     def enc_nsp(obj: Any) -> Any: |  | ||||||
|         print(f'{uid} ENC HOOK') |  | ||||||
|         match obj: |  | ||||||
|             case NamespacePath(): |  | ||||||
|                 print( |  | ||||||
|                     f'{uid}: `NamespacePath`-Only ENCODE?\n' |  | ||||||
|                     f'obj-> `{obj}`: {type(obj)}\n' |  | ||||||
|                 ) |  | ||||||
|                 # if type(obj) != NamespacePath: |  | ||||||
|                 #     breakpoint() |  | ||||||
|                 return str(obj) |  | ||||||
| 
 |  | ||||||
|         print( |  | ||||||
|             f'{uid}\n' |  | ||||||
|             'CUSTOM ENCODE\n' |  | ||||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n' |  | ||||||
|         ) |  | ||||||
|         logmsg: str = ( |  | ||||||
|             f'{uid}\n' |  | ||||||
|             'FAILED ENCODE\n' |  | ||||||
|             f'obj-> `{obj}: {type(obj)}`\n' |  | ||||||
|         ) |  | ||||||
|         raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
|     def dec_nsp( |  | ||||||
|         obj_type: Type, |  | ||||||
|         obj: Any, |  | ||||||
| 
 |  | ||||||
|     ) -> Any: |  | ||||||
|         print( |  | ||||||
|             f'{uid}\n' |  | ||||||
|             'CUSTOM DECODE\n' |  | ||||||
|             f'type-arg-> {obj_type}\n' |  | ||||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n' |  | ||||||
|         ) |  | ||||||
|         nsp = None |  | ||||||
| 
 |  | ||||||
|         if ( |  | ||||||
|             obj_type is NamespacePath |  | ||||||
|             and isinstance(obj, str) |  | ||||||
|             and ':' in obj |  | ||||||
|         ): |  | ||||||
|             nsp = NamespacePath(obj) |  | ||||||
|             # TODO: we could built a generic handler using |  | ||||||
|             # JUST matching the obj_type part? |  | ||||||
|             # nsp = obj_type(obj) |  | ||||||
| 
 |  | ||||||
|         if nsp: |  | ||||||
|             print(f'Returning NSP instance: {nsp}') |  | ||||||
|             return nsp |  | ||||||
| 
 |  | ||||||
|         logmsg: str = ( |  | ||||||
|             f'{uid}\n' |  | ||||||
|             'FAILED DECODE\n' |  | ||||||
|             f'type-> {obj_type}\n' |  | ||||||
|             f'obj-arg-> `{obj}`: {type(obj)}\n\n' |  | ||||||
|             f'current codec:\n' |  | ||||||
|             f'{current_codec()}\n' |  | ||||||
|         ) |  | ||||||
|         # TODO: figure out the ignore subsys for this! |  | ||||||
|         # -[ ] option whether to defense-relay backc the msg |  | ||||||
|         #   inside an `Invalid`/`Ignore` |  | ||||||
|         # -[ ] how to make this handling pluggable such that a |  | ||||||
|         #   `Channel`/`MsgTransport` can intercept and process |  | ||||||
|         #   back msgs either via exception handling or some other |  | ||||||
|         #   signal? |  | ||||||
|         log.warning(logmsg) |  | ||||||
|         # NOTE: this delivers the invalid |  | ||||||
|         # value up to `msgspec`'s decoding |  | ||||||
|         # machinery for error raising. |  | ||||||
|         return obj |  | ||||||
|         # raise NotImplementedError(logmsg) |  | ||||||
| 
 |  | ||||||
|     nsp_codec: MsgCodec = mk_codec( |  | ||||||
|         ipc_pld_spec=pld_spec, |  | ||||||
| 
 |  | ||||||
|         # NOTE XXX: the encode hook MUST be used no matter what since |  | ||||||
|         # our `NamespacePath` is not any of a `Any` native type nor |  | ||||||
|         # a `msgspec.Struct` subtype - so `msgspec` has no way to know |  | ||||||
|         # how to encode it unless we provide the custom hook. |  | ||||||
|         # |  | ||||||
|         # AGAIN that is, regardless of whether we spec an |  | ||||||
|         # `Any`-decoded-pld the enc has no knowledge (by default) |  | ||||||
|         # how to enc `NamespacePath` (nsp), so we add a custom |  | ||||||
|         # hook to do that ALWAYS. |  | ||||||
|         enc_hook=enc_nsp if add_hooks else None, |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE: pretty sure this is mutex with the `type=` to |  | ||||||
|         # `Decoder`? so it won't work in tandem with the |  | ||||||
|         # `ipc_pld_spec` passed above? |  | ||||||
|         dec_hook=dec_nsp if add_hooks else None, |  | ||||||
|     ) |  | ||||||
|     return nsp_codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def chk_codec_applied( |  | ||||||
|     expect_codec: MsgCodec, |  | ||||||
|     enter_value: MsgCodec|None = None, |  | ||||||
| 
 |  | ||||||
| ) -> MsgCodec: |  | ||||||
|     ''' |  | ||||||
|     buncha sanity checks ensuring that the IPC channel's |  | ||||||
|     context-vars are set to the expected codec and that are |  | ||||||
|     ctx-var wrapper APIs match the same. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # TODO: play with tricyle again, bc this is supposed to work |  | ||||||
|     # the way we want? |  | ||||||
|     # |  | ||||||
|     # TreeVar |  | ||||||
|     # task: trio.Task = trio.lowlevel.current_task() |  | ||||||
|     # curr_codec = _ctxvar_MsgCodec.get_in(task) |  | ||||||
| 
 |  | ||||||
|     # ContextVar |  | ||||||
|     # task_ctx: Context = task.context |  | ||||||
|     # assert _ctxvar_MsgCodec in task_ctx |  | ||||||
|     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] |  | ||||||
| 
 |  | ||||||
|     # NOTE: currently we use this! |  | ||||||
|     # RunVar |  | ||||||
|     curr_codec: MsgCodec = current_codec() |  | ||||||
|     last_read_codec = _ctxvar_MsgCodec.get() |  | ||||||
|     # assert curr_codec is last_read_codec |  | ||||||
| 
 |  | ||||||
|     assert ( |  | ||||||
|         (same_codec := expect_codec) is |  | ||||||
|         # returned from `mk_codec()` |  | ||||||
| 
 |  | ||||||
|         # yielded value from `apply_codec()` |  | ||||||
| 
 |  | ||||||
|         # read from current task's `contextvars.Context` |  | ||||||
|         curr_codec is |  | ||||||
|         last_read_codec |  | ||||||
| 
 |  | ||||||
|         # the default `msgspec` settings |  | ||||||
|         is not _codec._def_msgspec_codec |  | ||||||
|         is not _codec._def_tractor_codec |  | ||||||
|     ) |  | ||||||
| 
 |  | ||||||
|     if enter_value: |  | ||||||
|         enter_value is same_codec |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def iter_maybe_sends(
    send_items: dict[Union[Type], Any] | list[tuple],
    ipc_pld_spec: Union[Type] | Any,
    add_codec_hooks: bool,

    codec: MsgCodec|None = None,

# NOTE: this is a *generator* yielding `(type-name-str, value,
# expect_roundtrip)` triples; the prior `-> tuple[Any, bool]`
# annotation was wrong on both arity and wrapper type.
) -> typing.Iterator[tuple[str, Any, bool]]:
    '''
    Yield a `(send_type_str, send_value, expect_roundtrip)` triple for
    each entry in `send_items`, predicting whether each value should
    encode-decode cleanly under the given payload-spec
    (`ipc_pld_spec`) and codec-hook configuration.

    When a `codec` is passed, each value is additionally round-trip
    verified through a boxed `Started` msg BEFORE any IPC
    primitives/APIs are touched, `pytest.fail()`-ing on any
    prediction mismatch.

    '''
    if isinstance(send_items, dict):
        send_items = send_items.items()

    for (
        send_type_spec,
        send_value,
    ) in send_items:

        expect_roundtrip: bool = False

        # values-to-typespec sanity
        send_type = type(send_value)
        assert send_type == send_type_spec or (
            (subtypes := getattr(send_type_spec, '__args__', None))
            and send_type in subtypes
        )

        # a non-union spec has no `__args__` so fall back to
        # a single-member set of the spec itself.
        spec_subtypes: set[Union[Type]] = (
             getattr(
                 ipc_pld_spec,
                 '__args__',
                 {ipc_pld_spec,},
             )
        )
        send_in_spec: bool = (
            send_type == ipc_pld_spec
            or (
                ipc_pld_spec != Any
                and  # presume `Union` of types
                send_type in spec_subtypes
            )
            or (
                ipc_pld_spec == Any
                and
                send_type != NamespacePath
            )
        )
        expect_roundtrip = (
            send_in_spec
            # any spec should support all other
            # builtin py values that we send
            # except our custom nsp type which
            # we should be able to send as long
            # as we provide the custom codec hooks.
            or (
                ipc_pld_spec == Any
                and
                send_type == NamespacePath
                and
                add_codec_hooks
            )
        )

        if codec is not None:
            # XXX FIRST XXX ensure roundtripping works
            # before touching any IPC primitives/APIs.
            wire_bytes: bytes = codec.encode(
                Started(
                    cid='blahblah',
                    pld=send_value,
                )
            )
            # NOTE: demonstrates the decoder loading
            # to via our native SCIPP msg-spec
            # (structurred-conc-inter-proc-protocol)
            # implemented as per,
            try:
                msg: Started = codec.decode(wire_bytes)
                if not expect_roundtrip:
                    pytest.fail(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {send_type}\n'
                    )

                pld = msg.pld
                assert pld == send_value

            except ValidationError:
                if expect_roundtrip:
                    pytest.fail(
                        f'EXPECTED to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {send_type}\n'
                    )

        yield (
            str(send_type),
            send_value,
            expect_roundtrip,
        )
| 
 |  | ||||||
| 
 |  | ||||||
| def dec_type_union( |  | ||||||
|     type_names: list[str], |  | ||||||
| ) -> Type: |  | ||||||
|     ''' |  | ||||||
|     Look up types by name, compile into a list and then create and |  | ||||||
|     return a `typing.Union` from the full set. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     import importlib |  | ||||||
|     types: list[Type] = [] |  | ||||||
|     for type_name in type_names: |  | ||||||
|         for mod in [ |  | ||||||
|             typing, |  | ||||||
|             importlib.import_module(__name__), |  | ||||||
|         ]: |  | ||||||
|             if type_ref := getattr( |  | ||||||
|                 mod, |  | ||||||
|                 type_name, |  | ||||||
|                 False, |  | ||||||
|             ): |  | ||||||
|                 types.append(type_ref) |  | ||||||
| 
 |  | ||||||
|     # special case handling only.. |  | ||||||
|     # ipc_pld_spec: Union[Type] = eval( |  | ||||||
|     #     pld_spec_str, |  | ||||||
|     #     {},  # globals |  | ||||||
|     #     {'typing': typing},  # locals |  | ||||||
|     # ) |  | ||||||
| 
 |  | ||||||
|     return Union[*types] |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def enc_type_union( |  | ||||||
|     union_or_type: Union[Type]|Type, |  | ||||||
| ) -> list[str]: |  | ||||||
|     ''' |  | ||||||
|     Encode a type-union or single type to a list of type-name-strings |  | ||||||
|     ready for IPC interchange. |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     type_strs: list[str] = [] |  | ||||||
|     for typ in getattr( |  | ||||||
|         union_or_type, |  | ||||||
|         '__args__', |  | ||||||
|         {union_or_type,}, |  | ||||||
|     ): |  | ||||||
|         type_strs.append(typ.__qualname__) |  | ||||||
| 
 |  | ||||||
|     return type_strs |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
@tractor.context
async def send_back_values(
    ctx: Context,
    expect_debug: bool,
    pld_spec_type_strs: list[str],
    add_hooks: bool,
    started_msg_bytes: bytes,
    expect_ipc_send: dict[str, tuple[Any, bool]],

) -> None:
    '''
    Set up a custom codec to load instances of `NamespacePath`
    and ensure we can round trip a func ref with our parent.

    Child-side task of the codec-hooks test: applies the same custom
    codec as the parent, then attempts to decode the parent-encoded
    `Started` bytes, `.started()` the first sendable value, and
    stream the rest, cross-checking each send outcome against
    `expect_ipc_send`'s `(value, expect_send)` predictions.

    Parameters:
    - expect_debug: expected `_state.debug_mode()` value in this actor.
    - pld_spec_type_strs: type-name strs to rebuild the pld-spec union.
    - add_hooks: whether enc/dec hooks were added to the codec.
    - started_msg_bytes: a pre-encoded `Started` msg from the parent.
    - expect_ipc_send: maps type-name -> (value, should-roundtrip?).

    '''
    uid: tuple = tractor.current_actor().uid

    # debug mode sanity check (prolly superfluous but, meh)
    assert expect_debug == _state.debug_mode()

    # init state in sub-actor should be default
    chk_codec_applied(
        expect_codec=_codec._def_tractor_codec,
    )

    # load pld spec from input str
    ipc_pld_spec = dec_type_union(
        pld_spec_type_strs,
    )
    pld_spec_str = str(ipc_pld_spec)

    # same as on parent side config.
    nsp_codec: MsgCodec = mk_custom_codec(
        pld_spec=ipc_pld_spec,
        add_hooks=add_hooks,
    )
    with (
        apply_codec(nsp_codec) as codec,
    ):
        chk_codec_applied(
            expect_codec=nsp_codec,
            enter_value=codec,
        )

        print(
            f'{uid}: attempting `Started`-bytes DECODE..\n'
        )
        try:
            # the parent boxed the pld-spec's str-repr as the
            # `Started.pld`; decoding should only succeed when the
            # spec actually covers a plain `str` payload.
            msg: Started = nsp_codec.decode(started_msg_bytes)
            expected_pld_spec_str: str = msg.pld
            assert pld_spec_str == expected_pld_spec_str

        # TODO: maybe we should add our own wrapper error so as to
        # be interchange-lib agnostic?
        # -[ ] the error type is wtv is raised from the hook so we
        #   could also require a type-class of errors for
        #   indicating whether the hook-failure can be handled by
        #   a nasty-dialog-unprot sub-sys?
        except ValidationError:

            # NOTE: only in the `Any` spec case do we expect this to
            # work since otherwise no spec covers a plain-ol'
            # `.pld: str`
            if pld_spec_str == 'Any':
                raise
            else:
                print(
                    f'{uid}: (correctly) unable to DECODE `Started`-bytes\n'
                    f'{started_msg_bytes}\n'
                )

        # NOTE: `.started()` can only be called once per ctx, so we
        # `break` out of this loop on the first successful send and
        # deliver remaining values over the stream below; the shared
        # iterator resumes where this loop left off.
        iter_send_val_items = iter(expect_ipc_send.values())
        sent: list[Any] = []
        for send_value, expect_send in iter_send_val_items:
            try:
                print(
                    f'{uid}: attempting to `.started({send_value})`\n'
                    f'=> expect_send: {expect_send}\n'
                    f'SINCE, ipc_pld_spec: {ipc_pld_spec}\n'
                    f'AND, codec: {codec}\n'
                )
                await ctx.started(send_value)
                sent.append(send_value)
                if not expect_send:

                    # XXX NOTE XXX THIS WON'T WORK WITHOUT SPECIAL
                    # `str` handling! or special debug mode IPC
                    # msgs!
                    await tractor.pause()

                    raise RuntimeError(
                        f'NOT-EXPECTED able to roundtrip value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

                break  # move on to streaming block..

            except tractor.MsgTypeError:
                await tractor.pause()

                if expect_send:
                    raise RuntimeError(
                        f'EXPECTED to `.started()` value given spec:\n'
                        f'ipc_pld_spec -> {ipc_pld_spec}\n'
                        f'value -> {send_value}: {type(send_value)}\n'
                    )

        async with ctx.open_stream() as ipc:
            print(
                f'{uid}: Entering streaming block to send remaining values..'
            )

            for send_value, expect_send in iter_send_val_items:
                send_type: Type = type(send_value)
                print(
                    '------ - ------\n'
                    f'{uid}: SENDING NEXT VALUE\n'
                    f'ipc_pld_spec: {ipc_pld_spec}\n'
                    f'expect_send: {expect_send}\n'
                    f'val: {send_value}\n'
                    '------ - ------\n'
                )
                try:
                    await ipc.send(send_value)
                    print(f'***\n{uid}-CHILD sent {send_value!r}\n***\n')
                    sent.append(send_value)

                    # NOTE: should only raise above on
                    # `.started()` or a `Return`
                    # if not expect_send:
                    #     raise RuntimeError(
                    #         f'NOT-EXPECTED able to roundtrip value given spec:\n'
                    #         f'ipc_pld_spec -> {ipc_pld_spec}\n'
                    #         f'value -> {send_value}: {send_type}\n'
                    #     )

                except ValidationError:
                    print(f'{uid} FAILED TO SEND {send_value}!')

                    # await tractor.pause()
                    if expect_send:
                        raise RuntimeError(
                            f'EXPECTED to roundtrip value given spec:\n'
                            f'ipc_pld_spec -> {ipc_pld_spec}\n'
                            f'value -> {send_value}: {send_type}\n'
                        )
                    # continue

            # for-else: only runs when the loop wasn't `break`-ed.
            else:
                print(
                    f'{uid}: finished sending all values\n'
                    'Should be exiting stream block!\n'
                )

        print(f'{uid}: exited streaming block!')

        # TODO: this won't be true bc in streaming phase we DO NOT
        # msgspec check outbound msgs!
        # -[ ] once we implement the receiver side `InvalidMsg`
        #   then we can expect it here?
        # assert (
        #     len(sent)
        #     ==
        #     len([val
        #          for val, expect in
        #          expect_ipc_send.values()
        #          if expect is True])
        # )
| 
 |  | ||||||
| 
 |  | ||||||
def ex_func(*args):
    '''
    Example target fn used (via `NamespacePath.from_ref()`) as the
    func-ref payload in the codec-hook tests; simply echoes its
    positional args to stdout.

    '''
    print(f'ex_func({args})')
| 
 |  | ||||||
| 
 |  | ||||||
@pytest.mark.parametrize(
    'ipc_pld_spec',
    [
        Any,
        NamespacePath,
        NamespacePath|None,  # the "maybe" spec Bo
    ],
    ids=[
        'any_type',
        'nsp_type',
        'maybe_nsp_type',
    ]
)
@pytest.mark.parametrize(
    'add_codec_hooks',
    [
        True,
        False,
    ],
    ids=['use_codec_hooks', 'no_codec_hooks'],
)
def test_codec_hooks_mod(
    debug_mode: bool,
    ipc_pld_spec: Union[Type]|Any,
    # send_value: None|str|NamespacePath,
    add_codec_hooks: bool,
):
    '''
    Audit the `.msg.MsgCodec` override apis details given our impl
    uses `contextvars` to accomplish per `trio` task codec
    application around an inter-proc-task-comms context.

    Parent side of the codec-hooks test: predicts per-value send
    outcomes via `iter_maybe_sends()`, spawns a `'sub'` actor running
    `send_back_values()` with the same codec config, then verifies
    every expected-sendable value arrives back over IPC.

    '''
    async def main():
        nsp = NamespacePath.from_ref(ex_func)
        # the 3 value/type cases exercised per pld-spec:
        # `None`, a custom `NamespacePath`, and its plain-`str` repr.
        send_items: dict[Union, Any] = {
            Union[None]: None,
            Union[NamespacePath]: nsp,
            Union[str]: str(nsp),
        }

        # init default state for actor
        chk_codec_applied(
            expect_codec=_codec._def_tractor_codec,
        )

        async with tractor.open_nursery(
            debug_mode=debug_mode,
        ) as an:
            p: tractor.Portal = await an.start_actor(
                'sub',
                enable_modules=[__name__],
            )

            # TODO: 2 cases:
            # - codec not modified -> decode nsp as `str`
            # - codec modified with hooks -> decode nsp as
            #   `NamespacePath`
            nsp_codec: MsgCodec = mk_custom_codec(
                pld_spec=ipc_pld_spec,
                add_hooks=add_codec_hooks,
            )
            with apply_codec(nsp_codec) as codec:
                chk_codec_applied(
                    expect_codec=nsp_codec,
                    enter_value=codec,
                )

                # maps type-name-str -> (value, expect_send) for
                # forwarding to the child task as kwargs.
                expect_ipc_send: dict[str, tuple[Any, bool]] = {}

                report: str = (
                    'Parent report on send values with\n'
                    f'ipc_pld_spec: {ipc_pld_spec}\n'
                    '       ------ - ------\n'
                )
                for val_type_str, val, expect_send in iter_maybe_sends(
                    send_items,
                    ipc_pld_spec,
                    add_codec_hooks=add_codec_hooks,
                ):
                    report += (
                        f'send_value: {val}: {type(val)} '
                        f'=> expect_send: {expect_send}\n'
                    )
                    expect_ipc_send[val_type_str] = (val, expect_send)

                print(
                    report +
                    '       ------ - ------\n'
                )
                assert len(expect_ipc_send) == len(send_items)
                # now try over real IPC with a the subactor
                # expect_ipc_rountrip: bool = True
                expected_started = Started(
                    cid='cid',
                    pld=str(ipc_pld_spec),
                )
                # build list of values we expect to receive from
                # the subactor.
                expect_to_send: list[Any] = [
                    val
                    for val, expect_send in expect_ipc_send.values()
                    if expect_send
                ]

                pld_spec_type_strs: list[str] = enc_type_union(ipc_pld_spec)

                # XXX should raise an mte (`MsgTypeError`)
                # when `add_codec_hooks == False` bc the input
                # `expect_ipc_send` kwarg has a nsp which can't be
                # serialized!
                #
                # TODO:can we ensure this happens from the
                # `Return`-side (aka the sub) as well?
                if not add_codec_hooks:
                    try:
                        async with p.open_context(
                            send_back_values,
                            expect_debug=debug_mode,
                            pld_spec_type_strs=pld_spec_type_strs,
                            add_hooks=add_codec_hooks,
                            started_msg_bytes=nsp_codec.encode(expected_started),

                            # XXX NOTE bc we send a `NamespacePath` in this kwarg
                            expect_ipc_send=expect_ipc_send,

                        ) as (ctx, first):
                            pytest.fail('ctx should fail to open without custom enc_hook!?')

                    # this test passes bc we can go no further!
                    except MsgTypeError:
                        # teardown nursery
                        await p.cancel_actor()
                        return

                # TODO: send the original nsp here and
                # test with `limit_msg_spec()` above?
                # await tractor.pause()
                print('PARENT opening IPC ctx!\n')
                async with (

                    # XXX should raise an mte (`MsgTypeError`)
                    # when `add_codec_hooks == False`..
                    p.open_context(
                        send_back_values,
                        expect_debug=debug_mode,
                        pld_spec_type_strs=pld_spec_type_strs,
                        add_hooks=add_codec_hooks,
                        started_msg_bytes=nsp_codec.encode(expected_started),
                        expect_ipc_send=expect_ipc_send,
                    ) as (ctx, first),

                    ctx.open_stream() as ipc,
                ):
                    # ensure codec is still applied across
                    # `tractor.Context` + its embedded nursery.
                    chk_codec_applied(
                        expect_codec=nsp_codec,
                        enter_value=codec,
                    )
                    print(
                        'root: ENTERING CONTEXT BLOCK\n'
                        f'type(first): {type(first)}\n'
                        f'first: {first}\n'
                    )
                    # the `.started()` value is the first expected
                    # delivery; tick it off.
                    expect_to_send.remove(first)

                    # TODO: explicit values we expect depending on
                    # codec config!
                    # assert first == first_val
                    # assert first == f'{__name__}:ex_func'

                    async for next_sent in ipc:
                        print(
                            'Parent: child sent next value\n'
                            f'{next_sent}: {type(next_sent)}\n'
                        )
                        if expect_to_send:
                            expect_to_send.remove(next_sent)
                        else:
                            print('PARENT should terminate stream loop + block!')

                    # all sent values should have arrived!
                    assert not expect_to_send

            await p.cancel_actor()

    trio.run(main)
| 
 |  | ||||||
| 
 |  | ||||||
def chk_pld_type(
    payload_spec: Type[Struct]|Any,
    pld: Any,

    expect_roundtrip: bool|None = None,

) -> bool:
    '''
    Verify that a codec built with `ipc_pld_spec=payload_spec` can
    (or correctly can NOT) roundtrip `pld` boxed inside each
    variable-payload msg type.

    Encodes/decodes with both the `MsgCodec` and a one-off bare
    `msgspec` encoder/decoder pair and asserts they agree.

    Returns `True` when the value round-tripped, `False` when
    decoding raised a (spec-mismatch) `ValidationError`. When
    `expect_roundtrip` is passed, a mismatching outcome drops into
    the debugger (deliberate trip-wire, not production code).

    Raises `ValueError` on an outcome inconsistent with the
    value-type vs. spec match.

    '''
    pld_val_type: Type = type(pld)

    # TODO: verify that the overridden subtypes
    # DO NOT have modified type-annots from original!
    # 'Start',  .pld: FuncSpec
    # 'StartAck',  .pld: IpcCtxSpec
    # 'Stop',  .pld: UNSEt
    # 'Error',  .pld: ErrorData

    codec: MsgCodec = mk_codec(
        # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified
        # type union.
        ipc_pld_spec=payload_spec,
    )

    # make a one-off dec to compare with our `MsgCodec` instance
    # which does the below `mk_msg_spec()` call internally
    ipc_msg_spec: Union[Type[Struct]]
    msg_types: list[PayloadMsg[payload_spec]]
    (
        ipc_msg_spec,
        msg_types,
    ) = mk_msg_spec(
        payload_type_union=payload_spec,
    )
    _enc = msgpack.Encoder()
    _dec = msgpack.Decoder(
        type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]`
    )

    assert (
        payload_spec
        ==
        codec.pld_spec
    )

    # assert codec.dec == dec
    #
    # ^-XXX-^ not sure why these aren't "equal" but when cast
    # to `str` they seem to match ?? .. kk

    # the codec's spec, the hand-built spec and both decoders'
    # types should all be the same spec (when str-cast).
    assert (
        str(ipc_msg_spec)
        ==
        str(codec.msg_spec)
        ==
        str(_dec.type)
        ==
        str(codec.dec.type)
    )

    # verify the boxed-type for all variable payload-type msgs.
    if not msg_types:
        breakpoint()

    roundtrip: bool|None = None
    pld_spec_msg_names: list[str] = [
        td.__name__ for td in _payload_msgs
    ]
    for typedef in msg_types:

        # only audit msg types which carry a variable `.pld`;
        # runtime-internal msgs are skipped.
        skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names
        if skip_runtime_msg:
            continue

        pld_field = structs.fields(typedef)[1]
        assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere?

        kwargs: dict[str, Any] = {
            'cid': '666',
            'pld': pld,
        }
        enc_msg: PayloadMsg = typedef(**kwargs)

        # both encoders must produce identical wire-bytes.
        _wire_bytes: bytes = _enc.encode(enc_msg)
        wire_bytes: bytes = codec.enc.encode(enc_msg)
        assert _wire_bytes == wire_bytes

        ve: ValidationError|None = None
        try:
            dec_msg = codec.dec.decode(wire_bytes)
            _dec_msg = _dec.decode(wire_bytes)

            # decoded msg and thus payload should be exactly same!
            assert (roundtrip := (
                _dec_msg
                ==
                dec_msg
                ==
                enc_msg
            ))

            # deliberate debug trip-wire on prediction mismatch
            if (
                expect_roundtrip is not None
                and expect_roundtrip != roundtrip
            ):
                breakpoint()

            assert (
                pld
                ==
                dec_msg.pld
                ==
                enc_msg.pld
            )
            # assert (roundtrip := (_dec_msg == enc_msg))

        except ValidationError as _ve:
            ve = _ve
            roundtrip: bool = False
            if pld_val_type is payload_spec:
                raise ValueError(
                   'Got `ValidationError` despite type-var match!?\n'
                    f'pld_val_type: {pld_val_type}\n'
                    f'payload_type: {payload_spec}\n'
                ) from ve

            else:
                # ow we good cuz the pld spec mismatched.
                print(
                    'Got expected `ValidationError` since,\n'
                    f'{pld_val_type} is not {payload_spec}\n'
                )
        else:
            if (
                payload_spec is not Any
                and
                pld_val_type is not payload_spec
            ):
                raise ValueError(
                   'DID NOT `ValidationError` despite expected type match!?\n'
                    f'pld_val_type: {pld_val_type}\n'
                    f'payload_type: {payload_spec}\n'
                )

    # full code decode should always be attempted!
    if roundtrip is None:
        breakpoint()

    return roundtrip
| 
 |  | ||||||
| 
 |  | ||||||
def test_limit_msgspec():
    '''
    Smoke-audit `PayloadMsg.pld`-spec limiting via `chk_pld_type()`:
    a boxed payload should only decode when the value matches the
    parametrized spec.

    '''
    async def _main():
        async with tractor.open_root_actor(
            debug_mode=True
        ):
            # the widest spec must box anything, incl. `None`.
            assert chk_pld_type(
                payload_spec=Any,
                pld=None,
                expect_roundtrip=True,
            )

            # a mis-typed payload value must be rejected on decode.
            assert not chk_pld_type(
                payload_spec=int,
                pld='doggy',
            )

            # parametrizing the boxed `.pld` as a custom struct
            # should propagate to every payload-msg-spec-able
            # subtype: a bare `str` is rejected..
            class CustomPayload(Struct):
                name: str
                value: Any

            assert not chk_pld_type(
                payload_spec=CustomPayload,
                pld='doggy',
            )

            # ..but a matching struct instance round-trips.
            assert chk_pld_type(
                payload_spec=CustomPayload,
                pld=CustomPayload(name='doggy', value='urmom')
            )

            # yah, we can `.pause_from_sync()` now!
            # breakpoint()

    trio.run(_main)
|  | @ -95,8 +95,8 @@ async def trio_main( | ||||||
| 
 | 
 | ||||||
|     # stash a "service nursery" as "actor local" (aka a Python global) |     # stash a "service nursery" as "actor local" (aka a Python global) | ||||||
|     global _nursery |     global _nursery | ||||||
|     n = _nursery |     tn = _nursery | ||||||
|     assert n |     assert tn | ||||||
| 
 | 
 | ||||||
|     async def consume_stream(): |     async def consume_stream(): | ||||||
|         async with wrapper_mngr() as stream: |         async with wrapper_mngr() as stream: | ||||||
|  | @ -104,10 +104,10 @@ async def trio_main( | ||||||
|                 print(msg) |                 print(msg) | ||||||
| 
 | 
 | ||||||
|     # run 2 tasks to ensure broadcaster chan use |     # run 2 tasks to ensure broadcaster chan use | ||||||
|     n.start_soon(consume_stream) |     tn.start_soon(consume_stream) | ||||||
|     n.start_soon(consume_stream) |     tn.start_soon(consume_stream) | ||||||
| 
 | 
 | ||||||
|     n.start_soon(trio_sleep_and_err) |     tn.start_soon(trio_sleep_and_err) | ||||||
| 
 | 
 | ||||||
|     await trio.sleep_forever() |     await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|  | @ -117,8 +117,10 @@ async def open_actor_local_nursery( | ||||||
|     ctx: tractor.Context, |     ctx: tractor.Context, | ||||||
| ): | ): | ||||||
|     global _nursery |     global _nursery | ||||||
|     async with trio.open_nursery() as n: |     async with trio.open_nursery( | ||||||
|         _nursery = n |         strict_exception_groups=False, | ||||||
|  |     ) as tn: | ||||||
|  |         _nursery = tn | ||||||
|         await ctx.started() |         await ctx.started() | ||||||
|         await trio.sleep(10) |         await trio.sleep(10) | ||||||
|         # await trio.sleep(1) |         # await trio.sleep(1) | ||||||
|  | @ -132,7 +134,7 @@ async def open_actor_local_nursery( | ||||||
|         # never yields back.. aka a scenario where the |         # never yields back.. aka a scenario where the | ||||||
|         # ``tractor.context`` task IS NOT in the service n's cancel |         # ``tractor.context`` task IS NOT in the service n's cancel | ||||||
|         # scope. |         # scope. | ||||||
|         n.cancel_scope.cancel() |         tn.cancel_scope.cancel() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|  | @ -157,7 +159,7 @@ def test_actor_managed_trio_nursery_task_error_cancels_aio( | ||||||
|         async with tractor.open_nursery() as n: |         async with tractor.open_nursery() as n: | ||||||
|             p = await n.start_actor( |             p = await n.start_actor( | ||||||
|                 'nursery_mngr', |                 'nursery_mngr', | ||||||
|                 infect_asyncio=asyncio_mode, |                 infect_asyncio=asyncio_mode,  # TODO, is this enabling debug mode? | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|             ) |             ) | ||||||
|             async with ( |             async with ( | ||||||
|  |  | ||||||
|  | @ -38,9 +38,9 @@ from tractor._testing import ( | ||||||
| # - standard setup/teardown: | # - standard setup/teardown: | ||||||
| #   ``Portal.open_context()`` starts a new | #   ``Portal.open_context()`` starts a new | ||||||
| #   remote task context in another actor. The target actor's task must | #   remote task context in another actor. The target actor's task must | ||||||
| #   call ``Context.started()`` to unblock this entry on the caller side. | #   call ``Context.started()`` to unblock this entry on the parent side. | ||||||
| #   the callee task executes until complete and returns a final value | #   the child task executes until complete and returns a final value | ||||||
| #   which is delivered to the caller side and retreived via | #   which is delivered to the parent side and retreived via | ||||||
| #   ``Context.result()``. | #   ``Context.result()``. | ||||||
| 
 | 
 | ||||||
| # - cancel termination: | # - cancel termination: | ||||||
|  | @ -170,9 +170,9 @@ async def assert_state(value: bool): | ||||||
|     [False, ValueError, KeyboardInterrupt], |     [False, ValueError, KeyboardInterrupt], | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'callee_blocks_forever', |     'child_blocks_forever', | ||||||
|     [False, True], |     [False, True], | ||||||
|     ids=lambda item: f'callee_blocks_forever={item}' |     ids=lambda item: f'child_blocks_forever={item}' | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'pointlessly_open_stream', |     'pointlessly_open_stream', | ||||||
|  | @ -181,7 +181,7 @@ async def assert_state(value: bool): | ||||||
| ) | ) | ||||||
| def test_simple_context( | def test_simple_context( | ||||||
|     error_parent, |     error_parent, | ||||||
|     callee_blocks_forever, |     child_blocks_forever, | ||||||
|     pointlessly_open_stream, |     pointlessly_open_stream, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|  | @ -204,13 +204,13 @@ def test_simple_context( | ||||||
|                         portal.open_context( |                         portal.open_context( | ||||||
|                             simple_setup_teardown, |                             simple_setup_teardown, | ||||||
|                             data=10, |                             data=10, | ||||||
|                             block_forever=callee_blocks_forever, |                             block_forever=child_blocks_forever, | ||||||
|                         ) as (ctx, sent), |                         ) as (ctx, sent), | ||||||
|                     ): |                     ): | ||||||
|                         assert current_ipc_ctx() is ctx |                         assert current_ipc_ctx() is ctx | ||||||
|                         assert sent == 11 |                         assert sent == 11 | ||||||
| 
 | 
 | ||||||
|                         if callee_blocks_forever: |                         if child_blocks_forever: | ||||||
|                             await portal.run(assert_state, value=True) |                             await portal.run(assert_state, value=True) | ||||||
|                         else: |                         else: | ||||||
|                             assert await ctx.result() == 'yo' |                             assert await ctx.result() == 'yo' | ||||||
|  | @ -220,7 +220,7 @@ def test_simple_context( | ||||||
|                                 if error_parent: |                                 if error_parent: | ||||||
|                                     raise error_parent |                                     raise error_parent | ||||||
| 
 | 
 | ||||||
|                                 if callee_blocks_forever: |                                 if child_blocks_forever: | ||||||
|                                     await ctx.cancel() |                                     await ctx.cancel() | ||||||
|                                 else: |                                 else: | ||||||
|                                     # in this case the stream will send a |                                     # in this case the stream will send a | ||||||
|  | @ -259,9 +259,9 @@ def test_simple_context( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'callee_returns_early', |     'child_returns_early', | ||||||
|     [True, False], |     [True, False], | ||||||
|     ids=lambda item: f'callee_returns_early={item}' |     ids=lambda item: f'child_returns_early={item}' | ||||||
| ) | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'cancel_method', |     'cancel_method', | ||||||
|  | @ -273,14 +273,14 @@ def test_simple_context( | ||||||
|     [True, False], |     [True, False], | ||||||
|     ids=lambda item: f'chk_ctx_result_before_exit={item}' |     ids=lambda item: f'chk_ctx_result_before_exit={item}' | ||||||
| ) | ) | ||||||
| def test_caller_cancels( | def test_parent_cancels( | ||||||
|     cancel_method: str, |     cancel_method: str, | ||||||
|     chk_ctx_result_before_exit: bool, |     chk_ctx_result_before_exit: bool, | ||||||
|     callee_returns_early: bool, |     child_returns_early: bool, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Verify that when the opening side of a context (aka the caller) |     Verify that when the opening side of a context (aka the parent) | ||||||
|     cancels that context, the ctx does not raise a cancelled when |     cancels that context, the ctx does not raise a cancelled when | ||||||
|     either calling `.result()` or on context exit. |     either calling `.result()` or on context exit. | ||||||
| 
 | 
 | ||||||
|  | @ -294,7 +294,7 @@ def test_caller_cancels( | ||||||
| 
 | 
 | ||||||
|         if ( |         if ( | ||||||
|             cancel_method == 'portal' |             cancel_method == 'portal' | ||||||
|             and not callee_returns_early |             and not child_returns_early | ||||||
|         ): |         ): | ||||||
|             try: |             try: | ||||||
|                 res = await ctx.result() |                 res = await ctx.result() | ||||||
|  | @ -318,7 +318,7 @@ def test_caller_cancels( | ||||||
|                 pytest.fail(f'should not have raised ctxc\n{ctxc}') |                 pytest.fail(f'should not have raised ctxc\n{ctxc}') | ||||||
| 
 | 
 | ||||||
|         # we actually get a result |         # we actually get a result | ||||||
|         if callee_returns_early: |         if child_returns_early: | ||||||
|             assert res == 'yo' |             assert res == 'yo' | ||||||
|             assert ctx.outcome is res |             assert ctx.outcome is res | ||||||
|             assert ctx.maybe_error is None |             assert ctx.maybe_error is None | ||||||
|  | @ -362,14 +362,14 @@ def test_caller_cancels( | ||||||
|             ) |             ) | ||||||
|             timeout: float = ( |             timeout: float = ( | ||||||
|                 0.5 |                 0.5 | ||||||
|                 if not callee_returns_early |                 if not child_returns_early | ||||||
|                 else 2 |                 else 2 | ||||||
|             ) |             ) | ||||||
|             with trio.fail_after(timeout): |             with trio.fail_after(timeout): | ||||||
|                 async with ( |                 async with ( | ||||||
|                     expect_ctxc( |                     expect_ctxc( | ||||||
|                         yay=( |                         yay=( | ||||||
|                             not callee_returns_early |                             not child_returns_early | ||||||
|                             and cancel_method == 'portal' |                             and cancel_method == 'portal' | ||||||
|                         ) |                         ) | ||||||
|                     ), |                     ), | ||||||
|  | @ -377,13 +377,13 @@ def test_caller_cancels( | ||||||
|                     portal.open_context( |                     portal.open_context( | ||||||
|                         simple_setup_teardown, |                         simple_setup_teardown, | ||||||
|                         data=10, |                         data=10, | ||||||
|                         block_forever=not callee_returns_early, |                         block_forever=not child_returns_early, | ||||||
|                     ) as (ctx, sent), |                     ) as (ctx, sent), | ||||||
|                 ): |                 ): | ||||||
| 
 | 
 | ||||||
|                     if callee_returns_early: |                     if child_returns_early: | ||||||
|                         # ensure we block long enough before sending |                         # ensure we block long enough before sending | ||||||
|                         # a cancel such that the callee has already |                         # a cancel such that the child has already | ||||||
|                         # returned it's result. |                         # returned it's result. | ||||||
|                         await trio.sleep(0.5) |                         await trio.sleep(0.5) | ||||||
| 
 | 
 | ||||||
|  | @ -421,7 +421,7 @@ def test_caller_cancels( | ||||||
|             #   which should in turn cause `ctx._scope` to |             #   which should in turn cause `ctx._scope` to | ||||||
|             # catch any cancellation? |             # catch any cancellation? | ||||||
|             if ( |             if ( | ||||||
|                 not callee_returns_early |                 not child_returns_early | ||||||
|                 and cancel_method != 'portal' |                 and cancel_method != 'portal' | ||||||
|             ): |             ): | ||||||
|                 assert not ctx._scope.cancelled_caught |                 assert not ctx._scope.cancelled_caught | ||||||
|  | @ -430,11 +430,11 @@ def test_caller_cancels( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # basic stream terminations: | # basic stream terminations: | ||||||
| # - callee context closes without using stream | # - child context closes without using stream | ||||||
| # - caller context closes without using stream | # - parent context closes without using stream | ||||||
| # - caller context calls `Context.cancel()` while streaming | # - parent context calls `Context.cancel()` while streaming | ||||||
| #   is ongoing resulting in callee being cancelled | #   is ongoing resulting in child being cancelled | ||||||
| # - callee calls `Context.cancel()` while streaming and caller | # - child calls `Context.cancel()` while streaming and parent | ||||||
| #   sees stream terminated in `RemoteActorError` | #   sees stream terminated in `RemoteActorError` | ||||||
| 
 | 
 | ||||||
| # TODO: future possible features | # TODO: future possible features | ||||||
|  | @ -443,7 +443,6 @@ def test_caller_cancels( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def close_ctx_immediately( | async def close_ctx_immediately( | ||||||
| 
 |  | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -454,13 +453,24 @@ async def close_ctx_immediately( | ||||||
|     async with ctx.open_stream(): |     async with ctx.open_stream(): | ||||||
|         pass |         pass | ||||||
| 
 | 
 | ||||||
|  |     print('child returning!') | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'parent_send_before_receive', | ||||||
|  |     [ | ||||||
|  |         False, | ||||||
|  |         True, | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'child_send_before_receive={item}' | ||||||
|  | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_callee_closes_ctx_after_stream_open( | async def test_child_exits_ctx_after_stream_open( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
|  |     parent_send_before_receive: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     callee context closes without using stream. |     child context closes without using stream. | ||||||
| 
 | 
 | ||||||
|     This should result in a msg sequence |     This should result in a msg sequence | ||||||
|     |_<root>_ |     |_<root>_ | ||||||
|  | @ -474,6 +484,9 @@ async def test_callee_closes_ctx_after_stream_open( | ||||||
|     => {'stop': True, 'cid': <str>} |     => {'stop': True, 'cid': <str>} | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     timeout: float = ( | ||||||
|  |         0.5 if not debug_mode else 999 | ||||||
|  |     ) | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=debug_mode, |         debug_mode=debug_mode, | ||||||
|     ) as an: |     ) as an: | ||||||
|  | @ -482,7 +495,7 @@ async def test_callee_closes_ctx_after_stream_open( | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         with trio.fail_after(0.5): |         with trio.fail_after(timeout): | ||||||
|             async with portal.open_context( |             async with portal.open_context( | ||||||
|                 close_ctx_immediately, |                 close_ctx_immediately, | ||||||
| 
 | 
 | ||||||
|  | @ -494,41 +507,56 @@ async def test_callee_closes_ctx_after_stream_open( | ||||||
| 
 | 
 | ||||||
|                 with trio.fail_after(0.4): |                 with trio.fail_after(0.4): | ||||||
|                     async with ctx.open_stream() as stream: |                     async with ctx.open_stream() as stream: | ||||||
|  |                         if parent_send_before_receive: | ||||||
|  |                             print('sending first msg from parent!') | ||||||
|  |                             await stream.send('yo') | ||||||
| 
 | 
 | ||||||
|                         # should fall through since ``StopAsyncIteration`` |                         # should fall through since ``StopAsyncIteration`` | ||||||
|                         # should be raised through translation of |                         # should be raised through translation of | ||||||
|                         # a ``trio.EndOfChannel`` by |                         # a ``trio.EndOfChannel`` by | ||||||
|                         # ``trio.abc.ReceiveChannel.__anext__()`` |                         # ``trio.abc.ReceiveChannel.__anext__()`` | ||||||
|                         async for _ in stream: |                         msg = 10 | ||||||
|  |                         async for msg in stream: | ||||||
|                             # trigger failure if we DO NOT |                             # trigger failure if we DO NOT | ||||||
|                             # get an EOC! |                             # get an EOC! | ||||||
|                             assert 0 |                             assert 0 | ||||||
|                         else: |                         else: | ||||||
|  |                             # never should get anythinig new from | ||||||
|  |                             # the underlying stream | ||||||
|  |                             assert msg == 10 | ||||||
| 
 | 
 | ||||||
|                             # verify stream is now closed |                             # verify stream is now closed | ||||||
|                             try: |                             try: | ||||||
|                                 with trio.fail_after(0.3): |                                 with trio.fail_after(0.3): | ||||||
|  |                                     print('parent trying to `.receive()` on EoC stream!') | ||||||
|                                     await stream.receive() |                                     await stream.receive() | ||||||
|  |                                     assert 0, 'should have raised eoc!?' | ||||||
|                             except trio.EndOfChannel: |                             except trio.EndOfChannel: | ||||||
|  |                                 print('parent got EoC as expected!') | ||||||
|                                 pass |                                 pass | ||||||
|  |                                 # raise | ||||||
| 
 | 
 | ||||||
|                 # TODO: should be just raise the closed resource err |                 # TODO: should be just raise the closed resource err | ||||||
|                 # directly here to enforce not allowing a re-open |                 # directly here to enforce not allowing a re-open | ||||||
|                 # of a stream to the context (at least until a time of |                 # of a stream to the context (at least until a time of | ||||||
|                 # if/when we decide that's a good idea?) |                 # if/when we decide that's a good idea?) | ||||||
|                 try: |                 try: | ||||||
|                     with trio.fail_after(0.5): |                     with trio.fail_after(timeout): | ||||||
|                         async with ctx.open_stream() as stream: |                         async with ctx.open_stream() as stream: | ||||||
|                             pass |                             pass | ||||||
|                 except trio.ClosedResourceError: |                 except trio.ClosedResourceError: | ||||||
|                     pass |                     pass | ||||||
| 
 | 
 | ||||||
|  |                 # if ctx._rx_chan._state.data: | ||||||
|  |                 #     await tractor.pause() | ||||||
|  | 
 | ||||||
|         await portal.cancel_actor() |         await portal.cancel_actor() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def expect_cancelled( | async def expect_cancelled( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|  |     send_before_receive: bool = False, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     global _state |     global _state | ||||||
|  | @ -538,6 +566,10 @@ async def expect_cancelled( | ||||||
| 
 | 
 | ||||||
|     try: |     try: | ||||||
|         async with ctx.open_stream() as stream: |         async with ctx.open_stream() as stream: | ||||||
|  | 
 | ||||||
|  |             if send_before_receive: | ||||||
|  |                 await stream.send('yo') | ||||||
|  | 
 | ||||||
|             async for msg in stream: |             async for msg in stream: | ||||||
|                 await stream.send(msg)  # echo server |                 await stream.send(msg)  # echo server | ||||||
| 
 | 
 | ||||||
|  | @ -564,26 +596,49 @@ async def expect_cancelled( | ||||||
|         raise |         raise | ||||||
| 
 | 
 | ||||||
|     else: |     else: | ||||||
|         assert 0, "callee wasn't cancelled !?" |         assert 0, "child wasn't cancelled !?" | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'child_send_before_receive', | ||||||
|  |     [ | ||||||
|  |         False, | ||||||
|  |         True, | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'child_send_before_receive={item}' | ||||||
|  | ) | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'rent_wait_for_msg', | ||||||
|  |     [ | ||||||
|  |         False, | ||||||
|  |         True, | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'rent_wait_for_msg={item}' | ||||||
|  | ) | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'use_ctx_cancel_method', |     'use_ctx_cancel_method', | ||||||
|     [False, True], |     [ | ||||||
|  |         False, | ||||||
|  |         'pre_stream', | ||||||
|  |         'post_stream_open', | ||||||
|  |         'post_stream_close', | ||||||
|  |     ], | ||||||
|  |     ids=lambda item: f'use_ctx_cancel_method={item}' | ||||||
| ) | ) | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_caller_closes_ctx_after_callee_opens_stream( | async def test_parent_exits_ctx_after_child_enters_stream( | ||||||
|     use_ctx_cancel_method: bool, |     use_ctx_cancel_method: bool|str, | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
|  |     rent_wait_for_msg: bool, | ||||||
|  |     child_send_before_receive: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     caller context closes without using/opening stream |     Parent-side of IPC context closes without sending on `MsgStream`. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         debug_mode=debug_mode, |         debug_mode=debug_mode, | ||||||
|     ) as an: |     ) as an: | ||||||
| 
 |  | ||||||
|         root: Actor = current_actor() |         root: Actor = current_actor() | ||||||
|         portal = await an.start_actor( |         portal = await an.start_actor( | ||||||
|             'ctx_cancelled', |             'ctx_cancelled', | ||||||
|  | @ -592,26 +647,45 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | ||||||
| 
 | 
 | ||||||
|         async with portal.open_context( |         async with portal.open_context( | ||||||
|             expect_cancelled, |             expect_cancelled, | ||||||
|  |             send_before_receive=child_send_before_receive, | ||||||
|         ) as (ctx, sent): |         ) as (ctx, sent): | ||||||
|             assert sent is None |             assert sent is None | ||||||
| 
 | 
 | ||||||
|             await portal.run(assert_state, value=True) |             await portal.run(assert_state, value=True) | ||||||
| 
 | 
 | ||||||
|             # call `ctx.cancel()` explicitly |             # call `ctx.cancel()` explicitly | ||||||
|             if use_ctx_cancel_method: |             if use_ctx_cancel_method == 'pre_stream': | ||||||
|                 await ctx.cancel() |                 await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|                 # NOTE: means the local side `ctx._scope` will |                 # NOTE: means the local side `ctx._scope` will | ||||||
|                 # have been cancelled by an ctxc ack and thus |                 # have been cancelled by an ctxc ack and thus | ||||||
|                 # `._scope.cancelled_caught` should be set. |                 # `._scope.cancelled_caught` should be set. | ||||||
|                 try: |                 async with ( | ||||||
|                     async with ctx.open_stream() as stream: |                     expect_ctxc( | ||||||
|                         async for msg in stream: |  | ||||||
|                             pass |  | ||||||
| 
 |  | ||||||
|                 except tractor.ContextCancelled as ctxc: |  | ||||||
|                         # XXX: the cause is US since we call |                         # XXX: the cause is US since we call | ||||||
|                         # `Context.cancel()` just above! |                         # `Context.cancel()` just above! | ||||||
|  |                         yay=True, | ||||||
|  | 
 | ||||||
|  |                         # XXX: must be propagated to __aexit__ | ||||||
|  |                         # and should be silently absorbed there | ||||||
|  |                         # since we called `.cancel()` just above ;) | ||||||
|  |                         reraise=True, | ||||||
|  |                     ) as maybe_ctxc, | ||||||
|  |                 ): | ||||||
|  |                     async with ctx.open_stream() as stream: | ||||||
|  | 
 | ||||||
|  |                         if rent_wait_for_msg: | ||||||
|  |                             async for msg in stream: | ||||||
|  |                                 print(f'PARENT rx: {msg!r}\n') | ||||||
|  |                                 break | ||||||
|  | 
 | ||||||
|  |                         if use_ctx_cancel_method == 'post_stream_open': | ||||||
|  |                             await ctx.cancel() | ||||||
|  | 
 | ||||||
|  |                     if use_ctx_cancel_method == 'post_stream_close': | ||||||
|  |                         await ctx.cancel() | ||||||
|  | 
 | ||||||
|  |                 ctxc: tractor.ContextCancelled = maybe_ctxc.value | ||||||
|                 assert ( |                 assert ( | ||||||
|                     ctxc.canceller |                     ctxc.canceller | ||||||
|                     == |                     == | ||||||
|  | @ -620,14 +694,6 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | ||||||
|                     root.uid |                     root.uid | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|                     # XXX: must be propagated to __aexit__ |  | ||||||
|                     # and should be silently absorbed there |  | ||||||
|                     # since we called `.cancel()` just above ;) |  | ||||||
|                     raise |  | ||||||
| 
 |  | ||||||
|                 else: |  | ||||||
|                     assert 0, "Should have context cancelled?" |  | ||||||
| 
 |  | ||||||
|                 # channel should still be up |                 # channel should still be up | ||||||
|                 assert portal.channel.connected() |                 assert portal.channel.connected() | ||||||
| 
 | 
 | ||||||
|  | @ -637,13 +703,20 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | ||||||
|                     value=False, |                     value=False, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  |             # XXX CHILD-BLOCKS case, we SHOULD NOT exit from the | ||||||
|  |             # `.open_context()` before the child has returned, | ||||||
|  |             # errored or been cancelled! | ||||||
|             else: |             else: | ||||||
|                 try: |                 try: | ||||||
|                     with trio.fail_after(0.2): |                     with trio.fail_after( | ||||||
|                         await ctx.result() |                         0.5  # if not debug_mode else 999 | ||||||
|  |                     ): | ||||||
|  |                         res = await ctx.wait_for_result() | ||||||
|  |                         assert res is not tractor._context.Unresolved | ||||||
|                         assert 0, "Callee should have blocked!?" |                         assert 0, "Callee should have blocked!?" | ||||||
|                 except trio.TooSlowError: |                 except trio.TooSlowError: | ||||||
|                     # NO-OP -> since already called above |                     # NO-OP -> since already triggered by | ||||||
|  |                     # `trio.fail_after()` above! | ||||||
|                     await ctx.cancel() |                     await ctx.cancel() | ||||||
| 
 | 
 | ||||||
|         # NOTE: local scope should have absorbed the cancellation since |         # NOTE: local scope should have absorbed the cancellation since | ||||||
|  | @ -683,7 +756,7 @@ async def test_caller_closes_ctx_after_callee_opens_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_multitask_caller_cancels_from_nonroot_task( | async def test_multitask_parent_cancels_from_nonroot_task( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|  | @ -735,7 +808,6 @@ async def test_multitask_caller_cancels_from_nonroot_task( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def cancel_self( | async def cancel_self( | ||||||
| 
 |  | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | @ -775,11 +847,11 @@ async def cancel_self( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor_test | @tractor_test | ||||||
| async def test_callee_cancels_before_started( | async def test_child_cancels_before_started( | ||||||
|     debug_mode: bool, |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Callee calls `Context.cancel()` while streaming and caller |     Callee calls `Context.cancel()` while streaming and parent | ||||||
|     sees stream terminated in `ContextCancelled`. |     sees stream terminated in `ContextCancelled`. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  | @ -826,14 +898,13 @@ async def never_open_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def keep_sending_from_callee( | async def keep_sending_from_child( | ||||||
| 
 |  | ||||||
|     ctx:  Context, |     ctx:  Context, | ||||||
|     msg_buffer_size: int|None = None, |     msg_buffer_size: int|None = None, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Send endlessly on the calleee stream. |     Send endlessly on the child stream. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     await ctx.started() |     await ctx.started() | ||||||
|  | @ -841,7 +912,7 @@ async def keep_sending_from_callee( | ||||||
|         msg_buffer_size=msg_buffer_size, |         msg_buffer_size=msg_buffer_size, | ||||||
|     ) as stream: |     ) as stream: | ||||||
|         for msg in count(): |         for msg in count(): | ||||||
|             print(f'callee sending {msg}') |             print(f'child sending {msg}') | ||||||
|             await stream.send(msg) |             await stream.send(msg) | ||||||
|             await trio.sleep(0.01) |             await trio.sleep(0.01) | ||||||
| 
 | 
 | ||||||
|  | @ -849,12 +920,12 @@ async def keep_sending_from_callee( | ||||||
| @pytest.mark.parametrize( | @pytest.mark.parametrize( | ||||||
|     'overrun_by', |     'overrun_by', | ||||||
|     [ |     [ | ||||||
|         ('caller', 1, never_open_stream), |         ('parent', 1, never_open_stream), | ||||||
|         ('callee', 0, keep_sending_from_callee), |         ('child', 0, keep_sending_from_child), | ||||||
|     ], |     ], | ||||||
|     ids=[ |     ids=[ | ||||||
|          ('caller_1buf_never_open_stream'), |          ('parent_1buf_never_open_stream'), | ||||||
|          ('callee_0buf_keep_sending_from_callee'), |          ('child_0buf_keep_sending_from_child'), | ||||||
|     ] |     ] | ||||||
| ) | ) | ||||||
| def test_one_end_stream_not_opened( | def test_one_end_stream_not_opened( | ||||||
|  | @ -885,8 +956,7 @@ def test_one_end_stream_not_opened( | ||||||
|                 ) as (ctx, sent): |                 ) as (ctx, sent): | ||||||
|                     assert sent is None |                     assert sent is None | ||||||
| 
 | 
 | ||||||
|                     if 'caller' in overrunner: |                     if 'parent' in overrunner: | ||||||
| 
 |  | ||||||
|                         async with ctx.open_stream() as stream: |                         async with ctx.open_stream() as stream: | ||||||
| 
 | 
 | ||||||
|                             # itersend +1 msg more then the buffer size |                             # itersend +1 msg more then the buffer size | ||||||
|  | @ -901,7 +971,7 @@ def test_one_end_stream_not_opened( | ||||||
|                                 await trio.sleep_forever() |                                 await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|                     else: |                     else: | ||||||
|                         # callee overruns caller case so we do nothing here |                         # child overruns parent case so we do nothing here | ||||||
|                         await trio.sleep_forever() |                         await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|             await portal.cancel_actor() |             await portal.cancel_actor() | ||||||
|  | @ -909,19 +979,19 @@ def test_one_end_stream_not_opened( | ||||||
|     # 2 overrun cases and the no overrun case (which pushes right up to |     # 2 overrun cases and the no overrun case (which pushes right up to | ||||||
|     # the msg limit) |     # the msg limit) | ||||||
|     if ( |     if ( | ||||||
|         overrunner == 'caller' |         overrunner == 'parent' | ||||||
|     ): |     ): | ||||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: |         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         assert excinfo.value.boxed_type == StreamOverrun |         assert excinfo.value.boxed_type == StreamOverrun | ||||||
| 
 | 
 | ||||||
|     elif overrunner == 'callee': |     elif overrunner == 'child': | ||||||
|         with pytest.raises(tractor.RemoteActorError) as excinfo: |         with pytest.raises(tractor.RemoteActorError) as excinfo: | ||||||
|             trio.run(main) |             trio.run(main) | ||||||
| 
 | 
 | ||||||
|         # TODO: embedded remote errors so that we can verify the source |         # TODO: embedded remote errors so that we can verify the source | ||||||
|         # error? the callee delivers an error which is an overrun |         # error? the child delivers an error which is an overrun | ||||||
|         # wrapped in a remote actor error. |         # wrapped in a remote actor error. | ||||||
|         assert excinfo.value.boxed_type == tractor.RemoteActorError |         assert excinfo.value.boxed_type == tractor.RemoteActorError | ||||||
| 
 | 
 | ||||||
|  | @ -931,7 +1001,6 @@ def test_one_end_stream_not_opened( | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def echo_back_sequence( | async def echo_back_sequence( | ||||||
| 
 |  | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
|     seq: list[int], |     seq: list[int], | ||||||
|     wait_for_cancel: bool, |     wait_for_cancel: bool, | ||||||
|  | @ -941,12 +1010,12 @@ async def echo_back_sequence( | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Send endlessly on the calleee stream using a small buffer size |     Send endlessly on the child stream using a small buffer size | ||||||
|     setting on the contex to simulate backlogging that would normally |     setting on the contex to simulate backlogging that would normally | ||||||
|     cause overruns. |     cause overruns. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # NOTE: ensure that if the caller is expecting to cancel this task |     # NOTE: ensure that if the parent is expecting to cancel this task | ||||||
|     # that we stay echoing much longer then they are so we don't |     # that we stay echoing much longer then they are so we don't | ||||||
|     # return early instead of receive the cancel msg. |     # return early instead of receive the cancel msg. | ||||||
|     total_batches: int = ( |     total_batches: int = ( | ||||||
|  | @ -996,18 +1065,18 @@ async def echo_back_sequence( | ||||||
|                 if be_slow: |                 if be_slow: | ||||||
|                     await trio.sleep(0.05) |                     await trio.sleep(0.05) | ||||||
| 
 | 
 | ||||||
|                 print('callee waiting on next') |                 print('child waiting on next') | ||||||
| 
 | 
 | ||||||
|             print(f'callee echoing back latest batch\n{batch}') |             print(f'child echoing back latest batch\n{batch}') | ||||||
|             for msg in batch: |             for msg in batch: | ||||||
|                 print(f'callee sending msg\n{msg}') |                 print(f'child sending msg\n{msg}') | ||||||
|                 await stream.send(msg) |                 await stream.send(msg) | ||||||
| 
 | 
 | ||||||
|     try: |     try: | ||||||
|         return 'yo' |         return 'yo' | ||||||
|     finally: |     finally: | ||||||
|         print( |         print( | ||||||
|             'exiting callee with context:\n' |             'exiting child with context:\n' | ||||||
|             f'{pformat(ctx)}\n' |             f'{pformat(ctx)}\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  | @ -1061,7 +1130,7 @@ def test_maybe_allow_overruns_stream( | ||||||
|             debug_mode=debug_mode, |             debug_mode=debug_mode, | ||||||
|         ) as an: |         ) as an: | ||||||
|             portal = await an.start_actor( |             portal = await an.start_actor( | ||||||
|                 'callee_sends_forever', |                 'child_sends_forever', | ||||||
|                 enable_modules=[__name__], |                 enable_modules=[__name__], | ||||||
|                 loglevel=loglevel, |                 loglevel=loglevel, | ||||||
|                 debug_mode=debug_mode, |                 debug_mode=debug_mode, | ||||||
|  |  | ||||||
|  | @ -181,7 +181,9 @@ async def spawn_and_check_registry( | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 async with tractor.open_nursery() as n: |                 async with tractor.open_nursery() as n: | ||||||
|                     async with trio.open_nursery() as trion: |                     async with trio.open_nursery( | ||||||
|  |                         strict_exception_groups=False, | ||||||
|  |                     ) as trion: | ||||||
| 
 | 
 | ||||||
|                         portals = {} |                         portals = {} | ||||||
|                         for i in range(3): |                         for i in range(3): | ||||||
|  | @ -316,7 +318,9 @@ async def close_chans_before_nursery( | ||||||
|                         async with portal2.open_stream_from( |                         async with portal2.open_stream_from( | ||||||
|                             stream_forever |                             stream_forever | ||||||
|                         ) as agen2: |                         ) as agen2: | ||||||
|                             async with trio.open_nursery() as n: |                             async with trio.open_nursery( | ||||||
|  |                                 strict_exception_groups=False, | ||||||
|  |                             ) as n: | ||||||
|                                 n.start_soon(streamer, agen1) |                                 n.start_soon(streamer, agen1) | ||||||
|                                 n.start_soon(cancel, use_signal, .5) |                                 n.start_soon(cancel, use_signal, .5) | ||||||
|                                 try: |                                 try: | ||||||
|  |  | ||||||
|  | @ -19,7 +19,7 @@ from tractor._testing import ( | ||||||
| @pytest.fixture | @pytest.fixture | ||||||
| def run_example_in_subproc( | def run_example_in_subproc( | ||||||
|     loglevel: str, |     loglevel: str, | ||||||
|     testdir: pytest.Testdir, |     testdir: pytest.Pytester, | ||||||
|     reg_addr: tuple[str, int], |     reg_addr: tuple[str, int], | ||||||
| ): | ): | ||||||
| 
 | 
 | ||||||
|  | @ -81,28 +81,36 @@ def run_example_in_subproc( | ||||||
| 
 | 
 | ||||||
|     # walk yields: (dirpath, dirnames, filenames) |     # walk yields: (dirpath, dirnames, filenames) | ||||||
|     [ |     [ | ||||||
|         (p[0], f) for p in os.walk(examples_dir()) for f in p[2] |         (p[0], f) | ||||||
|  |         for p in os.walk(examples_dir()) | ||||||
|  |         for f in p[2] | ||||||
| 
 | 
 | ||||||
|         if '__' not in f |         if ( | ||||||
|  |             '__' not in f | ||||||
|             and f[0] != '_' |             and f[0] != '_' | ||||||
|             and 'debugging' not in p[0] |             and 'debugging' not in p[0] | ||||||
|             and 'integration' not in p[0] |             and 'integration' not in p[0] | ||||||
|             and 'advanced_faults' not in p[0] |             and 'advanced_faults' not in p[0] | ||||||
|             and 'multihost' not in p[0] |             and 'multihost' not in p[0] | ||||||
|  |         ) | ||||||
|     ], |     ], | ||||||
| 
 |  | ||||||
|     ids=lambda t: t[1], |     ids=lambda t: t[1], | ||||||
| ) | ) | ||||||
| def test_example(run_example_in_subproc, example_script): | def test_example( | ||||||
|     """Load and run scripts from this repo's ``examples/`` dir as a user |     run_example_in_subproc, | ||||||
|  |     example_script, | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Load and run scripts from this repo's ``examples/`` dir as a user | ||||||
|     would copy and pasing them into their editor. |     would copy and pasing them into their editor. | ||||||
| 
 | 
 | ||||||
|     On windows a little more "finessing" is done to make |     On windows a little more "finessing" is done to make | ||||||
|     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the |     ``multiprocessing`` play nice: we copy the ``__main__.py`` into the | ||||||
|     test directory and invoke the script as a module with ``python -m |     test directory and invoke the script as a module with ``python -m | ||||||
|     test_example``. |     test_example``. | ||||||
|     """ | 
 | ||||||
|     ex_file = os.path.join(*example_script) |     ''' | ||||||
|  |     ex_file: str = os.path.join(*example_script) | ||||||
| 
 | 
 | ||||||
|     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): |     if 'rpc_bidir_streaming' in ex_file and sys.version_info < (3, 9): | ||||||
|         pytest.skip("2-way streaming example requires py3.9 async with syntax") |         pytest.skip("2-way streaming example requires py3.9 async with syntax") | ||||||
|  | @ -128,7 +136,8 @@ def test_example(run_example_in_subproc, example_script): | ||||||
|                     # shouldn't eventually once we figure out what's |                     # shouldn't eventually once we figure out what's | ||||||
|                     # a better way to be explicit about aio side |                     # a better way to be explicit about aio side | ||||||
|                     # cancels? |                     # cancels? | ||||||
|                     and 'asyncio.exceptions.CancelledError' not in last_error |                     and | ||||||
|  |                     'asyncio.exceptions.CancelledError' not in last_error | ||||||
|                 ): |                 ): | ||||||
|                     raise Exception(errmsg) |                     raise Exception(errmsg) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -0,0 +1,946 @@ | ||||||
|  | ''' | ||||||
|  | Low-level functional audits for our | ||||||
|  | "capability based messaging"-spec feats. | ||||||
|  | 
 | ||||||
|  | B~) | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import ( | ||||||
|  |     contextmanager as cm, | ||||||
|  |     # nullcontext, | ||||||
|  | ) | ||||||
|  | import importlib | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Type, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | from msgspec import ( | ||||||
|  |     # structs, | ||||||
|  |     # msgpack, | ||||||
|  |     Raw, | ||||||
|  |     # Struct, | ||||||
|  |     ValidationError, | ||||||
|  | ) | ||||||
|  | import pytest | ||||||
|  | import trio | ||||||
|  | 
 | ||||||
|  | import tractor | ||||||
|  | from tractor import ( | ||||||
|  |     Actor, | ||||||
|  |     # _state, | ||||||
|  |     MsgTypeError, | ||||||
|  |     Context, | ||||||
|  | ) | ||||||
|  | from tractor.msg import ( | ||||||
|  |     _codec, | ||||||
|  |     _ctxvar_MsgCodec, | ||||||
|  |     _exts, | ||||||
|  | 
 | ||||||
|  |     NamespacePath, | ||||||
|  |     MsgCodec, | ||||||
|  |     MsgDec, | ||||||
|  |     mk_codec, | ||||||
|  |     mk_dec, | ||||||
|  |     apply_codec, | ||||||
|  |     current_codec, | ||||||
|  | ) | ||||||
|  | from tractor.msg.types import ( | ||||||
|  |     log, | ||||||
|  |     Started, | ||||||
|  |     # _payload_msgs, | ||||||
|  |     # PayloadMsg, | ||||||
|  |     # mk_msg_spec, | ||||||
|  | ) | ||||||
|  | from tractor.msg._ops import ( | ||||||
|  |     limit_plds, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | def enc_nsp(obj: Any) -> Any: | ||||||
|  |     actor: Actor = tractor.current_actor( | ||||||
|  |         err_on_no_runtime=False, | ||||||
|  |     ) | ||||||
|  |     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||||
|  |     print(f'{uid} ENC HOOK') | ||||||
|  | 
 | ||||||
|  |     match obj: | ||||||
|  |         # case NamespacePath()|str(): | ||||||
|  |         case NamespacePath(): | ||||||
|  |             encoded: str = str(obj) | ||||||
|  |             print( | ||||||
|  |                 f'----- ENCODING `NamespacePath` as `str` ------\n' | ||||||
|  |                 f'|_obj:{type(obj)!r} = {obj!r}\n' | ||||||
|  |                 f'|_encoded: str = {encoded!r}\n' | ||||||
|  |             ) | ||||||
|  |             # if type(obj) != NamespacePath: | ||||||
|  |             #     breakpoint() | ||||||
|  |             return encoded | ||||||
|  |         case _: | ||||||
|  |             logmsg: str = ( | ||||||
|  |                 f'{uid}\n' | ||||||
|  |                 'FAILED ENCODE\n' | ||||||
|  |                 f'obj-> `{obj}: {type(obj)}`\n' | ||||||
|  |             ) | ||||||
|  |             raise NotImplementedError(logmsg) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def dec_nsp( | ||||||
|  |     obj_type: Type, | ||||||
|  |     obj: Any, | ||||||
|  | 
 | ||||||
|  | ) -> Any: | ||||||
|  |     # breakpoint() | ||||||
|  |     actor: Actor = tractor.current_actor( | ||||||
|  |         err_on_no_runtime=False, | ||||||
|  |     ) | ||||||
|  |     uid: tuple[str, str]|None = None if not actor else actor.uid | ||||||
|  |     print( | ||||||
|  |         f'{uid}\n' | ||||||
|  |         'CUSTOM DECODE\n' | ||||||
|  |         f'type-arg-> {obj_type}\n' | ||||||
|  |         f'obj-arg-> `{obj}`: {type(obj)}\n' | ||||||
|  |     ) | ||||||
|  |     nsp = None | ||||||
|  |     # XXX, never happens right? | ||||||
|  |     if obj_type is Raw: | ||||||
|  |         breakpoint() | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         obj_type is NamespacePath | ||||||
|  |         and isinstance(obj, str) | ||||||
|  |         and ':' in obj | ||||||
|  |     ): | ||||||
|  |         nsp = NamespacePath(obj) | ||||||
|  |         # TODO: we could built a generic handler using | ||||||
|  |         # JUST matching the obj_type part? | ||||||
|  |         # nsp = obj_type(obj) | ||||||
|  | 
 | ||||||
|  |     if nsp: | ||||||
|  |         print(f'Returning NSP instance: {nsp}') | ||||||
|  |         return nsp | ||||||
|  | 
 | ||||||
|  |     logmsg: str = ( | ||||||
|  |         f'{uid}\n' | ||||||
|  |         'FAILED DECODE\n' | ||||||
|  |         f'type-> {obj_type}\n' | ||||||
|  |         f'obj-arg-> `{obj}`: {type(obj)}\n\n' | ||||||
|  |         f'current codec:\n' | ||||||
|  |         f'{current_codec()}\n' | ||||||
|  |     ) | ||||||
|  |     # TODO: figure out the ignore subsys for this! | ||||||
|  |     # -[ ] option whether to defense-relay backc the msg | ||||||
|  |     #   inside an `Invalid`/`Ignore` | ||||||
|  |     # -[ ] how to make this handling pluggable such that a | ||||||
|  |     #   `Channel`/`MsgTransport` can intercept and process | ||||||
|  |     #   back msgs either via exception handling or some other | ||||||
|  |     #   signal? | ||||||
|  |     log.warning(logmsg) | ||||||
|  |     # NOTE: this delivers the invalid | ||||||
|  |     # value up to `msgspec`'s decoding | ||||||
|  |     # machinery for error raising. | ||||||
|  |     return obj | ||||||
|  |     # raise NotImplementedError(logmsg) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ex_func(*args): | ||||||
|  |     ''' | ||||||
|  |     A mod level func we can ref and load via our `NamespacePath` | ||||||
|  |     python-object pointer `str` subtype. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     print(f'ex_func({args})') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'add_codec_hooks', | ||||||
|  |     [ | ||||||
|  |         True, | ||||||
|  |         False, | ||||||
|  |     ], | ||||||
|  |     ids=['use_codec_hooks', 'no_codec_hooks'], | ||||||
|  | ) | ||||||
|  | def test_custom_extension_types( | ||||||
|  |     debug_mode: bool, | ||||||
|  |     add_codec_hooks: bool | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Verify that a `MsgCodec` (used for encoding all outbound IPC msgs | ||||||
|  |     and decoding all inbound `PayloadMsg`s) and a paired `MsgDec` | ||||||
|  |     (used for decoding the `PayloadMsg.pld: Raw` received within a given | ||||||
|  |     task's ipc `Context` scope) can both send and receive "extension types" | ||||||
|  |     as supported via custom converter hooks passed to `msgspec`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     nsp_pld_dec: MsgDec = mk_dec( | ||||||
|  |         spec=None,  # ONLY support the ext type | ||||||
|  |         dec_hook=dec_nsp if add_codec_hooks else None, | ||||||
|  |         ext_types=[NamespacePath], | ||||||
|  |     ) | ||||||
|  |     nsp_codec: MsgCodec = mk_codec( | ||||||
|  |         # ipc_pld_spec=Raw,  # default! | ||||||
|  | 
 | ||||||
|  |         # NOTE XXX: the encode hook MUST be used no matter what since | ||||||
|  |         # our `NamespacePath` is not any of a `Any` native type nor | ||||||
|  |         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||||
|  |         # how to encode it unless we provide the custom hook. | ||||||
|  |         # | ||||||
|  |         # AGAIN that is, regardless of whether we spec an | ||||||
|  |         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||||
|  |         # how to enc `NamespacePath` (nsp), so we add a custom | ||||||
|  |         # hook to do that ALWAYS. | ||||||
|  |         enc_hook=enc_nsp if add_codec_hooks else None, | ||||||
|  | 
 | ||||||
|  |         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||||
|  |         # `Decoder`? so it won't work in tandem with the | ||||||
|  |         # `ipc_pld_spec` passed above? | ||||||
|  |         ext_types=[NamespacePath], | ||||||
|  | 
 | ||||||
|  |         # TODO? is it useful to have the `.pld` decoded *prior* to | ||||||
|  |         # the `PldRx`?? like perf or mem related? | ||||||
|  |         # ext_dec=nsp_pld_dec, | ||||||
|  |     ) | ||||||
|  |     if add_codec_hooks: | ||||||
|  |         assert nsp_codec.dec.dec_hook is None | ||||||
|  | 
 | ||||||
|  |         # TODO? if we pass `ext_dec` above? | ||||||
|  |         # assert nsp_codec.dec.dec_hook is dec_nsp | ||||||
|  | 
 | ||||||
|  |         assert nsp_codec.enc.enc_hook is enc_nsp | ||||||
|  | 
 | ||||||
|  |     nsp = NamespacePath.from_ref(ex_func) | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         nsp_bytes: bytes = nsp_codec.encode(nsp) | ||||||
|  |         nsp_rt_sin_msg = nsp_pld_dec.decode(nsp_bytes) | ||||||
|  |         nsp_rt_sin_msg.load_ref() is ex_func | ||||||
|  |     except TypeError: | ||||||
|  |         if not add_codec_hooks: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         msg_bytes: bytes = nsp_codec.encode( | ||||||
|  |             Started( | ||||||
|  |                 cid='cid', | ||||||
|  |                 pld=nsp, | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  |         # since the ext-type obj should also be set as the msg.pld | ||||||
|  |         assert nsp_bytes in msg_bytes | ||||||
|  |         started_rt: Started = nsp_codec.decode(msg_bytes) | ||||||
|  |         pld: Raw = started_rt.pld | ||||||
|  |         assert isinstance(pld, Raw) | ||||||
|  |         nsp_rt: NamespacePath = nsp_pld_dec.decode(pld) | ||||||
|  |         assert isinstance(nsp_rt, NamespacePath) | ||||||
|  |         # in obj comparison terms they should be the same | ||||||
|  |         assert nsp_rt == nsp | ||||||
|  |         # ensure we've decoded to ext type! | ||||||
|  |         assert nsp_rt.load_ref() is ex_func | ||||||
|  | 
 | ||||||
|  |     except TypeError: | ||||||
|  |         if not add_codec_hooks: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def sleep_forever_in_sub( | ||||||
|  |     ctx: Context, | ||||||
|  | ) -> None: | ||||||
|  |     await trio.sleep_forever() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def mk_custom_codec( | ||||||
|  |     add_hooks: bool, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[ | ||||||
|  |     MsgCodec,  # encode to send | ||||||
|  |     MsgDec,  # pld receive-n-decode | ||||||
|  | ]: | ||||||
|  |     ''' | ||||||
|  |     Create custom `msgpack` enc/dec-hooks and set a `Decoder` | ||||||
|  |     which only loads `pld_spec` (like `NamespacePath`) types. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  | 
 | ||||||
|  |     # XXX NOTE XXX: despite defining `NamespacePath` as a type | ||||||
|  |     # field on our `PayloadMsg.pld`, we still need a enc/dec_hook() pair | ||||||
|  |     # to cast to/from that type on the wire. See the docs: | ||||||
|  |     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||||
|  | 
 | ||||||
|  |     # if pld_spec is Any: | ||||||
|  |     #     pld_spec = Raw | ||||||
|  | 
 | ||||||
|  |     nsp_codec: MsgCodec = mk_codec( | ||||||
|  |         # ipc_pld_spec=Raw,  # default! | ||||||
|  | 
 | ||||||
|  |         # NOTE XXX: the encode hook MUST be used no matter what since | ||||||
|  |         # our `NamespacePath` is not any of a `Any` native type nor | ||||||
|  |         # a `msgspec.Struct` subtype - so `msgspec` has no way to know | ||||||
|  |         # how to encode it unless we provide the custom hook. | ||||||
|  |         # | ||||||
|  |         # AGAIN that is, regardless of whether we spec an | ||||||
|  |         # `Any`-decoded-pld the enc has no knowledge (by default) | ||||||
|  |         # how to enc `NamespacePath` (nsp), so we add a custom | ||||||
|  |         # hook to do that ALWAYS. | ||||||
|  |         enc_hook=enc_nsp if add_hooks else None, | ||||||
|  | 
 | ||||||
|  |         # XXX NOTE: pretty sure this is mutex with the `type=` to | ||||||
|  |         # `Decoder`? so it won't work in tandem with the | ||||||
|  |         # `ipc_pld_spec` passed above? | ||||||
|  |         ext_types=[NamespacePath], | ||||||
|  |     ) | ||||||
|  |     # dec_hook=dec_nsp if add_hooks else None, | ||||||
|  |     return nsp_codec | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'limit_plds_args', | ||||||
|  |     [ | ||||||
|  |         ( | ||||||
|  |             {'dec_hook': None, 'ext_types': None}, | ||||||
|  |             None, | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             {'dec_hook': dec_nsp, 'ext_types': None}, | ||||||
|  |             TypeError, | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath]}, | ||||||
|  |             None, | ||||||
|  |         ), | ||||||
|  |         ( | ||||||
|  |             {'dec_hook': dec_nsp, 'ext_types': [NamespacePath|None]}, | ||||||
|  |             None, | ||||||
|  |         ), | ||||||
|  |     ], | ||||||
|  |     ids=[ | ||||||
|  |         'no_hook_no_ext_types', | ||||||
|  |         'only_hook', | ||||||
|  |         'hook_and_ext_types', | ||||||
|  |         'hook_and_ext_types_w_null', | ||||||
|  |     ] | ||||||
|  | ) | ||||||
|  | def test_pld_limiting_usage( | ||||||
|  |     limit_plds_args: tuple[dict, Exception|None], | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Verify `dec_hook()` and `ext_types` need to either both be | ||||||
|  |     provided or we raise a explanator type-error. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     kwargs, maybe_err = limit_plds_args | ||||||
|  |     async def main(): | ||||||
|  |         async with tractor.open_nursery() as an:  # just to open runtime | ||||||
|  | 
 | ||||||
|  |             # XXX SHOULD NEVER WORK outside an ipc ctx scope! | ||||||
|  |             try: | ||||||
|  |                 with limit_plds(**kwargs): | ||||||
|  |                     pass | ||||||
|  |             except RuntimeError: | ||||||
|  |                 pass | ||||||
|  | 
 | ||||||
|  |             p: tractor.Portal = await an.start_actor( | ||||||
|  |                 'sub', | ||||||
|  |                 enable_modules=[__name__], | ||||||
|  |             ) | ||||||
|  |             async with ( | ||||||
|  |                 p.open_context( | ||||||
|  |                     sleep_forever_in_sub | ||||||
|  |                 ) as (ctx, first), | ||||||
|  |             ): | ||||||
|  |                 try: | ||||||
|  |                     with limit_plds(**kwargs): | ||||||
|  |                         pass | ||||||
|  |                 except maybe_err as exc: | ||||||
|  |                     assert type(exc) is maybe_err | ||||||
|  |                     pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def chk_codec_applied( | ||||||
|  |     expect_codec: MsgCodec|None, | ||||||
|  |     enter_value: MsgCodec|None = None, | ||||||
|  | 
 | ||||||
|  | ) -> MsgCodec: | ||||||
|  |     ''' | ||||||
|  |     buncha sanity checks ensuring that the IPC channel's | ||||||
|  |     context-vars are set to the expected codec and that are | ||||||
|  |     ctx-var wrapper APIs match the same. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # TODO: play with tricyle again, bc this is supposed to work | ||||||
|  |     # the way we want? | ||||||
|  |     # | ||||||
|  |     # TreeVar | ||||||
|  |     # task: trio.Task = trio.lowlevel.current_task() | ||||||
|  |     # curr_codec = _ctxvar_MsgCodec.get_in(task) | ||||||
|  | 
 | ||||||
|  |     # ContextVar | ||||||
|  |     # task_ctx: Context = task.context | ||||||
|  |     # assert _ctxvar_MsgCodec in task_ctx | ||||||
|  |     # curr_codec: MsgCodec = task.context[_ctxvar_MsgCodec] | ||||||
|  |     if expect_codec is None: | ||||||
|  |         assert enter_value is None | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     # NOTE: currently we use this! | ||||||
|  |     # RunVar | ||||||
|  |     curr_codec: MsgCodec = current_codec() | ||||||
|  |     last_read_codec = _ctxvar_MsgCodec.get() | ||||||
|  |     # assert curr_codec is last_read_codec | ||||||
|  | 
 | ||||||
|  |     assert ( | ||||||
|  |         (same_codec := expect_codec) is | ||||||
|  |         # returned from `mk_codec()` | ||||||
|  | 
 | ||||||
|  |         # yielded value from `apply_codec()` | ||||||
|  | 
 | ||||||
|  |         # read from current task's `contextvars.Context` | ||||||
|  |         curr_codec is | ||||||
|  |         last_read_codec | ||||||
|  | 
 | ||||||
|  |         # the default `msgspec` settings | ||||||
|  |         is not _codec._def_msgspec_codec | ||||||
|  |         is not _codec._def_tractor_codec | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     if enter_value: | ||||||
|  |         assert enter_value is same_codec | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def send_back_values( | ||||||
|  |     ctx: Context, | ||||||
|  |     rent_pld_spec_type_strs: list[str], | ||||||
|  |     add_hooks: bool, | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     ''' | ||||||
|  |     Setup up a custom codec to load instances of `NamespacePath` | ||||||
|  |     and ensure we can round trip a func ref with our parent. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     uid: tuple = tractor.current_actor().uid | ||||||
|  | 
 | ||||||
|  |     # init state in sub-actor should be default | ||||||
|  |     chk_codec_applied( | ||||||
|  |         expect_codec=_codec._def_tractor_codec, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # load pld spec from input str | ||||||
|  |     rent_pld_spec = _exts.dec_type_union( | ||||||
|  |         rent_pld_spec_type_strs, | ||||||
|  |         mods=[ | ||||||
|  |             importlib.import_module(__name__), | ||||||
|  |         ], | ||||||
|  |     ) | ||||||
|  |     rent_pld_spec_types: set[Type] = _codec.unpack_spec_types( | ||||||
|  |         rent_pld_spec, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # ONLY add ext-hooks if the rent specified a non-std type! | ||||||
|  |     add_hooks: bool = ( | ||||||
|  |         NamespacePath in rent_pld_spec_types | ||||||
|  |         and | ||||||
|  |         add_hooks | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # same as on parent side config. | ||||||
|  |     nsp_codec: MsgCodec|None = None | ||||||
|  |     if add_hooks: | ||||||
|  |         nsp_codec = mk_codec( | ||||||
|  |             enc_hook=enc_nsp, | ||||||
|  |             ext_types=[NamespacePath], | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     with ( | ||||||
|  |         maybe_apply_codec(nsp_codec) as codec, | ||||||
|  |         limit_plds( | ||||||
|  |             rent_pld_spec, | ||||||
|  |             dec_hook=dec_nsp if add_hooks else None, | ||||||
|  |             ext_types=[NamespacePath]  if add_hooks else None, | ||||||
|  |         ) as pld_dec, | ||||||
|  |     ): | ||||||
|  |         # ?XXX? SHOULD WE NOT be swapping the global codec since it | ||||||
|  |         # breaks `Context.started()` roundtripping checks?? | ||||||
|  |         chk_codec_applied( | ||||||
|  |             expect_codec=nsp_codec, | ||||||
|  |             enter_value=codec, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # ?TODO, mismatch case(s)? | ||||||
|  |         # | ||||||
|  |         # ensure pld spec matches on both sides | ||||||
|  |         ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec | ||||||
|  |         assert pld_dec is ctx_pld_dec | ||||||
|  |         child_pld_spec: Type = pld_dec.spec | ||||||
|  |         child_pld_spec_types: set[Type] = _codec.unpack_spec_types( | ||||||
|  |             child_pld_spec, | ||||||
|  |         ) | ||||||
|  |         assert ( | ||||||
|  |             child_pld_spec_types.issuperset( | ||||||
|  |                 rent_pld_spec_types | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # ?TODO, try loop for each of the types in pld-superset? | ||||||
|  |         # | ||||||
|  |         # for send_value in [ | ||||||
|  |         #     nsp, | ||||||
|  |         #     str(nsp), | ||||||
|  |         #     None, | ||||||
|  |         # ]: | ||||||
|  |         nsp = NamespacePath.from_ref(ex_func) | ||||||
|  |         try: | ||||||
|  |             print( | ||||||
|  |                 f'{uid}: attempting to `.started({nsp})`\n' | ||||||
|  |                 f'\n' | ||||||
|  |                 f'rent_pld_spec: {rent_pld_spec}\n' | ||||||
|  |                 f'child_pld_spec: {child_pld_spec}\n' | ||||||
|  |                 f'codec: {codec}\n' | ||||||
|  |             ) | ||||||
|  |             # await tractor.pause() | ||||||
|  |             await ctx.started(nsp) | ||||||
|  | 
 | ||||||
|  |         except tractor.MsgTypeError as _mte: | ||||||
|  |             mte = _mte | ||||||
|  | 
 | ||||||
|  |             # false -ve case | ||||||
|  |             if add_hooks: | ||||||
|  |                 raise RuntimeError( | ||||||
|  |                     f'EXPECTED to `.started()` value given spec ??\n\n' | ||||||
|  |                     f'child_pld_spec -> {child_pld_spec}\n' | ||||||
|  |                     f'value = {nsp}: {type(nsp)}\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             # true -ve case | ||||||
|  |             raise mte | ||||||
|  | 
 | ||||||
|  |         # TODO: maybe we should add our own wrapper error so as to | ||||||
|  |         # be interchange-lib agnostic? | ||||||
|  |         # -[ ] the error type is wtv is raised from the hook so we | ||||||
|  |         #   could also require a type-class of errors for | ||||||
|  |         #   indicating whether the hook-failure can be handled by | ||||||
|  |         #   a nasty-dialog-unprot sub-sys? | ||||||
|  |         except TypeError as typerr: | ||||||
|  |             # false -ve | ||||||
|  |             if add_hooks: | ||||||
|  |                 raise RuntimeError('Should have been able to send `nsp`??') | ||||||
|  | 
 | ||||||
|  |             # true -ve | ||||||
|  |             print('Failed to send `nsp` due to no ext hooks set!') | ||||||
|  |             raise typerr | ||||||
|  | 
 | ||||||
|  |         # now try sending a set of valid and invalid plds to ensure | ||||||
|  |         # the pld spec is respected. | ||||||
|  |         sent: list[Any] = [] | ||||||
|  |         async with ctx.open_stream() as ipc: | ||||||
|  |             print( | ||||||
|  |                 f'{uid}: streaming all pld types to rent..' | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             # for send_value, expect_send in iter_send_val_items: | ||||||
|  |             for send_value in [ | ||||||
|  |                 nsp, | ||||||
|  |                 str(nsp), | ||||||
|  |                 None, | ||||||
|  |             ]: | ||||||
|  |                 send_type: Type = type(send_value) | ||||||
|  |                 print( | ||||||
|  |                     f'{uid}: SENDING NEXT pld\n' | ||||||
|  |                     f'send_type: {send_type}\n' | ||||||
|  |                     f'send_value: {send_value}\n' | ||||||
|  |                 ) | ||||||
|  |                 try: | ||||||
|  |                     await ipc.send(send_value) | ||||||
|  |                     sent.append(send_value) | ||||||
|  | 
 | ||||||
|  |                 except ValidationError as valerr: | ||||||
|  |                     print(f'{uid} FAILED TO SEND {send_value}!') | ||||||
|  | 
 | ||||||
|  |                     # false -ve | ||||||
|  |                     if add_hooks: | ||||||
|  |                         raise RuntimeError( | ||||||
|  |                             f'EXPECTED to roundtrip value given spec:\n' | ||||||
|  |                             f'rent_pld_spec -> {rent_pld_spec}\n' | ||||||
|  |                             f'child_pld_spec -> {child_pld_spec}\n' | ||||||
|  |                             f'value = {send_value}: {send_type}\n' | ||||||
|  |                         ) | ||||||
|  | 
 | ||||||
|  |                     # true -ve | ||||||
|  |                     raise valerr | ||||||
|  |                     # continue | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 print( | ||||||
|  |                     f'{uid}: finished sending all values\n' | ||||||
|  |                     'Should be exiting stream block!\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |         print(f'{uid}: exited streaming block!') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @cm | ||||||
|  | def maybe_apply_codec(codec: MsgCodec|None) -> MsgCodec|None: | ||||||
|  |     if codec is None: | ||||||
|  |         yield None | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     with apply_codec(codec) as codec: | ||||||
|  |         yield codec | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'pld_spec', | ||||||
|  |     [ | ||||||
|  |         Any, | ||||||
|  |         NamespacePath, | ||||||
|  |         NamespacePath|None,  # the "maybe" spec Bo | ||||||
|  |     ], | ||||||
|  |     ids=[ | ||||||
|  |         'any_type', | ||||||
|  |         'only_nsp_ext', | ||||||
|  |         'maybe_nsp_ext', | ||||||
|  |     ] | ||||||
|  | ) | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'add_hooks', | ||||||
|  |     [ | ||||||
|  |         True, | ||||||
|  |         False, | ||||||
|  |     ], | ||||||
|  |     ids=[ | ||||||
|  |         'use_codec_hooks', | ||||||
|  |         'no_codec_hooks', | ||||||
|  |     ], | ||||||
|  | ) | ||||||
|  | def test_ext_types_over_ipc( | ||||||
|  |     debug_mode: bool, | ||||||
|  |     pld_spec: Union[Type], | ||||||
|  |     add_hooks: bool, | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Ensure we can support extension types coverted using | ||||||
|  |     `enc/dec_hook()`s passed to the `.msg.limit_plds()` API | ||||||
|  |     and that sane errors happen when we try do the same without | ||||||
|  |     the codec hooks. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     pld_types: set[Type] = _codec.unpack_spec_types(pld_spec) | ||||||
|  | 
 | ||||||
|  |     async def main(): | ||||||
|  | 
 | ||||||
|  |         # sanity check the default pld-spec beforehand | ||||||
|  |         chk_codec_applied( | ||||||
|  |             expect_codec=_codec._def_tractor_codec, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # extension type we want to send as msg payload | ||||||
|  |         nsp = NamespacePath.from_ref(ex_func) | ||||||
|  | 
 | ||||||
|  |         # ^NOTE, 2 cases: | ||||||
|  |         # - codec hooks noto added -> decode nsp as `str` | ||||||
|  |         # - codec with hooks -> decode nsp as `NamespacePath` | ||||||
|  |         nsp_codec: MsgCodec|None = None | ||||||
|  |         if ( | ||||||
|  |             NamespacePath in pld_types | ||||||
|  |             and | ||||||
|  |             add_hooks | ||||||
|  |         ): | ||||||
|  |             nsp_codec = mk_codec( | ||||||
|  |                 enc_hook=enc_nsp, | ||||||
|  |                 ext_types=[NamespacePath], | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         async with tractor.open_nursery( | ||||||
|  |             debug_mode=debug_mode, | ||||||
|  |         ) as an: | ||||||
|  |             p: tractor.Portal = await an.start_actor( | ||||||
|  |                 'sub', | ||||||
|  |                 enable_modules=[__name__], | ||||||
|  |             ) | ||||||
|  |             with ( | ||||||
|  |                 maybe_apply_codec(nsp_codec) as codec, | ||||||
|  |             ): | ||||||
|  |                 chk_codec_applied( | ||||||
|  |                     expect_codec=nsp_codec, | ||||||
|  |                     enter_value=codec, | ||||||
|  |                 ) | ||||||
|  |                 rent_pld_spec_type_strs: list[str] = _exts.enc_type_union(pld_spec) | ||||||
|  | 
 | ||||||
|  |                 # XXX should raise an mte (`MsgTypeError`) | ||||||
|  |                 # when `add_hooks == False` bc the input | ||||||
|  |                 # `expect_ipc_send` kwarg has a nsp which can't be | ||||||
|  |                 # serialized! | ||||||
|  |                 # | ||||||
|  |                 # TODO:can we ensure this happens from the | ||||||
|  |                 # `Return`-side (aka the sub) as well? | ||||||
|  |                 try: | ||||||
|  |                     ctx: tractor.Context | ||||||
|  |                     ipc: tractor.MsgStream | ||||||
|  |                     async with ( | ||||||
|  | 
 | ||||||
|  |                         # XXX should raise an mte (`MsgTypeError`) | ||||||
|  |                         # when `add_hooks == False`.. | ||||||
|  |                         p.open_context( | ||||||
|  |                             send_back_values, | ||||||
|  |                             # expect_debug=debug_mode, | ||||||
|  |                             rent_pld_spec_type_strs=rent_pld_spec_type_strs, | ||||||
|  |                             add_hooks=add_hooks, | ||||||
|  |                             # expect_ipc_send=expect_ipc_send, | ||||||
|  |                         ) as (ctx, first), | ||||||
|  | 
 | ||||||
|  |                         ctx.open_stream() as ipc, | ||||||
|  |                     ): | ||||||
|  |                         with ( | ||||||
|  |                             limit_plds( | ||||||
|  |                                 pld_spec, | ||||||
|  |                                 dec_hook=dec_nsp if add_hooks else None, | ||||||
|  |                                 ext_types=[NamespacePath]  if add_hooks else None, | ||||||
|  |                             ) as pld_dec, | ||||||
|  |                         ): | ||||||
|  |                             ctx_pld_dec: MsgDec = ctx._pld_rx._pld_dec | ||||||
|  |                             assert pld_dec is ctx_pld_dec | ||||||
|  | 
 | ||||||
|  |                             # if ( | ||||||
|  |                             #     not add_hooks | ||||||
|  |                             #     and | ||||||
|  |                             #     NamespacePath in  | ||||||
|  |                             # ): | ||||||
|  |                             #     pytest.fail('ctx should fail to open without custom enc_hook!?') | ||||||
|  | 
 | ||||||
|  |                             await ipc.send(nsp) | ||||||
|  |                             nsp_rt = await ipc.receive() | ||||||
|  | 
 | ||||||
|  |                             assert nsp_rt == nsp | ||||||
|  |                             assert nsp_rt.load_ref() is ex_func | ||||||
|  | 
 | ||||||
|  |                 # this test passes bc we can go no further! | ||||||
|  |                 except MsgTypeError as mte: | ||||||
|  |                     # if not add_hooks: | ||||||
|  |                     #     # teardown nursery | ||||||
|  |                     #     await p.cancel_actor() | ||||||
|  |                         # return | ||||||
|  | 
 | ||||||
|  |                     raise mte | ||||||
|  | 
 | ||||||
|  |             await p.cancel_actor() | ||||||
|  | 
 | ||||||
|  |     if ( | ||||||
|  |         NamespacePath in pld_types | ||||||
|  |         and | ||||||
|  |         add_hooks | ||||||
|  |     ): | ||||||
|  |         trio.run(main) | ||||||
|  | 
 | ||||||
|  |     else: | ||||||
|  |         with pytest.raises( | ||||||
|  |             expected_exception=tractor.RemoteActorError, | ||||||
|  |         ) as excinfo: | ||||||
|  |             trio.run(main) | ||||||
|  | 
 | ||||||
|  |         exc = excinfo.value | ||||||
|  |         # bc `.started(nsp: NamespacePath)` will raise | ||||||
|  |         assert exc.boxed_type is TypeError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # def chk_pld_type( | ||||||
|  | #     payload_spec: Type[Struct]|Any, | ||||||
|  | #     pld: Any, | ||||||
|  | 
 | ||||||
|  | #     expect_roundtrip: bool|None = None, | ||||||
|  | 
 | ||||||
|  | # ) -> bool: | ||||||
|  | 
 | ||||||
|  | #     pld_val_type: Type = type(pld) | ||||||
|  | 
 | ||||||
|  | #     # TODO: verify that the overridden subtypes | ||||||
|  | #     # DO NOT have modified type-annots from original! | ||||||
|  | #     # 'Start',  .pld: FuncSpec | ||||||
|  | #     # 'StartAck',  .pld: IpcCtxSpec | ||||||
|  | #     # 'Stop',  .pld: UNSEt | ||||||
|  | #     # 'Error',  .pld: ErrorData | ||||||
|  | 
 | ||||||
|  | #     codec: MsgCodec = mk_codec( | ||||||
|  | #         # NOTE: this ONLY accepts `PayloadMsg.pld` fields of a specified | ||||||
|  | #         # type union. | ||||||
|  | #         ipc_pld_spec=payload_spec, | ||||||
|  | #     ) | ||||||
|  | 
 | ||||||
|  | #     # make a one-off dec to compare with our `MsgCodec` instance | ||||||
|  | #     # which does the below `mk_msg_spec()` call internally | ||||||
|  | #     ipc_msg_spec: Union[Type[Struct]] | ||||||
|  | #     msg_types: list[PayloadMsg[payload_spec]] | ||||||
|  | #     ( | ||||||
|  | #         ipc_msg_spec, | ||||||
|  | #         msg_types, | ||||||
|  | #     ) = mk_msg_spec( | ||||||
|  | #         payload_type_union=payload_spec, | ||||||
|  | #     ) | ||||||
|  | #     _enc = msgpack.Encoder() | ||||||
|  | #     _dec = msgpack.Decoder( | ||||||
|  | #         type=ipc_msg_spec or Any,  # like `PayloadMsg[Any]` | ||||||
|  | #     ) | ||||||
|  | 
 | ||||||
|  | #     assert ( | ||||||
|  | #         payload_spec | ||||||
|  | #         == | ||||||
|  | #         codec.pld_spec | ||||||
|  | #     ) | ||||||
|  | 
 | ||||||
|  | #     # assert codec.dec == dec | ||||||
|  | #     # | ||||||
|  | #     # ^-XXX-^ not sure why these aren't "equal" but when cast | ||||||
|  | #     # to `str` they seem to match ?? .. kk | ||||||
|  | 
 | ||||||
|  | #     assert ( | ||||||
|  | #         str(ipc_msg_spec) | ||||||
|  | #         == | ||||||
|  | #         str(codec.msg_spec) | ||||||
|  | #         == | ||||||
|  | #         str(_dec.type) | ||||||
|  | #         == | ||||||
|  | #         str(codec.dec.type) | ||||||
|  | #     ) | ||||||
|  | 
 | ||||||
|  | #     # verify the boxed-type for all variable payload-type msgs. | ||||||
|  | #     if not msg_types: | ||||||
|  | #         breakpoint() | ||||||
|  | 
 | ||||||
|  | #     roundtrip: bool|None = None | ||||||
|  | #     pld_spec_msg_names: list[str] = [ | ||||||
|  | #         td.__name__ for td in _payload_msgs | ||||||
|  | #     ] | ||||||
|  | #     for typedef in msg_types: | ||||||
|  | 
 | ||||||
|  | #         skip_runtime_msg: bool = typedef.__name__ not in pld_spec_msg_names | ||||||
|  | #         if skip_runtime_msg: | ||||||
|  | #             continue | ||||||
|  | 
 | ||||||
|  | #         pld_field = structs.fields(typedef)[1] | ||||||
|  | #         assert pld_field.type is payload_spec # TODO-^ does this need to work to get all subtypes to adhere? | ||||||
|  | 
 | ||||||
|  | #         kwargs: dict[str, Any] = { | ||||||
|  | #             'cid': '666', | ||||||
|  | #             'pld': pld, | ||||||
|  | #         } | ||||||
|  | #         enc_msg: PayloadMsg = typedef(**kwargs) | ||||||
|  | 
 | ||||||
|  | #         _wire_bytes: bytes = _enc.encode(enc_msg) | ||||||
|  | #         wire_bytes: bytes = codec.enc.encode(enc_msg) | ||||||
|  | #         assert _wire_bytes == wire_bytes | ||||||
|  | 
 | ||||||
|  | #         ve: ValidationError|None = None | ||||||
|  | #         try: | ||||||
|  | #             dec_msg = codec.dec.decode(wire_bytes) | ||||||
|  | #             _dec_msg = _dec.decode(wire_bytes) | ||||||
|  | 
 | ||||||
|  | #             # decoded msg and thus payload should be exactly same! | ||||||
|  | #             assert (roundtrip := ( | ||||||
|  | #                 _dec_msg | ||||||
|  | #                 == | ||||||
|  | #                 dec_msg | ||||||
|  | #                 == | ||||||
|  | #                 enc_msg | ||||||
|  | #             )) | ||||||
|  | 
 | ||||||
|  | #             if ( | ||||||
|  | #                 expect_roundtrip is not None | ||||||
|  | #                 and expect_roundtrip != roundtrip | ||||||
|  | #             ): | ||||||
|  | #                 breakpoint() | ||||||
|  | 
 | ||||||
|  | #             assert ( | ||||||
|  | #                 pld | ||||||
|  | #                 == | ||||||
|  | #                 dec_msg.pld | ||||||
|  | #                 == | ||||||
|  | #                 enc_msg.pld | ||||||
|  | #             ) | ||||||
|  | #             # assert (roundtrip := (_dec_msg == enc_msg)) | ||||||
|  | 
 | ||||||
|  | #         except ValidationError as _ve: | ||||||
|  | #             ve = _ve | ||||||
|  | #             roundtrip: bool = False | ||||||
|  | #             if pld_val_type is payload_spec: | ||||||
|  | #                 raise ValueError( | ||||||
|  | #                    'Got `ValidationError` despite type-var match!?\n' | ||||||
|  | #                     f'pld_val_type: {pld_val_type}\n' | ||||||
|  | #                     f'payload_type: {payload_spec}\n' | ||||||
|  | #                 ) from ve | ||||||
|  | 
 | ||||||
|  | #             else: | ||||||
|  | #                 # ow we good cuz the pld spec mismatched. | ||||||
|  | #                 print( | ||||||
|  | #                     'Got expected `ValidationError` since,\n' | ||||||
|  | #                     f'{pld_val_type} is not {payload_spec}\n' | ||||||
|  | #                 ) | ||||||
|  | #         else: | ||||||
|  | #             if ( | ||||||
|  | #                 payload_spec is not Any | ||||||
|  | #                 and | ||||||
|  | #                 pld_val_type is not payload_spec | ||||||
|  | #             ): | ||||||
|  | #                 raise ValueError( | ||||||
|  | #                    'DID NOT `ValidationError` despite expected type match!?\n' | ||||||
|  | #                     f'pld_val_type: {pld_val_type}\n' | ||||||
|  | #                     f'payload_type: {payload_spec}\n' | ||||||
|  | #                 ) | ||||||
|  | 
 | ||||||
|  | #     # full code decode should always be attempted! | ||||||
|  | #     if roundtrip is None: | ||||||
|  | #         breakpoint() | ||||||
|  | 
 | ||||||
|  | #     return roundtrip | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # ?TODO? maybe remove since covered in the newer `test_pldrx_limiting` | ||||||
|  | # via end-2-end testing of all this? | ||||||
|  | # -[ ] IOW do we really NEED this lowlevel unit testing? | ||||||
|  | # | ||||||
|  | # def test_limit_msgspec( | ||||||
|  | #     debug_mode: bool, | ||||||
|  | # ): | ||||||
|  | #     ''' | ||||||
|  | #     Internals unit testing to verify that type-limiting an IPC ctx's | ||||||
|  | #     msg spec with `Pldrx.limit_plds()` results in various | ||||||
|  | #     encapsulated `msgspec` object settings and state. | ||||||
|  | 
 | ||||||
|  | #     ''' | ||||||
|  | #     async def main(): | ||||||
|  | #         async with tractor.open_root_actor( | ||||||
|  | #             debug_mode=debug_mode, | ||||||
|  | #         ): | ||||||
|  | #             # ensure we can round-trip a boxing `PayloadMsg` | ||||||
|  | #             assert chk_pld_type( | ||||||
|  | #                 payload_spec=Any, | ||||||
|  | #                 pld=None, | ||||||
|  | #                 expect_roundtrip=True, | ||||||
|  | #             ) | ||||||
|  | 
 | ||||||
|  | #             # verify that a mis-typed payload value won't decode | ||||||
|  | #             assert not chk_pld_type( | ||||||
|  | #                 payload_spec=int, | ||||||
|  | #                 pld='doggy', | ||||||
|  | #             ) | ||||||
|  | 
 | ||||||
|  | #             # parametrize the boxed `.pld` type as a custom-struct | ||||||
|  | #             # and ensure that parametrization propagates | ||||||
|  | #             # to all payload-msg-spec-able subtypes! | ||||||
|  | #             class CustomPayload(Struct): | ||||||
|  | #                 name: str | ||||||
|  | #                 value: Any | ||||||
|  | 
 | ||||||
|  | #             assert not chk_pld_type( | ||||||
|  | #                 payload_spec=CustomPayload, | ||||||
|  | #                 pld='doggy', | ||||||
|  | #             ) | ||||||
|  | 
 | ||||||
|  | #             assert chk_pld_type( | ||||||
|  | #                 payload_spec=CustomPayload, | ||||||
|  | #                 pld=CustomPayload(name='doggy', value='urmom') | ||||||
|  | #             ) | ||||||
|  | 
 | ||||||
|  | #             # yah, we can `.pause_from_sync()` now! | ||||||
|  | #             # breakpoint() | ||||||
|  | 
 | ||||||
|  | #     trio.run(main) | ||||||
|  | @ -0,0 +1,167 @@ | ||||||
|  | """ | ||||||
|  | Shared mem primitives and APIs. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | import uuid | ||||||
|  | 
 | ||||||
|  | # import numpy | ||||||
|  | import pytest | ||||||
|  | import trio | ||||||
|  | import tractor | ||||||
|  | from tractor._shm import ( | ||||||
|  |     open_shm_list, | ||||||
|  |     attach_shm_list, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def child_attach_shml_alot( | ||||||
|  |     ctx: tractor.Context, | ||||||
|  |     shm_key: str, | ||||||
|  | ) -> None: | ||||||
|  | 
 | ||||||
|  |     await ctx.started(shm_key) | ||||||
|  | 
 | ||||||
|  |     # now try to attach a boatload of times in a loop.. | ||||||
|  |     for _ in range(1000): | ||||||
|  |         shml = attach_shm_list( | ||||||
|  |             key=shm_key, | ||||||
|  |             readonly=False, | ||||||
|  |         ) | ||||||
|  |         assert shml.shm.name == shm_key | ||||||
|  |         await trio.sleep(0.001) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def test_child_attaches_alot(): | ||||||
|  |     async def main(): | ||||||
|  |         async with tractor.open_nursery() as an: | ||||||
|  | 
 | ||||||
|  |             # allocate writeable list in parent | ||||||
|  |             key = f'shml_{uuid.uuid4()}' | ||||||
|  |             shml = open_shm_list( | ||||||
|  |                 key=key, | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             portal = await an.start_actor( | ||||||
|  |                 'shm_attacher', | ||||||
|  |                 enable_modules=[__name__], | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             async with ( | ||||||
|  |                 portal.open_context( | ||||||
|  |                     child_attach_shml_alot, | ||||||
|  |                     shm_key=shml.key, | ||||||
|  |                 ) as (ctx, start_val), | ||||||
|  |             ): | ||||||
|  |                 assert start_val == key | ||||||
|  |                 await ctx.result() | ||||||
|  | 
 | ||||||
|  |             await portal.cancel_actor() | ||||||
|  | 
 | ||||||
|  |     trio.run(main) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def child_read_shm_list( | ||||||
|  |     ctx: tractor.Context, | ||||||
|  |     shm_key: str, | ||||||
|  |     use_str: bool, | ||||||
|  |     frame_size: int, | ||||||
|  | ) -> None: | ||||||
|  | 
 | ||||||
|  |     # attach in child | ||||||
|  |     shml = attach_shm_list( | ||||||
|  |         key=shm_key, | ||||||
|  |         # dtype=str if use_str else float, | ||||||
|  |     ) | ||||||
|  |     await ctx.started(shml.key) | ||||||
|  | 
 | ||||||
|  |     async with ctx.open_stream() as stream: | ||||||
|  |         async for i in stream: | ||||||
|  |             print(f'(child): reading shm list index: {i}') | ||||||
|  | 
 | ||||||
|  |             if use_str: | ||||||
|  |                 expect = str(float(i)) | ||||||
|  |             else: | ||||||
|  |                 expect = float(i) | ||||||
|  | 
 | ||||||
|  |             if frame_size == 1: | ||||||
|  |                 val = shml[i] | ||||||
|  |                 assert expect == val | ||||||
|  |                 print(f'(child): reading value: {val}') | ||||||
|  |             else: | ||||||
|  |                 frame = shml[i - frame_size:i] | ||||||
|  |                 print(f'(child): reading frame: {frame}') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'use_str', | ||||||
|  |     [False, True], | ||||||
|  |     ids=lambda i: f'use_str_values={i}', | ||||||
|  | ) | ||||||
|  | @pytest.mark.parametrize( | ||||||
|  |     'frame_size', | ||||||
|  |     [1, 2**6, 2**10], | ||||||
|  |     ids=lambda i: f'frame_size={i}', | ||||||
|  | ) | ||||||
|  | def test_parent_writer_child_reader( | ||||||
|  |     use_str: bool, | ||||||
|  |     frame_size: int, | ||||||
|  | ): | ||||||
|  | 
 | ||||||
|  |     async def main(): | ||||||
|  |         async with tractor.open_nursery( | ||||||
|  |             # debug_mode=True, | ||||||
|  |         ) as an: | ||||||
|  | 
 | ||||||
|  |             portal = await an.start_actor( | ||||||
|  |                 'shm_reader', | ||||||
|  |                 enable_modules=[__name__], | ||||||
|  |                 debug_mode=True, | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             # allocate writeable list in parent | ||||||
|  |             key = 'shm_list' | ||||||
|  |             seq_size = int(2 * 2 ** 10) | ||||||
|  |             shml = open_shm_list( | ||||||
|  |                 key=key, | ||||||
|  |                 size=seq_size, | ||||||
|  |                 dtype=str if use_str else float, | ||||||
|  |                 readonly=False, | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             async with ( | ||||||
|  |                 portal.open_context( | ||||||
|  |                     child_read_shm_list, | ||||||
|  |                     shm_key=key, | ||||||
|  |                     use_str=use_str, | ||||||
|  |                     frame_size=frame_size, | ||||||
|  |                 ) as (ctx, sent), | ||||||
|  | 
 | ||||||
|  |                 ctx.open_stream() as stream, | ||||||
|  |             ): | ||||||
|  | 
 | ||||||
|  |                 assert sent == key | ||||||
|  | 
 | ||||||
|  |                 for i in range(seq_size): | ||||||
|  | 
 | ||||||
|  |                     val = float(i) | ||||||
|  |                     if use_str: | ||||||
|  |                         val = str(val) | ||||||
|  | 
 | ||||||
|  |                     # print(f'(parent): writing {val}') | ||||||
|  |                     shml[i] = val | ||||||
|  | 
 | ||||||
|  |                     # only on frame fills do we | ||||||
|  |                     # signal to the child that a frame's | ||||||
|  |                     # worth is ready. | ||||||
|  |                     if (i % frame_size) == 0: | ||||||
|  |                         print(f'(parent): signalling frame full on {val}') | ||||||
|  |                         await stream.send(i) | ||||||
|  |                 else: | ||||||
|  |                     print(f'(parent): signalling final frame on {val}') | ||||||
|  |                     await stream.send(i) | ||||||
|  | 
 | ||||||
|  |             await portal.cancel_actor() | ||||||
|  | 
 | ||||||
|  |     trio.run(main) | ||||||
|  | @ -2,7 +2,9 @@ | ||||||
| Broadcast channels for fan-out to local tasks. | Broadcast channels for fan-out to local tasks. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from contextlib import asynccontextmanager | from contextlib import ( | ||||||
|  |     asynccontextmanager as acm, | ||||||
|  | ) | ||||||
| from functools import partial | from functools import partial | ||||||
| from itertools import cycle | from itertools import cycle | ||||||
| import time | import time | ||||||
|  | @ -15,6 +17,7 @@ import tractor | ||||||
| from tractor.trionics import ( | from tractor.trionics import ( | ||||||
|     broadcast_receiver, |     broadcast_receiver, | ||||||
|     Lagged, |     Lagged, | ||||||
|  |     collapse_eg, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -62,7 +65,7 @@ async def ensure_sequence( | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @asynccontextmanager | @acm | ||||||
| async def open_sequence_streamer( | async def open_sequence_streamer( | ||||||
| 
 | 
 | ||||||
|     sequence: list[int], |     sequence: list[int], | ||||||
|  | @ -74,9 +77,9 @@ async def open_sequence_streamer( | ||||||
|     async with tractor.open_nursery( |     async with tractor.open_nursery( | ||||||
|         arbiter_addr=reg_addr, |         arbiter_addr=reg_addr, | ||||||
|         start_method=start_method, |         start_method=start_method, | ||||||
|     ) as tn: |     ) as an: | ||||||
| 
 | 
 | ||||||
|         portal = await tn.start_actor( |         portal = await an.start_actor( | ||||||
|             'sequence_echoer', |             'sequence_echoer', | ||||||
|             enable_modules=[__name__], |             enable_modules=[__name__], | ||||||
|         ) |         ) | ||||||
|  | @ -155,9 +158,12 @@ def test_consumer_and_parent_maybe_lag( | ||||||
|         ) as stream: |         ) as stream: | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 async with trio.open_nursery() as n: |                 async with ( | ||||||
|  |                     collapse_eg(), | ||||||
|  |                     trio.open_nursery() as tn, | ||||||
|  |                 ): | ||||||
| 
 | 
 | ||||||
|                     n.start_soon( |                     tn.start_soon( | ||||||
|                         ensure_sequence, |                         ensure_sequence, | ||||||
|                         stream, |                         stream, | ||||||
|                         sequence.copy(), |                         sequence.copy(), | ||||||
|  | @ -230,8 +236,8 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | ||||||
| 
 | 
 | ||||||
|         ) as stream: |         ) as stream: | ||||||
| 
 | 
 | ||||||
|             async with trio.open_nursery() as n: |             async with trio.open_nursery() as tn: | ||||||
|                 n.start_soon( |                 tn.start_soon( | ||||||
|                     ensure_sequence, |                     ensure_sequence, | ||||||
|                     stream, |                     stream, | ||||||
|                     sequence.copy(), |                     sequence.copy(), | ||||||
|  | @ -253,7 +259,7 @@ def test_faster_task_to_recv_is_cancelled_by_slower( | ||||||
|                         continue |                         continue | ||||||
| 
 | 
 | ||||||
|                 print('cancelling faster subtask') |                 print('cancelling faster subtask') | ||||||
|                 n.cancel_scope.cancel() |                 tn.cancel_scope.cancel() | ||||||
| 
 | 
 | ||||||
|             try: |             try: | ||||||
|                 value = await stream.receive() |                 value = await stream.receive() | ||||||
|  | @ -371,13 +377,13 @@ def test_ensure_slow_consumers_lag_out( | ||||||
|                                     f'on {lags}:{value}') |                                     f'on {lags}:{value}') | ||||||
|                                 return |                                 return | ||||||
| 
 | 
 | ||||||
|             async with trio.open_nursery() as nursery: |             async with trio.open_nursery() as tn: | ||||||
| 
 | 
 | ||||||
|                 for i in range(1, num_laggers): |                 for i in range(1, num_laggers): | ||||||
| 
 | 
 | ||||||
|                     task_name = f'sub_{i}' |                     task_name = f'sub_{i}' | ||||||
|                     laggers[task_name] = 0 |                     laggers[task_name] = 0 | ||||||
|                     nursery.start_soon( |                     tn.start_soon( | ||||||
|                         partial( |                         partial( | ||||||
|                             sub_and_print, |                             sub_and_print, | ||||||
|                             delay=i*0.001, |                             delay=i*0.001, | ||||||
|  | @ -497,6 +503,7 @@ def test_no_raise_on_lag(): | ||||||
|                 # internals when the no raise flag is set. |                 # internals when the no raise flag is set. | ||||||
|                 loglevel='warning', |                 loglevel='warning', | ||||||
|             ), |             ), | ||||||
|  |             collapse_eg(), | ||||||
|             trio.open_nursery() as n, |             trio.open_nursery() as n, | ||||||
|         ): |         ): | ||||||
|             n.start_soon(slow) |             n.start_soon(slow) | ||||||
|  |  | ||||||
|  | @ -64,7 +64,9 @@ def test_stashed_child_nursery(use_start_soon): | ||||||
|     async def main(): |     async def main(): | ||||||
| 
 | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             trio.open_nursery() as pn, |             trio.open_nursery( | ||||||
|  |                 strict_exception_groups=False, | ||||||
|  |             ) as pn, | ||||||
|         ): |         ): | ||||||
|             cn = await pn.start(mk_child_nursery) |             cn = await pn.start(mk_child_nursery) | ||||||
|             assert cn |             assert cn | ||||||
|  | @ -101,6 +103,7 @@ def test_stashed_child_nursery(use_start_soon): | ||||||
| def test_acm_embedded_nursery_propagates_enter_err( | def test_acm_embedded_nursery_propagates_enter_err( | ||||||
|     canc_from_finally: bool, |     canc_from_finally: bool, | ||||||
|     unmask_from_canc: bool, |     unmask_from_canc: bool, | ||||||
|  |     debug_mode: bool, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Demo how a masking `trio.Cancelled` could be handled by unmasking from the |     Demo how a masking `trio.Cancelled` could be handled by unmasking from the | ||||||
|  | @ -174,7 +177,9 @@ def test_acm_embedded_nursery_propagates_enter_err( | ||||||
|                     await trio.lowlevel.checkpoint() |                     await trio.lowlevel.checkpoint() | ||||||
| 
 | 
 | ||||||
|     async def _main(): |     async def _main(): | ||||||
|         with tractor.devx.open_crash_handler() as bxerr: |         with tractor.devx.maybe_open_crash_handler( | ||||||
|  |             pdb=debug_mode, | ||||||
|  |         ) as bxerr: | ||||||
|             assert not bxerr.value |             assert not bxerr.value | ||||||
| 
 | 
 | ||||||
|             async with ( |             async with ( | ||||||
|  |  | ||||||
|  | @ -44,6 +44,7 @@ from ._state import ( | ||||||
|     current_actor as current_actor, |     current_actor as current_actor, | ||||||
|     is_root_process as is_root_process, |     is_root_process as is_root_process, | ||||||
|     current_ipc_ctx as current_ipc_ctx, |     current_ipc_ctx as current_ipc_ctx, | ||||||
|  |     debug_mode as debug_mode | ||||||
| ) | ) | ||||||
| from ._exceptions import ( | from ._exceptions import ( | ||||||
|     ContextCancelled as ContextCancelled, |     ContextCancelled as ContextCancelled, | ||||||
|  | @ -66,3 +67,4 @@ from ._root import ( | ||||||
| from ._ipc import Channel as Channel | from ._ipc import Channel as Channel | ||||||
| from ._portal import Portal as Portal | from ._portal import Portal as Portal | ||||||
| from ._runtime import Actor as Actor | from ._runtime import Actor as Actor | ||||||
|  | # from . import hilevel as hilevel | ||||||
|  |  | ||||||
|  | @ -19,10 +19,13 @@ Actor cluster helpers. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| 
 | from contextlib import ( | ||||||
| from contextlib import asynccontextmanager as acm |     asynccontextmanager as acm, | ||||||
|  | ) | ||||||
| from multiprocessing import cpu_count | from multiprocessing import cpu_count | ||||||
| from typing import AsyncGenerator, Optional | from typing import ( | ||||||
|  |     AsyncGenerator, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
|  |  | ||||||
|  | @ -47,6 +47,9 @@ from functools import partial | ||||||
| import inspect | import inspect | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
| import textwrap | import textwrap | ||||||
|  | from types import ( | ||||||
|  |     UnionType, | ||||||
|  | ) | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     AsyncGenerator, |     AsyncGenerator, | ||||||
|  | @ -79,6 +82,7 @@ from .msg import ( | ||||||
|     MsgType, |     MsgType, | ||||||
|     NamespacePath, |     NamespacePath, | ||||||
|     PayloadT, |     PayloadT, | ||||||
|  |     Return, | ||||||
|     Started, |     Started, | ||||||
|     Stop, |     Stop, | ||||||
|     Yield, |     Yield, | ||||||
|  | @ -242,11 +246,13 @@ class Context: | ||||||
|     # a drain loop? |     # a drain loop? | ||||||
|     # _res_scope: trio.CancelScope|None = None |     # _res_scope: trio.CancelScope|None = None | ||||||
| 
 | 
 | ||||||
|  |     _outcome_msg: Return|Error|ContextCancelled = Unresolved | ||||||
|  | 
 | ||||||
|     # on a clean exit there should be a final value |     # on a clean exit there should be a final value | ||||||
|     # delivered from the far end "callee" task, so |     # delivered from the far end "callee" task, so | ||||||
|     # this value is only set on one side. |     # this value is only set on one side. | ||||||
|     # _result: Any | int = None |     # _result: Any | int = None | ||||||
|     _result: Any|Unresolved = Unresolved |     _result: PayloadT|Unresolved = Unresolved | ||||||
| 
 | 
 | ||||||
|     # if the local "caller"  task errors this value is always set |     # if the local "caller"  task errors this value is always set | ||||||
|     # to the error that was captured in the |     # to the error that was captured in the | ||||||
|  | @ -1003,7 +1009,8 @@ class Context: | ||||||
|                     ) |                     ) | ||||||
|                 else: |                 else: | ||||||
|                     log.cancel( |                     log.cancel( | ||||||
|                         'Timed out on cancel request of remote task?\n' |                         f'Timed out on cancel request of remote task?\n' | ||||||
|  |                         f'\n' | ||||||
|                         f'{reminfo}' |                         f'{reminfo}' | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|  | @ -1195,8 +1202,10 @@ class Context: | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         __tracebackhide__: bool = hide_tb |         __tracebackhide__: bool = hide_tb | ||||||
|         assert self._portal, ( |         if not self._portal: | ||||||
|             '`Context.wait_for_result()` can not be called from callee side!' |             raise RuntimeError( | ||||||
|  |                 'Invalid usage of `Context.wait_for_result()`!\n' | ||||||
|  |                 'Not valid on child-side IPC ctx!\n' | ||||||
|             ) |             ) | ||||||
|         if self._final_result_is_set(): |         if self._final_result_is_set(): | ||||||
|             return self._result |             return self._result | ||||||
|  | @ -1218,6 +1227,8 @@ class Context: | ||||||
|             # since every message should be delivered via the normal |             # since every message should be delivered via the normal | ||||||
|             # `._deliver_msg()` route which will appropriately set |             # `._deliver_msg()` route which will appropriately set | ||||||
|             # any `.maybe_error`. |             # any `.maybe_error`. | ||||||
|  |             outcome_msg: Return|Error|ContextCancelled | ||||||
|  |             drained_msgs: list[MsgType] | ||||||
|             ( |             ( | ||||||
|                 outcome_msg, |                 outcome_msg, | ||||||
|                 drained_msgs, |                 drained_msgs, | ||||||
|  | @ -1225,11 +1236,19 @@ class Context: | ||||||
|                 ctx=self, |                 ctx=self, | ||||||
|                 hide_tb=hide_tb, |                 hide_tb=hide_tb, | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             drained_status: str = ( |             drained_status: str = ( | ||||||
|                 'Ctx drained to final outcome msg\n\n' |                 'Ctx drained to final outcome msg\n\n' | ||||||
|                 f'{outcome_msg}\n' |                 f'{outcome_msg}\n' | ||||||
|             ) |             ) | ||||||
|  | 
 | ||||||
|  |             # ?XXX, should already be set in `._deliver_msg()` right? | ||||||
|  |             if self._outcome_msg is not Unresolved: | ||||||
|  |                 # from .devx import _debug | ||||||
|  |                 # await _debug.pause() | ||||||
|  |                 assert self._outcome_msg is outcome_msg | ||||||
|  |             else: | ||||||
|  |                 self._outcome_msg = outcome_msg | ||||||
|  | 
 | ||||||
|             if drained_msgs: |             if drained_msgs: | ||||||
|                 drained_status += ( |                 drained_status += ( | ||||||
|                     '\n' |                     '\n' | ||||||
|  | @ -1560,12 +1579,12 @@ class Context: | ||||||
|                     strict_pld_parity=strict_pld_parity, |                     strict_pld_parity=strict_pld_parity, | ||||||
|                     hide_tb=hide_tb, |                     hide_tb=hide_tb, | ||||||
|                 ) |                 ) | ||||||
|             except BaseException as err: |             except BaseException as _bexc: | ||||||
|  |                 err = _bexc | ||||||
|                 if not isinstance(err, MsgTypeError): |                 if not isinstance(err, MsgTypeError): | ||||||
|                     __tracebackhide__: bool = False |                     __tracebackhide__: bool = False | ||||||
| 
 | 
 | ||||||
|                 raise |                 raise err | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
|         # TODO: maybe a flag to by-pass encode op if already done |         # TODO: maybe a flag to by-pass encode op if already done | ||||||
|         # here in caller? |         # here in caller? | ||||||
|  | @ -1737,7 +1756,6 @@ class Context: | ||||||
| 
 | 
 | ||||||
|                 f'{structfmt(msg)}\n' |                 f'{structfmt(msg)}\n' | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             # NOTE: if an error is deteced we should always still |             # NOTE: if an error is deteced we should always still | ||||||
|             # send it through the feeder-mem-chan and expect |             # send it through the feeder-mem-chan and expect | ||||||
|             # it to be raised by any context (stream) consumer |             # it to be raised by any context (stream) consumer | ||||||
|  | @ -1749,6 +1767,21 @@ class Context: | ||||||
|             # normally the task that should get cancelled/error |             # normally the task that should get cancelled/error | ||||||
|             # from some remote fault! |             # from some remote fault! | ||||||
|             send_chan.send_nowait(msg) |             send_chan.send_nowait(msg) | ||||||
|  |             match msg: | ||||||
|  |                 case Stop(): | ||||||
|  |                     if (stream := self._stream): | ||||||
|  |                         stream._stop_msg = msg | ||||||
|  | 
 | ||||||
|  |                 case Return(): | ||||||
|  |                     if not self._outcome_msg: | ||||||
|  |                         log.warning( | ||||||
|  |                             f'Setting final outcome msg AFTER ' | ||||||
|  |                             f'`._rx_chan.send()`??\n' | ||||||
|  |                             f'\n' | ||||||
|  |                             f'{msg}' | ||||||
|  |                         ) | ||||||
|  |                         self._outcome_msg = msg | ||||||
|  | 
 | ||||||
|             return True |             return True | ||||||
| 
 | 
 | ||||||
|         except trio.BrokenResourceError: |         except trio.BrokenResourceError: | ||||||
|  | @ -1982,7 +2015,10 @@ async def open_context_from_portal( | ||||||
|     ctxc_from_callee: ContextCancelled|None = None |     ctxc_from_callee: ContextCancelled|None = None | ||||||
|     try: |     try: | ||||||
|         async with ( |         async with ( | ||||||
|             trio.open_nursery() as tn, |             trio.open_nursery( | ||||||
|  |                 strict_exception_groups=False, | ||||||
|  |             ) as tn, | ||||||
|  | 
 | ||||||
|             msgops.maybe_limit_plds( |             msgops.maybe_limit_plds( | ||||||
|                 ctx=ctx, |                 ctx=ctx, | ||||||
|                 spec=ctx_meta.get('pld_spec'), |                 spec=ctx_meta.get('pld_spec'), | ||||||
|  | @ -2002,7 +2038,7 @@ async def open_context_from_portal( | ||||||
|             # the dialog, the `Error` msg should be raised from the `msg` |             # the dialog, the `Error` msg should be raised from the `msg` | ||||||
|             # handling block below. |             # handling block below. | ||||||
|             try: |             try: | ||||||
|                 started_msg, first = await ctx._pld_rx.recv_msg_w_pld( |                 started_msg, first = await ctx._pld_rx.recv_msg( | ||||||
|                     ipc=ctx, |                     ipc=ctx, | ||||||
|                     expect_msg=Started, |                     expect_msg=Started, | ||||||
|                     passthrough_non_pld_msgs=False, |                     passthrough_non_pld_msgs=False, | ||||||
|  | @ -2367,7 +2403,8 @@ async def open_context_from_portal( | ||||||
|             # displaying `ContextCancelled` traces where the |             # displaying `ContextCancelled` traces where the | ||||||
|             # cause of crash/exit IS due to something in |             # cause of crash/exit IS due to something in | ||||||
|             # user/app code on either end of the context. |             # user/app code on either end of the context. | ||||||
|             and not rxchan._closed |             and | ||||||
|  |             not rxchan._closed | ||||||
|         ): |         ): | ||||||
|             # XXX NOTE XXX: and again as per above, we mask any |             # XXX NOTE XXX: and again as per above, we mask any | ||||||
|             # `trio.Cancelled` raised here so as to NOT mask |             # `trio.Cancelled` raised here so as to NOT mask | ||||||
|  | @ -2426,6 +2463,7 @@ async def open_context_from_portal( | ||||||
|         # FINALLY, remove the context from runtime tracking and |         # FINALLY, remove the context from runtime tracking and | ||||||
|         # exit! |         # exit! | ||||||
|         log.runtime( |         log.runtime( | ||||||
|  |         # log.cancel( | ||||||
|             f'De-allocating IPC ctx opened with {ctx.side!r} peer \n' |             f'De-allocating IPC ctx opened with {ctx.side!r} peer \n' | ||||||
|             f'uid: {uid}\n' |             f'uid: {uid}\n' | ||||||
|             f'cid: {ctx.cid}\n' |             f'cid: {ctx.cid}\n' | ||||||
|  | @ -2481,7 +2519,6 @@ def mk_context( | ||||||
|         _caller_info=caller_info, |         _caller_info=caller_info, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) |     ) | ||||||
|     pld_rx._ctx = ctx |  | ||||||
|     ctx._result = Unresolved |     ctx._result = Unresolved | ||||||
|     return ctx |     return ctx | ||||||
| 
 | 
 | ||||||
|  | @ -2544,7 +2581,14 @@ def context( | ||||||
|     name: str |     name: str | ||||||
|     param: Type |     param: Type | ||||||
|     for name, param in annots.items(): |     for name, param in annots.items(): | ||||||
|         if param is Context: |         if ( | ||||||
|  |             param is Context | ||||||
|  |             or ( | ||||||
|  |                 isinstance(param, UnionType) | ||||||
|  |                 and | ||||||
|  |                 Context in param.__args__ | ||||||
|  |             ) | ||||||
|  |         ): | ||||||
|             ctx_var_name: str = name |             ctx_var_name: str = name | ||||||
|             break |             break | ||||||
|     else: |     else: | ||||||
|  |  | ||||||
|  | @ -238,7 +238,7 @@ def _trio_main( | ||||||
|             nest_from_op( |             nest_from_op( | ||||||
|                 input_op='>(',  # see syntax ideas above |                 input_op='>(',  # see syntax ideas above | ||||||
|                 tree_str=actor_info, |                 tree_str=actor_info, | ||||||
|                 back_from_op=1, |                 back_from_op=2,  # since "complete" | ||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|     logmeth = log.info |     logmeth = log.info | ||||||
|  |  | ||||||
|  | @ -22,6 +22,7 @@ from __future__ import annotations | ||||||
| import builtins | import builtins | ||||||
| import importlib | import importlib | ||||||
| from pprint import pformat | from pprint import pformat | ||||||
|  | from pdb import bdb | ||||||
| import sys | import sys | ||||||
| from types import ( | from types import ( | ||||||
|     TracebackType, |     TracebackType, | ||||||
|  | @ -103,7 +104,16 @@ class AsyncioTaskExited(Exception): | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
| 
 | 
 | ||||||
| class TrioTaskExited(AsyncioCancelled): | class TrioCancelled(Exception): | ||||||
|  |     ''' | ||||||
|  |     Trio cancelled translation (non-base) error | ||||||
|  |     for use with the `to_asyncio` module | ||||||
|  |     to be raised in the `asyncio.Task` to indicate | ||||||
|  |     that the `trio` side raised `Cancelled` or an error. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  | 
 | ||||||
|  | class TrioTaskExited(Exception): | ||||||
|     ''' |     ''' | ||||||
|     The `trio`-side task exited without explicitly cancelling the |     The `trio`-side task exited without explicitly cancelling the | ||||||
|     `asyncio.Task` peer. |     `asyncio.Task` peer. | ||||||
|  | @ -172,6 +182,7 @@ def get_err_type(type_name: str) -> BaseException|None: | ||||||
|         builtins, |         builtins, | ||||||
|         _this_mod, |         _this_mod, | ||||||
|         trio, |         trio, | ||||||
|  |         bdb, | ||||||
|     ]: |     ]: | ||||||
|         if type_ref := getattr( |         if type_ref := getattr( | ||||||
|             ns, |             ns, | ||||||
|  | @ -406,6 +417,9 @@ class RemoteActorError(Exception): | ||||||
|         String-name of the (last hop's) boxed error type. |         String-name of the (last hop's) boxed error type. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|  |         # TODO, maybe support also serializing the | ||||||
|  |         # `ExceptionGroup.exeptions: list[BaseException]` set under | ||||||
|  |         # certain conditions? | ||||||
|         bt: Type[BaseException] = self.boxed_type |         bt: Type[BaseException] = self.boxed_type | ||||||
|         if bt: |         if bt: | ||||||
|             return str(bt.__name__) |             return str(bt.__name__) | ||||||
|  | @ -418,9 +432,13 @@ class RemoteActorError(Exception): | ||||||
|         Error type boxed by last actor IPC hop. |         Error type boxed by last actor IPC hop. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         if self._boxed_type is None: |         if ( | ||||||
|  |             self._boxed_type is None | ||||||
|  |             and | ||||||
|  |             (ipc_msg := self._ipc_msg) | ||||||
|  |         ): | ||||||
|             self._boxed_type = get_err_type( |             self._boxed_type = get_err_type( | ||||||
|                 self._ipc_msg.boxed_type_str |                 ipc_msg.boxed_type_str | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         return self._boxed_type |         return self._boxed_type | ||||||
|  | @ -821,8 +839,11 @@ class MsgTypeError( | ||||||
|         ''' |         ''' | ||||||
|         if ( |         if ( | ||||||
|             (_bad_msg := self.msgdata.get('_bad_msg')) |             (_bad_msg := self.msgdata.get('_bad_msg')) | ||||||
|             and |             and ( | ||||||
|                 isinstance(_bad_msg, PayloadMsg) |                 isinstance(_bad_msg, PayloadMsg) | ||||||
|  |                 or | ||||||
|  |                 isinstance(_bad_msg, msgtypes.Start) | ||||||
|  |             ) | ||||||
|         ): |         ): | ||||||
|             return _bad_msg |             return _bad_msg | ||||||
| 
 | 
 | ||||||
|  | @ -1015,18 +1036,6 @@ class MessagingError(Exception): | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
| 
 | 
 | ||||||
| class AsyncioCancelled(Exception): |  | ||||||
|     ''' |  | ||||||
|     Asyncio cancelled translation (non-base) error |  | ||||||
|     for use with the ``to_asyncio`` module |  | ||||||
|     to be raised in the ``trio`` side task |  | ||||||
| 
 |  | ||||||
|     NOTE: this should NOT inherit from `asyncio.CancelledError` or |  | ||||||
|     tests should break! |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def pack_error( | def pack_error( | ||||||
|     exc: BaseException|RemoteActorError, |     exc: BaseException|RemoteActorError, | ||||||
| 
 | 
 | ||||||
|  | @ -1138,6 +1147,8 @@ def unpack_error( | ||||||
|     which is the responsibilitiy of the caller. |     which is the responsibilitiy of the caller. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     # XXX, apparently we pass all sorts of msgs here? | ||||||
|  |     # kinda odd but seems like maybe they shouldn't be? | ||||||
|     if not isinstance(msg, Error): |     if not isinstance(msg, Error): | ||||||
|         return None |         return None | ||||||
| 
 | 
 | ||||||
|  | @ -1206,7 +1217,7 @@ def is_multi_cancelled( | ||||||
|         trio.Cancelled in ignore_nested |         trio.Cancelled in ignore_nested | ||||||
|         # XXX always count-in `trio`'s native signal |         # XXX always count-in `trio`'s native signal | ||||||
|     ): |     ): | ||||||
|         ignore_nested |= {trio.Cancelled} |         ignore_nested.update({trio.Cancelled}) | ||||||
| 
 | 
 | ||||||
|     if isinstance(exc, BaseExceptionGroup): |     if isinstance(exc, BaseExceptionGroup): | ||||||
|         matched_exc: BaseExceptionGroup|None = exc.subgroup( |         matched_exc: BaseExceptionGroup|None = exc.subgroup( | ||||||
|  |  | ||||||
|  | @ -255,7 +255,7 @@ class MsgpackTCPStream(MsgTransport): | ||||||
|                 raise TransportClosed( |                 raise TransportClosed( | ||||||
|                     message=( |                     message=( | ||||||
|                         f'IPC transport already closed by peer\n' |                         f'IPC transport already closed by peer\n' | ||||||
|                         f'x)> {type(trans_err)}\n' |                         f'x]> {type(trans_err)}\n' | ||||||
|                         f'  |_{self}\n' |                         f'  |_{self}\n' | ||||||
|                     ), |                     ), | ||||||
|                     loglevel=loglevel, |                     loglevel=loglevel, | ||||||
|  | @ -273,7 +273,7 @@ class MsgpackTCPStream(MsgTransport): | ||||||
|                 raise TransportClosed( |                 raise TransportClosed( | ||||||
|                     message=( |                     message=( | ||||||
|                         f'IPC transport already manually closed locally?\n' |                         f'IPC transport already manually closed locally?\n' | ||||||
|                         f'x)> {type(closure_err)} \n' |                         f'x]> {type(closure_err)} \n' | ||||||
|                         f'  |_{self}\n' |                         f'  |_{self}\n' | ||||||
|                     ), |                     ), | ||||||
|                     loglevel='error', |                     loglevel='error', | ||||||
|  | @ -289,7 +289,7 @@ class MsgpackTCPStream(MsgTransport): | ||||||
|                 raise TransportClosed( |                 raise TransportClosed( | ||||||
|                     message=( |                     message=( | ||||||
|                         f'IPC transport already gracefully closed\n' |                         f'IPC transport already gracefully closed\n' | ||||||
|                         f')>\n' |                         f']>\n' | ||||||
|                         f' |_{self}\n' |                         f' |_{self}\n' | ||||||
|                     ), |                     ), | ||||||
|                     loglevel='transport', |                     loglevel='transport', | ||||||
|  |  | ||||||
|  | @ -184,7 +184,7 @@ class Portal: | ||||||
|                 ( |                 ( | ||||||
|                     self._final_result_msg, |                     self._final_result_msg, | ||||||
|                     self._final_result_pld, |                     self._final_result_pld, | ||||||
|                 ) = await self._expect_result_ctx._pld_rx.recv_msg_w_pld( |                 ) = await self._expect_result_ctx._pld_rx.recv_msg( | ||||||
|                     ipc=self._expect_result_ctx, |                     ipc=self._expect_result_ctx, | ||||||
|                     expect_msg=Return, |                     expect_msg=Return, | ||||||
|                 ) |                 ) | ||||||
|  | @ -533,6 +533,10 @@ async def open_portal( | ||||||
|     async with maybe_open_nursery( |     async with maybe_open_nursery( | ||||||
|         tn, |         tn, | ||||||
|         shield=shield, |         shield=shield, | ||||||
|  |         strict_exception_groups=False, | ||||||
|  |         # ^XXX^ TODO? soo roll our own then ?? | ||||||
|  |         # -> since we kinda want the "if only one `.exception` then | ||||||
|  |         # just raise that" interface? | ||||||
|     ) as tn: |     ) as tn: | ||||||
| 
 | 
 | ||||||
|         if not channel.connected(): |         if not channel.connected(): | ||||||
|  |  | ||||||
|  | @ -111,8 +111,8 @@ async def open_root_actor( | ||||||
|     Runtime init entry point for ``tractor``. |     Runtime init entry point for ``tractor``. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = hide_tb |  | ||||||
|     _debug.hide_runtime_frames() |     _debug.hide_runtime_frames() | ||||||
|  |     __tracebackhide__: bool = hide_tb | ||||||
| 
 | 
 | ||||||
|     # TODO: stick this in a `@cm` defined in `devx._debug`? |     # TODO: stick this in a `@cm` defined in `devx._debug`? | ||||||
|     # |     # | ||||||
|  | @ -362,7 +362,10 @@ async def open_root_actor( | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # start the actor runtime in a new task |         # start the actor runtime in a new task | ||||||
|         async with trio.open_nursery() as nursery: |         async with trio.open_nursery( | ||||||
|  |             strict_exception_groups=False, | ||||||
|  |             # ^XXX^ TODO? instead unpack any RAE as per "loose" style? | ||||||
|  |         ) as nursery: | ||||||
| 
 | 
 | ||||||
|             # ``_runtime.async_main()`` creates an internal nursery |             # ``_runtime.async_main()`` creates an internal nursery | ||||||
|             # and blocks here until any underlying actor(-process) |             # and blocks here until any underlying actor(-process) | ||||||
|  | @ -387,6 +390,12 @@ async def open_root_actor( | ||||||
|                 BaseExceptionGroup, |                 BaseExceptionGroup, | ||||||
|             ) as err: |             ) as err: | ||||||
| 
 | 
 | ||||||
|  |                 # TODO, in beginning to handle the subsubactor with | ||||||
|  |                 # crashed grandparent cases.. | ||||||
|  |                 # | ||||||
|  |                 # was_locked: bool = await _debug.maybe_wait_for_debugger( | ||||||
|  |                 #     child_in_debug=True, | ||||||
|  |                 # ) | ||||||
|                 # XXX NOTE XXX see equiv note inside |                 # XXX NOTE XXX see equiv note inside | ||||||
|                 # `._runtime.Actor._stream_handler()` where in the |                 # `._runtime.Actor._stream_handler()` where in the | ||||||
|                 # non-root or root-that-opened-this-mahually case we |                 # non-root or root-that-opened-this-mahually case we | ||||||
|  | @ -457,12 +466,19 @@ def run_daemon( | ||||||
| 
 | 
 | ||||||
|     start_method: str | None = None, |     start_method: str | None = None, | ||||||
|     debug_mode: bool = False, |     debug_mode: bool = False, | ||||||
|  | 
 | ||||||
|  |     # TODO, support `infected_aio=True` mode by, | ||||||
|  |     # - calling the appropriate entrypoint-func from `.to_asyncio` | ||||||
|  |     # - maybe init-ing `greenback` as done above in | ||||||
|  |     #   `open_root_actor()`. | ||||||
|  | 
 | ||||||
|     **kwargs |     **kwargs | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     ''' |     ''' | ||||||
|     Spawn daemon actor which will respond to RPC; the main task simply |     Spawn a root (daemon) actor which will respond to RPC; the main | ||||||
|     starts the runtime and then sleeps forever. |     task simply starts the runtime and then blocks via embedded | ||||||
|  |     `trio.sleep_forever()`. | ||||||
| 
 | 
 | ||||||
|     This is a very minimal convenience wrapper around starting |     This is a very minimal convenience wrapper around starting | ||||||
|     a "run-until-cancelled" root actor which can be started with a set |     a "run-until-cancelled" root actor which can be started with a set | ||||||
|  | @ -475,7 +491,6 @@ def run_daemon( | ||||||
|         importlib.import_module(path) |         importlib.import_module(path) | ||||||
| 
 | 
 | ||||||
|     async def _main(): |     async def _main(): | ||||||
| 
 |  | ||||||
|         async with open_root_actor( |         async with open_root_actor( | ||||||
|             registry_addrs=registry_addrs, |             registry_addrs=registry_addrs, | ||||||
|             name=name, |             name=name, | ||||||
|  |  | ||||||
|  | @ -620,7 +620,11 @@ async def _invoke( | ||||||
|             tn: trio.Nursery |             tn: trio.Nursery | ||||||
|             rpc_ctx_cs: CancelScope |             rpc_ctx_cs: CancelScope | ||||||
|             async with ( |             async with ( | ||||||
|                 trio.open_nursery() as tn, |                 trio.open_nursery( | ||||||
|  |                     strict_exception_groups=False, | ||||||
|  |                     # ^XXX^ TODO? instead unpack any RAE as per "loose" style? | ||||||
|  | 
 | ||||||
|  |                 ) as tn, | ||||||
|                 msgops.maybe_limit_plds( |                 msgops.maybe_limit_plds( | ||||||
|                     ctx=ctx, |                     ctx=ctx, | ||||||
|                     spec=ctx_meta.get('pld_spec'), |                     spec=ctx_meta.get('pld_spec'), | ||||||
|  | @ -645,6 +649,10 @@ async def _invoke( | ||||||
|                 ) |                 ) | ||||||
|                 # set and shuttle final result to "parent"-side task. |                 # set and shuttle final result to "parent"-side task. | ||||||
|                 ctx._result = res |                 ctx._result = res | ||||||
|  |                 log.runtime( | ||||||
|  |                     f'Sending result msg and exiting {ctx.side!r}\n' | ||||||
|  |                     f'{return_msg}\n' | ||||||
|  |                 ) | ||||||
|                 await chan.send(return_msg) |                 await chan.send(return_msg) | ||||||
| 
 | 
 | ||||||
|             # NOTE: this happens IFF `ctx._scope.cancel()` is |             # NOTE: this happens IFF `ctx._scope.cancel()` is | ||||||
|  | @ -733,8 +741,8 @@ async def _invoke( | ||||||
|         # XXX: do we ever trigger this block any more? |         # XXX: do we ever trigger this block any more? | ||||||
|         except ( |         except ( | ||||||
|             BaseExceptionGroup, |             BaseExceptionGroup, | ||||||
|             trio.Cancelled, |  | ||||||
|             BaseException, |             BaseException, | ||||||
|  |             trio.Cancelled, | ||||||
| 
 | 
 | ||||||
|         ) as scope_error: |         ) as scope_error: | ||||||
|             if ( |             if ( | ||||||
|  | @ -847,8 +855,8 @@ async def try_ship_error_to_remote( | ||||||
|             log.critical( |             log.critical( | ||||||
|                 'IPC transport failure -> ' |                 'IPC transport failure -> ' | ||||||
|                 f'failed to ship error to {remote_descr}!\n\n' |                 f'failed to ship error to {remote_descr}!\n\n' | ||||||
|                 f'X=> {channel.uid}\n\n' |                 f'{type(msg)!r}[{msg.boxed_type_str}] X=> {channel.uid}\n' | ||||||
| 
 |                 f'\n' | ||||||
|                 # TODO: use `.msg.preetty_struct` for this! |                 # TODO: use `.msg.preetty_struct` for this! | ||||||
|                 f'{msg}\n' |                 f'{msg}\n' | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|  | @ -836,8 +836,10 @@ class Actor: | ||||||
|             )] |             )] | ||||||
|         except KeyError: |         except KeyError: | ||||||
|             report: str = ( |             report: str = ( | ||||||
|                 'Ignoring invalid IPC ctx msg!\n\n' |                 'Ignoring invalid IPC msg!?\n' | ||||||
|                 f'<=? {uid}\n\n' |                 f'Ctx seems to not/no-longer exist??\n' | ||||||
|  |                 f'\n' | ||||||
|  |                 f'<=? {uid}\n' | ||||||
|                 f'  |_{pretty_struct.pformat(msg)}\n' |                 f'  |_{pretty_struct.pformat(msg)}\n' | ||||||
|             ) |             ) | ||||||
|             match msg: |             match msg: | ||||||
|  | @ -1284,6 +1286,7 @@ class Actor: | ||||||
|             f'Actor-runtime cancel request from {requester_type}\n\n' |             f'Actor-runtime cancel request from {requester_type}\n\n' | ||||||
|             f'<=c) {requesting_uid}\n' |             f'<=c) {requesting_uid}\n' | ||||||
|             f'  |_{self}\n' |             f'  |_{self}\n' | ||||||
|  |             f'\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # TODO: what happens here when we self-cancel tho? |         # TODO: what happens here when we self-cancel tho? | ||||||
|  | @ -1303,13 +1306,15 @@ class Actor: | ||||||
|                 lock_req_ctx.has_outcome |                 lock_req_ctx.has_outcome | ||||||
|             ): |             ): | ||||||
|                 msg += ( |                 msg += ( | ||||||
|                     '-> Cancelling active debugger request..\n' |                     f'\n' | ||||||
|  |                     f'-> Cancelling active debugger request..\n' | ||||||
|                     f'|_{_debug.Lock.repr()}\n\n' |                     f'|_{_debug.Lock.repr()}\n\n' | ||||||
|                     f'|_{lock_req_ctx}\n\n' |                     f'|_{lock_req_ctx}\n\n' | ||||||
|                 ) |                 ) | ||||||
|                 # lock_req_ctx._scope.cancel() |                 # lock_req_ctx._scope.cancel() | ||||||
|                 # TODO: wrap this in a method-API.. |                 # TODO: wrap this in a method-API.. | ||||||
|                 debug_req.req_cs.cancel() |                 debug_req.req_cs.cancel() | ||||||
|  |                 # if lock_req_ctx: | ||||||
| 
 | 
 | ||||||
|             # self-cancel **all** ongoing RPC tasks |             # self-cancel **all** ongoing RPC tasks | ||||||
|             await self.cancel_rpc_tasks( |             await self.cancel_rpc_tasks( | ||||||
|  | @ -1718,11 +1723,15 @@ async def async_main( | ||||||
|         # parent is kept alive as a resilient service until |         # parent is kept alive as a resilient service until | ||||||
|         # cancellation steps have (mostly) occurred in |         # cancellation steps have (mostly) occurred in | ||||||
|         # a deterministic way. |         # a deterministic way. | ||||||
|         async with trio.open_nursery() as root_nursery: |         async with trio.open_nursery( | ||||||
|  |             strict_exception_groups=False, | ||||||
|  |         ) as root_nursery: | ||||||
|             actor._root_n = root_nursery |             actor._root_n = root_nursery | ||||||
|             assert actor._root_n |             assert actor._root_n | ||||||
| 
 | 
 | ||||||
|             async with trio.open_nursery() as service_nursery: |             async with trio.open_nursery( | ||||||
|  |                 strict_exception_groups=False, | ||||||
|  |             ) as service_nursery: | ||||||
|                 # This nursery is used to handle all inbound |                 # This nursery is used to handle all inbound | ||||||
|                 # connections to us such that if the TCP server |                 # connections to us such that if the TCP server | ||||||
|                 # is killed, connections can continue to process |                 # is killed, connections can continue to process | ||||||
|  |  | ||||||
|  | @ -0,0 +1,833 @@ | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | SC friendly shared memory management geared at real-time | ||||||
|  | processing. | ||||||
|  | 
 | ||||||
|  | Support for ``numpy`` compatible array-buffers is provided but is | ||||||
|  | considered optional within the context of this runtime-library. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | from sys import byteorder | ||||||
|  | import time | ||||||
|  | from typing import Optional | ||||||
|  | from multiprocessing import shared_memory as shm | ||||||
|  | from multiprocessing.shared_memory import ( | ||||||
|  |     SharedMemory, | ||||||
|  |     ShareableList, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | from msgspec import Struct | ||||||
|  | import tractor | ||||||
|  | 
 | ||||||
|  | from .log import get_logger | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _USE_POSIX = getattr(shm, '_USE_POSIX', False) | ||||||
|  | if _USE_POSIX: | ||||||
|  |     from _posixshmem import shm_unlink | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | try: | ||||||
|  |     import numpy as np | ||||||
|  |     from numpy.lib import recfunctions as rfn | ||||||
|  |     import nptyping | ||||||
|  | except ImportError: | ||||||
|  |     pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def disable_mantracker(): | ||||||
|  |     ''' | ||||||
|  |     Disable all ``multiprocessing``` "resource tracking" machinery since | ||||||
|  |     it's an absolute multi-threaded mess of non-SC madness. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     from multiprocessing import resource_tracker as mantracker | ||||||
|  | 
 | ||||||
|  |     # Tell the "resource tracker" thing to fuck off. | ||||||
|  |     class ManTracker(mantracker.ResourceTracker): | ||||||
|  |         def register(self, name, rtype): | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         def unregister(self, name, rtype): | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         def ensure_running(self): | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |     # "know your land and know your prey" | ||||||
|  |     # https://www.dailymotion.com/video/x6ozzco | ||||||
|  |     mantracker._resource_tracker = ManTracker() | ||||||
|  |     mantracker.register = mantracker._resource_tracker.register | ||||||
|  |     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||||
|  |     mantracker.unregister = mantracker._resource_tracker.unregister | ||||||
|  |     mantracker.getfd = mantracker._resource_tracker.getfd | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | disable_mantracker() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SharedInt: | ||||||
|  |     ''' | ||||||
|  |     Wrapper around a single entry shared memory array which | ||||||
|  |     holds an ``int`` value used as an index counter. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         shm: SharedMemory, | ||||||
|  |     ) -> None: | ||||||
|  |         self._shm = shm | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def value(self) -> int: | ||||||
|  |         return int.from_bytes(self._shm.buf, byteorder) | ||||||
|  | 
 | ||||||
|  |     @value.setter | ||||||
|  |     def value(self, value) -> None: | ||||||
|  |         self._shm.buf[:] = value.to_bytes(self._shm.size, byteorder) | ||||||
|  | 
 | ||||||
|  |     def destroy(self) -> None: | ||||||
|  |         if _USE_POSIX: | ||||||
|  |             # We manually unlink to bypass all the "resource tracker" | ||||||
|  |             # nonsense meant for non-SC systems. | ||||||
|  |             name = self._shm.name | ||||||
|  |             try: | ||||||
|  |                 shm_unlink(name) | ||||||
|  |             except FileNotFoundError: | ||||||
|  |                 # might be a teardown race here? | ||||||
|  |                 log.warning(f'Shm for {name} already unlinked?') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class NDToken(Struct, frozen=True): | ||||||
|  |     ''' | ||||||
|  |     Internal represenation of a shared memory ``numpy`` array "token" | ||||||
|  |     which can be used to key and load a system (OS) wide shm entry | ||||||
|  |     and correctly read the array by type signature. | ||||||
|  | 
 | ||||||
|  |     This type is msg safe. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     shm_name: str  # this servers as a "key" value | ||||||
|  |     shm_first_index_name: str | ||||||
|  |     shm_last_index_name: str | ||||||
|  |     dtype_descr: tuple | ||||||
|  |     size: int  # in struct-array index / row terms | ||||||
|  | 
 | ||||||
|  |     # TODO: use nptyping here on dtypes | ||||||
|  |     @property | ||||||
|  |     def dtype(self) -> list[tuple[str, str, tuple[int, ...]]]: | ||||||
|  |         return np.dtype( | ||||||
|  |             list( | ||||||
|  |                 map(tuple, self.dtype_descr) | ||||||
|  |             ) | ||||||
|  |         ).descr | ||||||
|  | 
 | ||||||
|  |     def as_msg(self): | ||||||
|  |         return self.to_dict() | ||||||
|  | 
 | ||||||
|  |     @classmethod | ||||||
|  |     def from_msg(cls, msg: dict) -> NDToken: | ||||||
|  |         if isinstance(msg, NDToken): | ||||||
|  |             return msg | ||||||
|  | 
 | ||||||
|  |         # TODO: native struct decoding | ||||||
|  |         # return _token_dec.decode(msg) | ||||||
|  | 
 | ||||||
|  |         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr'])) | ||||||
|  |         return NDToken(**msg) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # _token_dec = msgspec.msgpack.Decoder(NDToken) | ||||||
|  | 
 | ||||||
|  | # TODO: this api? | ||||||
|  | # _known_tokens = tractor.ActorVar('_shm_tokens', {}) | ||||||
|  | # _known_tokens = tractor.ContextStack('_known_tokens', ) | ||||||
|  | # _known_tokens = trio.RunVar('shms', {}) | ||||||
|  | 
 | ||||||
|  | # TODO: this should maybe be provided via | ||||||
|  | # a `.trionics.maybe_open_context()` wrapper factory? | ||||||
|  | # process-local store of keys to tokens | ||||||
|  | _known_tokens: dict[str, NDToken] = {} | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_shm_token(key: str) -> NDToken | None: | ||||||
|  |     ''' | ||||||
|  |     Convenience func to check if a token | ||||||
|  |     for the provided key is known by this process. | ||||||
|  | 
 | ||||||
|  |     Returns either the ``numpy`` token or a string for a shared list. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     return _known_tokens.get(key) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _make_token( | ||||||
|  |     key: str, | ||||||
|  |     size: int, | ||||||
|  |     dtype: np.dtype, | ||||||
|  | 
 | ||||||
|  | ) -> NDToken: | ||||||
|  |     ''' | ||||||
|  |     Create a serializable token that can be used | ||||||
|  |     to access a shared array. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     return NDToken( | ||||||
|  |         shm_name=key, | ||||||
|  |         shm_first_index_name=key + "_first", | ||||||
|  |         shm_last_index_name=key + "_last", | ||||||
|  |         dtype_descr=tuple(np.dtype(dtype).descr), | ||||||
|  |         size=size, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ShmArray: | ||||||
|  |     ''' | ||||||
|  |     A shared memory ``numpy.ndarray`` API. | ||||||
|  | 
 | ||||||
|  |     An underlying shared memory buffer is allocated based on | ||||||
|  |     a user specified ``numpy.ndarray``. This fixed size array | ||||||
|  |     can be read and written to by pushing data both onto the "front" | ||||||
|  |     or "back" of a set index range. The indexes for the "first" and | ||||||
|  |     "last" index are themselves stored in shared memory (accessed via | ||||||
|  |     ``SharedInt`` interfaces) values such that multiple processes can | ||||||
|  |     interact with the same array using a synchronized-index. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         shmarr: np.ndarray, | ||||||
|  |         first: SharedInt, | ||||||
|  |         last: SharedInt, | ||||||
|  |         shm: SharedMemory, | ||||||
|  |         # readonly: bool = True, | ||||||
|  |     ) -> None: | ||||||
|  |         self._array = shmarr | ||||||
|  | 
 | ||||||
|  |         # indexes for first and last indices corresponding | ||||||
|  |         # to fille data | ||||||
|  |         self._first = first | ||||||
|  |         self._last = last | ||||||
|  | 
 | ||||||
|  |         self._len = len(shmarr) | ||||||
|  |         self._shm = shm | ||||||
|  |         self._post_init: bool = False | ||||||
|  | 
 | ||||||
|  |         # pushing data does not write the index (aka primary key) | ||||||
|  |         self._write_fields: list[str] | None = None | ||||||
|  |         dtype = shmarr.dtype | ||||||
|  |         if dtype.fields: | ||||||
|  |             self._write_fields = list(shmarr.dtype.fields.keys())[1:] | ||||||
|  | 
 | ||||||
|  |     # TODO: ringbuf api? | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def _token(self) -> NDToken: | ||||||
|  |         return NDToken( | ||||||
|  |             shm_name=self._shm.name, | ||||||
|  |             shm_first_index_name=self._first._shm.name, | ||||||
|  |             shm_last_index_name=self._last._shm.name, | ||||||
|  |             dtype_descr=tuple(self._array.dtype.descr), | ||||||
|  |             size=self._len, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def token(self) -> dict: | ||||||
|  |         """Shared memory token that can be serialized and used by | ||||||
|  |         another process to attach to this array. | ||||||
|  |         """ | ||||||
|  |         return self._token.as_msg() | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def index(self) -> int: | ||||||
|  |         return self._last.value % self._len | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def array(self) -> np.ndarray: | ||||||
|  |         ''' | ||||||
|  |         Return an up-to-date ``np.ndarray`` view of the | ||||||
|  |         so-far-written data to the underlying shm buffer. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         a = self._array[self._first.value:self._last.value] | ||||||
|  | 
 | ||||||
|  |         # first, last = self._first.value, self._last.value | ||||||
|  |         # a = self._array[first:last] | ||||||
|  | 
 | ||||||
|  |         # TODO: eventually comment this once we've not seen it in the | ||||||
|  |         # wild in a long time.. | ||||||
|  |         # XXX: race where first/last indexes cause a reader | ||||||
|  |         # to load an empty array.. | ||||||
|  |         if len(a) == 0 and self._post_init: | ||||||
|  |             raise RuntimeError('Empty array race condition hit!?') | ||||||
|  |             # breakpoint() | ||||||
|  | 
 | ||||||
|  |         return a | ||||||
|  | 
 | ||||||
|  |     def ustruct( | ||||||
|  |         self, | ||||||
|  |         fields: Optional[list[str]] = None, | ||||||
|  | 
 | ||||||
|  |         # type that all field values will be cast to | ||||||
|  |         # in the returned view. | ||||||
|  |         common_dtype: np.dtype = float, | ||||||
|  | 
 | ||||||
|  |     ) -> np.ndarray: | ||||||
|  | 
 | ||||||
|  |         array = self._array | ||||||
|  | 
 | ||||||
|  |         if fields: | ||||||
|  |             selection = array[fields] | ||||||
|  |             # fcount = len(fields) | ||||||
|  |         else: | ||||||
|  |             selection = array | ||||||
|  |             # fcount = len(array.dtype.fields) | ||||||
|  | 
 | ||||||
|  |         # XXX: manual ``.view()`` attempt that also doesn't work. | ||||||
|  |         # uview = selection.view( | ||||||
|  |         #     dtype='<f16', | ||||||
|  |         # ).reshape(-1, 4, order='A') | ||||||
|  | 
 | ||||||
|  |         # assert len(selection) == len(uview) | ||||||
|  | 
 | ||||||
|  |         u = rfn.structured_to_unstructured( | ||||||
|  |             selection, | ||||||
|  |             # dtype=float, | ||||||
|  |             copy=True, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf) | ||||||
|  |         # array[:] = a[:] | ||||||
|  |         return u | ||||||
|  |         # return ShmArray( | ||||||
|  |         #     shmarr=u, | ||||||
|  |         #     first=self._first, | ||||||
|  |         #     last=self._last, | ||||||
|  |         #     shm=self._shm | ||||||
|  |         # ) | ||||||
|  | 
 | ||||||
|  |     def last( | ||||||
|  |         self, | ||||||
|  |         length: int = 1, | ||||||
|  | 
 | ||||||
|  |     ) -> np.ndarray: | ||||||
|  |         ''' | ||||||
|  |         Return the last ``length``'s worth of ("row") entries from the | ||||||
|  |         array. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         return self.array[-length:] | ||||||
|  | 
 | ||||||
|  |     def push( | ||||||
|  |         self, | ||||||
|  |         data: np.ndarray, | ||||||
|  | 
 | ||||||
|  |         field_map: Optional[dict[str, str]] = None, | ||||||
|  |         prepend: bool = False, | ||||||
|  |         update_first: bool = True, | ||||||
|  |         start: int | None = None, | ||||||
|  | 
 | ||||||
|  |     ) -> int: | ||||||
|  |         ''' | ||||||
|  |         Ring buffer like "push" to append data | ||||||
|  |         into the buffer and return updated "last" index. | ||||||
|  | 
 | ||||||
|  |         NB: no actual ring logic yet to give a "loop around" on overflow | ||||||
|  |         condition, lel. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         length = len(data) | ||||||
|  | 
 | ||||||
|  |         if prepend: | ||||||
|  |             index = (start or self._first.value) - length | ||||||
|  | 
 | ||||||
|  |             if index < 0: | ||||||
|  |                 raise ValueError( | ||||||
|  |                     f'Array size of {self._len} was overrun during prepend.\n' | ||||||
|  |                     f'You have passed {abs(index)} too many datums.' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             index = start if start is not None else self._last.value | ||||||
|  | 
 | ||||||
|  |         end = index + length | ||||||
|  | 
 | ||||||
|  |         if field_map: | ||||||
|  |             src_names, dst_names = zip(*field_map.items()) | ||||||
|  |         else: | ||||||
|  |             dst_names = src_names = self._write_fields | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             self._array[ | ||||||
|  |                 list(dst_names) | ||||||
|  |             ][index:end] = data[list(src_names)][:] | ||||||
|  | 
 | ||||||
|  |             # NOTE: there was a race here between updating | ||||||
|  |             # the first and last indices and when the next reader | ||||||
|  |             # tries to access ``.array`` (which due to the index | ||||||
|  |             # overlap will be empty). Pretty sure we've fixed it now | ||||||
|  |             # but leaving this here as a reminder. | ||||||
|  |             if ( | ||||||
|  |                 prepend | ||||||
|  |                 and update_first | ||||||
|  |                 and length | ||||||
|  |             ): | ||||||
|  |                 assert index < self._first.value | ||||||
|  | 
 | ||||||
|  |             if ( | ||||||
|  |                 index < self._first.value | ||||||
|  |                 and update_first | ||||||
|  |             ): | ||||||
|  |                 assert prepend, 'prepend=True not passed but index decreased?' | ||||||
|  |                 self._first.value = index | ||||||
|  | 
 | ||||||
|  |             elif not prepend: | ||||||
|  |                 self._last.value = end | ||||||
|  | 
 | ||||||
|  |             self._post_init = True | ||||||
|  |             return end | ||||||
|  | 
 | ||||||
|  |         except ValueError as err: | ||||||
|  |             if field_map: | ||||||
|  |                 raise | ||||||
|  | 
 | ||||||
|  |             # should raise if diff detected | ||||||
|  |             self.diff_err_fields(data) | ||||||
|  |             raise err | ||||||
|  | 
 | ||||||
|  |     def diff_err_fields( | ||||||
|  |         self, | ||||||
|  |         data: np.ndarray, | ||||||
|  |     ) -> None: | ||||||
|  |         # reraise with any field discrepancy | ||||||
|  |         our_fields, their_fields = ( | ||||||
|  |             set(self._array.dtype.fields), | ||||||
|  |             set(data.dtype.fields), | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         only_in_ours = our_fields - their_fields | ||||||
|  |         only_in_theirs = their_fields - our_fields | ||||||
|  | 
 | ||||||
|  |         if only_in_ours: | ||||||
|  |             raise TypeError( | ||||||
|  |                 f"Input array is missing field(s): {only_in_ours}" | ||||||
|  |             ) | ||||||
|  |         elif only_in_theirs: | ||||||
|  |             raise TypeError( | ||||||
|  |                 f"Input array has unknown field(s): {only_in_theirs}" | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |     # TODO: support "silent" prepends that don't update ._first.value? | ||||||
|  |     def prepend( | ||||||
|  |         self, | ||||||
|  |         data: np.ndarray, | ||||||
|  |     ) -> int: | ||||||
|  |         end = self.push(data, prepend=True) | ||||||
|  |         assert end | ||||||
|  | 
 | ||||||
|  |     def close(self) -> None: | ||||||
|  |         self._first._shm.close() | ||||||
|  |         self._last._shm.close() | ||||||
|  |         self._shm.close() | ||||||
|  | 
 | ||||||
|  |     def destroy(self) -> None: | ||||||
|  |         if _USE_POSIX: | ||||||
|  |             # We manually unlink to bypass all the "resource tracker" | ||||||
|  |             # nonsense meant for non-SC systems. | ||||||
|  |             shm_unlink(self._shm.name) | ||||||
|  | 
 | ||||||
|  |         self._first.destroy() | ||||||
|  |         self._last.destroy() | ||||||
|  | 
 | ||||||
|  |     def flush(self) -> None: | ||||||
|  |         # TODO: flush to storage backend like markestore? | ||||||
|  |         ... | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def open_shm_ndarray( | ||||||
|  |     size: int, | ||||||
|  |     key: str | None = None, | ||||||
|  |     dtype: np.dtype | None = None, | ||||||
|  |     append_start_index: int | None = None, | ||||||
|  |     readonly: bool = False, | ||||||
|  | 
 | ||||||
|  | ) -> ShmArray: | ||||||
|  |     ''' | ||||||
|  |     Open a memory shared ``numpy`` using the standard library. | ||||||
|  | 
 | ||||||
|  |     This call unlinks (aka permanently destroys) the buffer on teardown | ||||||
|  |     and thus should be used from the parent-most accessor (process). | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # create new shared mem segment for which we | ||||||
|  |     # have write permission | ||||||
|  |     a = np.zeros(size, dtype=dtype) | ||||||
|  |     a['index'] = np.arange(len(a)) | ||||||
|  | 
 | ||||||
|  |     shm = SharedMemory( | ||||||
|  |         name=key, | ||||||
|  |         create=True, | ||||||
|  |         size=a.nbytes | ||||||
|  |     ) | ||||||
|  |     array = np.ndarray( | ||||||
|  |         a.shape, | ||||||
|  |         dtype=a.dtype, | ||||||
|  |         buffer=shm.buf | ||||||
|  |     ) | ||||||
|  |     array[:] = a[:] | ||||||
|  |     array.setflags(write=int(not readonly)) | ||||||
|  | 
 | ||||||
|  |     token = _make_token( | ||||||
|  |         key=key, | ||||||
|  |         size=size, | ||||||
|  |         dtype=dtype, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # create single entry arrays for storing an first and last indices | ||||||
|  |     first = SharedInt( | ||||||
|  |         shm=SharedMemory( | ||||||
|  |             name=token.shm_first_index_name, | ||||||
|  |             create=True, | ||||||
|  |             size=4,  # std int | ||||||
|  |         ) | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     last = SharedInt( | ||||||
|  |         shm=SharedMemory( | ||||||
|  |             name=token.shm_last_index_name, | ||||||
|  |             create=True, | ||||||
|  |             size=4,  # std int | ||||||
|  |         ) | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # Start the "real-time" append-updated (or "pushed-to") section | ||||||
|  |     # after some start index: ``append_start_index``. This allows appending | ||||||
|  |     # from a start point in the array which isn't the 0 index and looks | ||||||
|  |     # something like, | ||||||
|  |     # ------------------------- | ||||||
|  |     # |              |        i | ||||||
|  |     # _________________________ | ||||||
|  |     # <-------------> <-------> | ||||||
|  |     #  history         real-time | ||||||
|  |     # | ||||||
|  |     # Once fully "prepended", the history section will leave the | ||||||
|  |     # ``ShmArray._start.value: int = 0`` and the yet-to-be written | ||||||
|  |     # real-time section will start at ``ShmArray.index: int``. | ||||||
|  | 
 | ||||||
|  |     # this sets the index to nearly 2/3rds into the the length of | ||||||
|  |     # the buffer leaving at least a "days worth of second samples" | ||||||
|  |     # for the real-time section. | ||||||
|  |     if append_start_index is None: | ||||||
|  |         append_start_index = round(size * 0.616) | ||||||
|  | 
 | ||||||
|  |     last.value = first.value = append_start_index | ||||||
|  | 
 | ||||||
|  |     shmarr = ShmArray( | ||||||
|  |         array, | ||||||
|  |         first, | ||||||
|  |         last, | ||||||
|  |         shm, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     assert shmarr._token == token | ||||||
|  |     _known_tokens[key] = shmarr.token | ||||||
|  | 
 | ||||||
|  |     # "unlink" created shm on process teardown by | ||||||
|  |     # pushing teardown calls onto actor context stack | ||||||
|  |     stack = tractor.current_actor().lifetime_stack | ||||||
|  |     stack.callback(shmarr.close) | ||||||
|  |     stack.callback(shmarr.destroy) | ||||||
|  | 
 | ||||||
|  |     return shmarr | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def attach_shm_ndarray( | ||||||
|  |     token: tuple[str, str, tuple[str, str]], | ||||||
|  |     readonly: bool = True, | ||||||
|  | 
 | ||||||
|  | ) -> ShmArray: | ||||||
|  |     ''' | ||||||
|  |     Attach to an existing shared memory array previously | ||||||
|  |     created by another process using ``open_shared_array``. | ||||||
|  | 
 | ||||||
|  |     No new shared mem is allocated but wrapper types for read/write | ||||||
|  |     access are constructed. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     token = NDToken.from_msg(token) | ||||||
|  |     key = token.shm_name | ||||||
|  | 
 | ||||||
|  |     if key in _known_tokens: | ||||||
|  |         assert NDToken.from_msg(_known_tokens[key]) == token, "WTF" | ||||||
|  | 
 | ||||||
|  |     # XXX: ugh, looks like due to the ``shm_open()`` C api we can't | ||||||
|  |     # actually place files in a subdir, see discussion here: | ||||||
|  |     # https://stackoverflow.com/a/11103289 | ||||||
|  | 
 | ||||||
|  |     # attach to array buffer and view as per dtype | ||||||
|  |     _err: Optional[Exception] = None | ||||||
|  |     for _ in range(3): | ||||||
|  |         try: | ||||||
|  |             shm = SharedMemory( | ||||||
|  |                 name=key, | ||||||
|  |                 create=False, | ||||||
|  |             ) | ||||||
|  |             break | ||||||
|  |         except OSError as oserr: | ||||||
|  |             _err = oserr | ||||||
|  |             time.sleep(0.1) | ||||||
|  |     else: | ||||||
|  |         if _err: | ||||||
|  |             raise _err | ||||||
|  | 
 | ||||||
|  |     shmarr = np.ndarray( | ||||||
|  |         (token.size,), | ||||||
|  |         dtype=token.dtype, | ||||||
|  |         buffer=shm.buf | ||||||
|  |     ) | ||||||
|  |     shmarr.setflags(write=int(not readonly)) | ||||||
|  | 
 | ||||||
|  |     first = SharedInt( | ||||||
|  |         shm=SharedMemory( | ||||||
|  |             name=token.shm_first_index_name, | ||||||
|  |             create=False, | ||||||
|  |             size=4,  # std int | ||||||
|  |         ), | ||||||
|  |     ) | ||||||
|  |     last = SharedInt( | ||||||
|  |         shm=SharedMemory( | ||||||
|  |             name=token.shm_last_index_name, | ||||||
|  |             create=False, | ||||||
|  |             size=4,  # std int | ||||||
|  |         ), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # make sure we can read | ||||||
|  |     first.value | ||||||
|  | 
 | ||||||
|  |     sha = ShmArray( | ||||||
|  |         shmarr, | ||||||
|  |         first, | ||||||
|  |         last, | ||||||
|  |         shm, | ||||||
|  |     ) | ||||||
|  |     # read test | ||||||
|  |     sha.array | ||||||
|  | 
 | ||||||
|  |     # Stash key -> token knowledge for future queries | ||||||
|  |     # via `maybe_opepn_shm_array()` but only after we know | ||||||
|  |     # we can attach. | ||||||
|  |     if key not in _known_tokens: | ||||||
|  |         _known_tokens[key] = token | ||||||
|  | 
 | ||||||
|  |     # "close" attached shm on actor teardown | ||||||
|  |     tractor.current_actor().lifetime_stack.callback(sha.close) | ||||||
|  | 
 | ||||||
|  |     return sha | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def maybe_open_shm_ndarray( | ||||||
|  |     key: str,  # unique identifier for segment | ||||||
|  |     size: int, | ||||||
|  |     dtype: np.dtype | None = None, | ||||||
|  |     append_start_index: int = 0, | ||||||
|  |     readonly: bool = True, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[ShmArray, bool]: | ||||||
|  |     ''' | ||||||
|  |     Attempt to attach to a shared memory block using a "key" lookup | ||||||
|  |     to registered blocks in the users overall "system" registry | ||||||
|  |     (presumes you don't have the block's explicit token). | ||||||
|  | 
 | ||||||
|  |     This function is meant to solve the problem of discovering whether | ||||||
|  |     a shared array token has been allocated or discovered by the actor | ||||||
|  |     running in **this** process. Systems where multiple actors may seek | ||||||
|  |     to access a common block can use this function to attempt to acquire | ||||||
|  |     a token as discovered by the actors who have previously stored | ||||||
|  |     a "key" -> ``NDToken`` map in an actor local (aka python global) | ||||||
|  |     variable. | ||||||
|  | 
 | ||||||
|  |     If you know the explicit ``NDToken`` for your memory segment instead | ||||||
|  |     use ``attach_shm_array``. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     try: | ||||||
|  |         # see if we already know this key | ||||||
|  |         token = _known_tokens[key] | ||||||
|  |         return ( | ||||||
|  |             attach_shm_ndarray( | ||||||
|  |                 token=token, | ||||||
|  |                 readonly=readonly, | ||||||
|  |             ), | ||||||
|  |             False,  # not newly opened | ||||||
|  |         ) | ||||||
|  |     except KeyError: | ||||||
|  |         log.warning(f"Could not find {key} in shms cache") | ||||||
|  |         if dtype: | ||||||
|  |             token = _make_token( | ||||||
|  |                 key, | ||||||
|  |                 size=size, | ||||||
|  |                 dtype=dtype, | ||||||
|  |             ) | ||||||
|  |         else: | ||||||
|  | 
 | ||||||
|  |             try: | ||||||
|  |                 return ( | ||||||
|  |                     attach_shm_ndarray( | ||||||
|  |                         token=token, | ||||||
|  |                         readonly=readonly, | ||||||
|  |                     ), | ||||||
|  |                     False, | ||||||
|  |                 ) | ||||||
|  |             except FileNotFoundError: | ||||||
|  |                 log.warning(f"Could not attach to shm with token {token}") | ||||||
|  | 
 | ||||||
|  |         # This actor does not know about memory | ||||||
|  |         # associated with the provided "key". | ||||||
|  |         # Attempt to open a block and expect | ||||||
|  |         # to fail if a block has been allocated | ||||||
|  |         # on the OS by someone else. | ||||||
|  |         return ( | ||||||
|  |             open_shm_ndarray( | ||||||
|  |                 key=key, | ||||||
|  |                 size=size, | ||||||
|  |                 dtype=dtype, | ||||||
|  |                 append_start_index=append_start_index, | ||||||
|  |                 readonly=readonly, | ||||||
|  |             ), | ||||||
|  |             True, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ShmList(ShareableList): | ||||||
|  |     ''' | ||||||
|  |     Carbon copy of ``.shared_memory.ShareableList`` with a few | ||||||
|  |     enhancements: | ||||||
|  | 
 | ||||||
|  |     - readonly mode via instance var flag  `._readonly: bool` | ||||||
|  |     - ``.__getitem__()`` accepts ``slice`` inputs | ||||||
|  |     - exposes the underlying buffer "name" as a ``.key: str`` | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         sequence: list | None = None, | ||||||
|  |         *, | ||||||
|  |         name: str | None = None, | ||||||
|  |         readonly: bool = True | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         self._readonly = readonly | ||||||
|  |         self._key = name | ||||||
|  |         return super().__init__( | ||||||
|  |             sequence=sequence, | ||||||
|  |             name=name, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def key(self) -> str: | ||||||
|  |         return self._key | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def readonly(self) -> bool: | ||||||
|  |         return self._readonly | ||||||
|  | 
 | ||||||
|  |     def __setitem__( | ||||||
|  |         self, | ||||||
|  |         position, | ||||||
|  |         value, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  | 
 | ||||||
|  |         # mimick ``numpy`` error | ||||||
|  |         if self._readonly: | ||||||
|  |             raise ValueError('assignment destination is read-only') | ||||||
|  | 
 | ||||||
|  |         return super().__setitem__(position, value) | ||||||
|  | 
 | ||||||
|  |     def __getitem__( | ||||||
|  |         self, | ||||||
|  |         indexish, | ||||||
|  |     ) -> list: | ||||||
|  | 
 | ||||||
|  |         # NOTE: this is a non-writeable view (copy?) of the buffer | ||||||
|  |         # in a new list instance. | ||||||
|  |         if isinstance(indexish, slice): | ||||||
|  |             return list(self)[indexish] | ||||||
|  | 
 | ||||||
|  |         return super().__getitem__(indexish) | ||||||
|  | 
 | ||||||
|  |     # TODO: should we offer a `.array` and `.push()` equivalent | ||||||
|  |     # to the `ShmArray`? | ||||||
|  |     # currently we have the following limitations: | ||||||
|  |     # - can't write slices of input using traditional slice-assign | ||||||
|  |     #   syntax due to the ``ShareableList.__setitem__()`` implementation. | ||||||
|  |     # - ``list(shmlist)`` returns a non-mutable copy instead of | ||||||
|  |     #   a writeable view which would be handier numpy-style ops. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def open_shm_list( | ||||||
|  |     key: str, | ||||||
|  |     sequence: list | None = None, | ||||||
|  |     size: int = int(2 ** 10), | ||||||
|  |     dtype: float | int | bool | str | bytes | None = float, | ||||||
|  |     readonly: bool = True, | ||||||
|  | 
 | ||||||
|  | ) -> ShmList: | ||||||
|  | 
 | ||||||
|  |     if sequence is None: | ||||||
|  |         default = { | ||||||
|  |             float: 0., | ||||||
|  |             int: 0, | ||||||
|  |             bool: True, | ||||||
|  |             str: 'doggy', | ||||||
|  |             None: None, | ||||||
|  |         }[dtype] | ||||||
|  |         sequence = [default] * size | ||||||
|  | 
 | ||||||
|  |     shml = ShmList( | ||||||
|  |         sequence=sequence, | ||||||
|  |         name=key, | ||||||
|  |         readonly=readonly, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # "close" attached shm on actor teardown | ||||||
|  |     try: | ||||||
|  |         actor = tractor.current_actor() | ||||||
|  |         actor.lifetime_stack.callback(shml.shm.close) | ||||||
|  |         actor.lifetime_stack.callback(shml.shm.unlink) | ||||||
|  |     except RuntimeError: | ||||||
|  |         log.warning('tractor runtime not active, skipping teardown steps') | ||||||
|  | 
 | ||||||
|  |     return shml | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def attach_shm_list( | ||||||
|  |     key: str, | ||||||
|  |     readonly: bool = False, | ||||||
|  | 
 | ||||||
|  | ) -> ShmList: | ||||||
|  | 
 | ||||||
|  |     return ShmList( | ||||||
|  |         name=key, | ||||||
|  |         readonly=readonly, | ||||||
|  |     ) | ||||||
|  | @ -327,8 +327,9 @@ async def soft_kill( | ||||||
|     uid: tuple[str, str] = portal.channel.uid |     uid: tuple[str, str] = portal.channel.uid | ||||||
|     try: |     try: | ||||||
|         log.cancel( |         log.cancel( | ||||||
|             'Soft killing sub-actor via portal request\n' |             f'Soft killing sub-actor via portal request\n' | ||||||
|             f'c)> {portal.chan.uid}\n' |             f'\n' | ||||||
|  |             f'(c=> {portal.chan.uid}\n' | ||||||
|             f'  |_{proc}\n' |             f'  |_{proc}\n' | ||||||
|         ) |         ) | ||||||
|         # wait on sub-proc to signal termination |         # wait on sub-proc to signal termination | ||||||
|  |  | ||||||
|  | @ -108,6 +108,7 @@ def is_main_process() -> bool: | ||||||
|     return mp.current_process().name == 'MainProcess' |     return mp.current_process().name == 'MainProcess' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO, more verby name? | ||||||
| def debug_mode() -> bool: | def debug_mode() -> bool: | ||||||
|     ''' |     ''' | ||||||
|     Bool determining if "debug mode" is on which enables |     Bool determining if "debug mode" is on which enables | ||||||
|  |  | ||||||
|  | @ -45,9 +45,11 @@ from .trionics import ( | ||||||
|     BroadcastReceiver, |     BroadcastReceiver, | ||||||
| ) | ) | ||||||
| from tractor.msg import ( | from tractor.msg import ( | ||||||
|     # Return, |     Error, | ||||||
|     # Stop, |     Return, | ||||||
|  |     Stop, | ||||||
|     MsgType, |     MsgType, | ||||||
|  |     PayloadT, | ||||||
|     Yield, |     Yield, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
|  | @ -70,8 +72,7 @@ class MsgStream(trio.abc.Channel): | ||||||
|     A bidirectional message stream for receiving logically sequenced |     A bidirectional message stream for receiving logically sequenced | ||||||
|     values over an inter-actor IPC `Channel`. |     values over an inter-actor IPC `Channel`. | ||||||
| 
 | 
 | ||||||
|     This is the type returned to a local task which entered either | 
 | ||||||
|     `Portal.open_stream_from()` or `Context.open_stream()`. |  | ||||||
| 
 | 
 | ||||||
|     Termination rules: |     Termination rules: | ||||||
| 
 | 
 | ||||||
|  | @ -94,6 +95,9 @@ class MsgStream(trio.abc.Channel): | ||||||
|         self._rx_chan = rx_chan |         self._rx_chan = rx_chan | ||||||
|         self._broadcaster = _broadcaster |         self._broadcaster = _broadcaster | ||||||
| 
 | 
 | ||||||
|  |         # any actual IPC msg which is effectively an `EndOfStream` | ||||||
|  |         self._stop_msg: bool|Stop = False | ||||||
|  | 
 | ||||||
|         # flag to denote end of stream |         # flag to denote end of stream | ||||||
|         self._eoc: bool|trio.EndOfChannel = False |         self._eoc: bool|trio.EndOfChannel = False | ||||||
|         self._closed: bool|trio.ClosedResourceError = False |         self._closed: bool|trio.ClosedResourceError = False | ||||||
|  | @ -125,16 +129,67 @@ class MsgStream(trio.abc.Channel): | ||||||
|     def receive_nowait( |     def receive_nowait( | ||||||
|         self, |         self, | ||||||
|         expect_msg: MsgType = Yield, |         expect_msg: MsgType = Yield, | ||||||
|     ): |     ) -> PayloadT: | ||||||
|         ctx: Context = self._ctx |         ctx: Context = self._ctx | ||||||
|         return ctx._pld_rx.recv_pld_nowait( |         ( | ||||||
|  |             msg, | ||||||
|  |             pld, | ||||||
|  |         ) = ctx._pld_rx.recv_msg_nowait( | ||||||
|             ipc=self, |             ipc=self, | ||||||
|             expect_msg=expect_msg, |             expect_msg=expect_msg, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |         # ?TODO, maybe factor this into a hyper-common `unwrap_pld()` | ||||||
|  |         # | ||||||
|  |         match msg: | ||||||
|  | 
 | ||||||
|  |             # XXX, these never seems to ever hit? cool? | ||||||
|  |             case Stop(): | ||||||
|  |                 log.cancel( | ||||||
|  |                     f'Msg-stream was ended via stop msg\n' | ||||||
|  |                     f'{msg}' | ||||||
|  |                 ) | ||||||
|  |             case Error(): | ||||||
|  |                 log.error( | ||||||
|  |                     f'Msg-stream was ended via error msg\n' | ||||||
|  |                     f'{msg}' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             # XXX NOTE, always set any final result on the ctx to | ||||||
|  |             # avoid teardown race conditions where previously this msg | ||||||
|  |             # would be consumed silently (by `.aclose()` doing its | ||||||
|  |             # own "msg drain loop" but WITHOUT those `drained: lists[MsgType]` | ||||||
|  |             # being post-close-processed! | ||||||
|  |             # | ||||||
|  |             # !!TODO, see the equiv todo-comment in `.receive()` | ||||||
|  |             # around the `if drained:` where we should prolly | ||||||
|  |             # ACTUALLY be doing this post-close processing?? | ||||||
|  |             # | ||||||
|  |             case Return(pld=pld): | ||||||
|  |                 log.warning( | ||||||
|  |                     f'Msg-stream final result msg for IPC ctx?\n' | ||||||
|  |                     f'{msg}' | ||||||
|  |                 ) | ||||||
|  |                 # XXX TODO, this **should be covered** by higher | ||||||
|  |                 # scoped runtime-side method calls such as | ||||||
|  |                 # `Context._deliver_msg()`, so you should never | ||||||
|  |                 # really see the warning above or else something | ||||||
|  |                 # racy/out-of-order is likely going on between | ||||||
|  |                 # actor-runtime-side push tasks and the user-app-side | ||||||
|  |                 # consume tasks! | ||||||
|  |                 # -[ ] figure out that set of race cases and fix! | ||||||
|  |                 # -[ ] possibly return the `msg` given an input | ||||||
|  |                 #     arg-flag is set so we can process the `Return` | ||||||
|  |                 #     from the `.aclose()` caller? | ||||||
|  |                 # | ||||||
|  |                 # breakpoint()  # to debug this RACE CASE! | ||||||
|  |                 ctx._result = pld | ||||||
|  |                 ctx._outcome_msg = msg | ||||||
|  | 
 | ||||||
|  |         return pld | ||||||
|  | 
 | ||||||
|     async def receive( |     async def receive( | ||||||
|         self, |         self, | ||||||
| 
 |  | ||||||
|         hide_tb: bool = False, |         hide_tb: bool = False, | ||||||
|     ): |     ): | ||||||
|         ''' |         ''' | ||||||
|  | @ -154,7 +209,7 @@ class MsgStream(trio.abc.Channel): | ||||||
|         #     except trio.EndOfChannel: |         #     except trio.EndOfChannel: | ||||||
|         #         raise StopAsyncIteration |         #         raise StopAsyncIteration | ||||||
|         # |         # | ||||||
|         # see ``.aclose()`` for notes on the old behaviour prior to |         # see `.aclose()` for notes on the old behaviour prior to | ||||||
|         # introducing this |         # introducing this | ||||||
|         if self._eoc: |         if self._eoc: | ||||||
|             raise self._eoc |             raise self._eoc | ||||||
|  | @ -165,7 +220,11 @@ class MsgStream(trio.abc.Channel): | ||||||
|         src_err: Exception|None = None  # orig tb |         src_err: Exception|None = None  # orig tb | ||||||
|         try: |         try: | ||||||
|             ctx: Context = self._ctx |             ctx: Context = self._ctx | ||||||
|             return await ctx._pld_rx.recv_pld(ipc=self) |             pld = await ctx._pld_rx.recv_pld( | ||||||
|  |                 ipc=self, | ||||||
|  |                 expect_msg=Yield, | ||||||
|  |             ) | ||||||
|  |             return pld | ||||||
| 
 | 
 | ||||||
|         # XXX: the stream terminates on either of: |         # XXX: the stream terminates on either of: | ||||||
|         # - `self._rx_chan.receive()` raising  after manual closure |         # - `self._rx_chan.receive()` raising  after manual closure | ||||||
|  | @ -174,7 +233,7 @@ class MsgStream(trio.abc.Channel): | ||||||
|         # - via a `Stop`-msg received from remote peer task. |         # - via a `Stop`-msg received from remote peer task. | ||||||
|         #   NOTE |         #   NOTE | ||||||
|         #   |_ previously this was triggered by calling |         #   |_ previously this was triggered by calling | ||||||
|         #   ``._rx_chan.aclose()`` on the send side of the channel |         #   `._rx_chan.aclose()` on the send side of the channel | ||||||
|         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' |         #   inside `Actor._deliver_ctx_payload()`, but now the 'stop' | ||||||
|         #   message handling gets delegated to `PldRFx.recv_pld()` |         #   message handling gets delegated to `PldRFx.recv_pld()` | ||||||
|         #   internals. |         #   internals. | ||||||
|  | @ -198,11 +257,14 @@ class MsgStream(trio.abc.Channel): | ||||||
|         # terminated and signal this local iterator to stop |         # terminated and signal this local iterator to stop | ||||||
|         drained: list[Exception|dict] = await self.aclose() |         drained: list[Exception|dict] = await self.aclose() | ||||||
|         if drained: |         if drained: | ||||||
|             # ?TODO? pass these to the `._ctx._drained_msgs: deque` |         #  ^^^^^^^^TODO? pass these to the `._ctx._drained_msgs: | ||||||
|             # and then iterate them as part of any `.wait_for_result()` call? |         #  deque` and then iterate them as part of any | ||||||
|  |         #  `.wait_for_result()` call? | ||||||
|  |         # | ||||||
|  |         # -[ ] move the match-case processing from | ||||||
|  |         #     `.receive_nowait()` instead to right here, use it from | ||||||
|  |         #     a for msg in drained:` post-proc loop? | ||||||
|         # |         # | ||||||
|             # from .devx import pause |  | ||||||
|             # await pause() |  | ||||||
|             log.warning( |             log.warning( | ||||||
|                 'Drained context msgs during closure\n\n' |                 'Drained context msgs during closure\n\n' | ||||||
|                 f'{drained}' |                 f'{drained}' | ||||||
|  | @ -265,9 +327,6 @@ class MsgStream(trio.abc.Channel): | ||||||
|          - more or less we try to maintain adherance to trio's `.aclose()` semantics: |          - more or less we try to maintain adherance to trio's `.aclose()` semantics: | ||||||
|            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose |            https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||||
|         ''' |         ''' | ||||||
| 
 |  | ||||||
|         # rx_chan = self._rx_chan |  | ||||||
| 
 |  | ||||||
|         # XXX NOTE XXX |         # XXX NOTE XXX | ||||||
|         # it's SUPER IMPORTANT that we ensure we don't DOUBLE |         # it's SUPER IMPORTANT that we ensure we don't DOUBLE | ||||||
|         # DRAIN msgs on closure so avoid getting stuck handing on |         # DRAIN msgs on closure so avoid getting stuck handing on | ||||||
|  | @ -279,15 +338,16 @@ class MsgStream(trio.abc.Channel): | ||||||
|             # this stream has already been closed so silently succeed as |             # this stream has already been closed so silently succeed as | ||||||
|             # per ``trio.AsyncResource`` semantics. |             # per ``trio.AsyncResource`` semantics. | ||||||
|             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose |             # https://trio.readthedocs.io/en/stable/reference-io.html#trio.abc.AsyncResource.aclose | ||||||
|  |             # import tractor | ||||||
|  |             # await tractor.pause() | ||||||
|             return [] |             return [] | ||||||
| 
 | 
 | ||||||
|         ctx: Context = self._ctx |         ctx: Context = self._ctx | ||||||
|         drained: list[Exception|dict] = [] |         drained: list[Exception|dict] = [] | ||||||
|         while not drained: |         while not drained: | ||||||
|             try: |             try: | ||||||
|                 maybe_final_msg = self.receive_nowait( |                 maybe_final_msg: Yield|Return = self.receive_nowait( | ||||||
|                     # allow_msgs=[Yield, Return], |                     expect_msg=Yield|Return, | ||||||
|                     expect_msg=Yield, |  | ||||||
|                 ) |                 ) | ||||||
|                 if maybe_final_msg: |                 if maybe_final_msg: | ||||||
|                     log.debug( |                     log.debug( | ||||||
|  | @ -372,8 +432,10 @@ class MsgStream(trio.abc.Channel): | ||||||
|         #         await rx_chan.aclose() |         #         await rx_chan.aclose() | ||||||
| 
 | 
 | ||||||
|         if not self._eoc: |         if not self._eoc: | ||||||
|  |             this_side: str = self._ctx.side | ||||||
|  |             peer_side: str = self._ctx.peer_side | ||||||
|             message: str = ( |             message: str = ( | ||||||
|                 f'Stream self-closed by {self._ctx.side!r}-side before EoC\n' |                 f'Stream self-closed by {this_side!r}-side before EoC from {peer_side!r}\n' | ||||||
|                 # } bc a stream is a "scope"/msging-phase inside an IPC |                 # } bc a stream is a "scope"/msging-phase inside an IPC | ||||||
|                 f'x}}>\n' |                 f'x}}>\n' | ||||||
|                 f'  |_{self}\n' |                 f'  |_{self}\n' | ||||||
|  | @ -381,9 +443,19 @@ class MsgStream(trio.abc.Channel): | ||||||
|             log.cancel(message) |             log.cancel(message) | ||||||
|             self._eoc = trio.EndOfChannel(message) |             self._eoc = trio.EndOfChannel(message) | ||||||
| 
 | 
 | ||||||
|  |             if ( | ||||||
|  |                 (rx_chan := self._rx_chan) | ||||||
|  |                 and | ||||||
|  |                 (stats := rx_chan.statistics()).tasks_waiting_receive | ||||||
|  |             ): | ||||||
|  |                 log.cancel( | ||||||
|  |                     f'Msg-stream is closing but there is still reader tasks,\n' | ||||||
|  |                     f'{stats}\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? |         # ?XXX WAIT, why do we not close the local mem chan `._rx_chan` XXX? | ||||||
|         # => NO, DEFINITELY NOT! <= |         # => NO, DEFINITELY NOT! <= | ||||||
|         # if we're a bi-dir ``MsgStream`` BECAUSE this same |         # if we're a bi-dir `MsgStream` BECAUSE this same | ||||||
|         # core-msg-loop mem recv-chan is used to deliver the |         # core-msg-loop mem recv-chan is used to deliver the | ||||||
|         # potential final result from the surrounding inter-actor |         # potential final result from the surrounding inter-actor | ||||||
|         # `Context` so we don't want to close it until that |         # `Context` so we don't want to close it until that | ||||||
|  |  | ||||||
|  | @ -395,17 +395,23 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|     # `ActorNursery.start_actor()`). |     # `ActorNursery.start_actor()`). | ||||||
| 
 | 
 | ||||||
|     # errors from this daemon actor nursery bubble up to caller |     # errors from this daemon actor nursery bubble up to caller | ||||||
|     async with trio.open_nursery() as da_nursery: |     async with trio.open_nursery( | ||||||
|  |         strict_exception_groups=False, | ||||||
|  |         # ^XXX^ TODO? instead unpack any RAE as per "loose" style? | ||||||
|  |     ) as da_nursery: | ||||||
|         try: |         try: | ||||||
|             # This is the inner level "run in actor" nursery. It is |             # This is the inner level "run in actor" nursery. It is | ||||||
|             # awaited first since actors spawned in this way (using |             # awaited first since actors spawned in this way (using | ||||||
|             # ``ActorNusery.run_in_actor()``) are expected to only |             # `ActorNusery.run_in_actor()`) are expected to only | ||||||
|             # return a single result and then complete (i.e. be canclled |             # return a single result and then complete (i.e. be canclled | ||||||
|             # gracefully). Errors collected from these actors are |             # gracefully). Errors collected from these actors are | ||||||
|             # immediately raised for handling by a supervisor strategy. |             # immediately raised for handling by a supervisor strategy. | ||||||
|             # As such if the strategy propagates any error(s) upwards |             # As such if the strategy propagates any error(s) upwards | ||||||
|             # the above "daemon actor" nursery will be notified. |             # the above "daemon actor" nursery will be notified. | ||||||
|             async with trio.open_nursery() as ria_nursery: |             async with trio.open_nursery( | ||||||
|  |                 strict_exception_groups=False, | ||||||
|  |                 # ^XXX^ TODO? instead unpack any RAE as per "loose" style? | ||||||
|  |             ) as ria_nursery: | ||||||
| 
 | 
 | ||||||
|                 an = ActorNursery( |                 an = ActorNursery( | ||||||
|                     actor, |                     actor, | ||||||
|  | @ -472,8 +478,8 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
|                             ContextCancelled, |                             ContextCancelled, | ||||||
|                         }: |                         }: | ||||||
|                             log.cancel( |                             log.cancel( | ||||||
|                                 'Actor-nursery caught remote cancellation\n\n' |                                 'Actor-nursery caught remote cancellation\n' | ||||||
| 
 |                                 '\n' | ||||||
|                                 f'{inner_err.tb_str}' |                                 f'{inner_err.tb_str}' | ||||||
|                             ) |                             ) | ||||||
|                         else: |                         else: | ||||||
|  | @ -565,7 +571,9 @@ async def _open_and_supervise_one_cancels_all_nursery( | ||||||
| @acm | @acm | ||||||
| # @api_frame | # @api_frame | ||||||
| async def open_nursery( | async def open_nursery( | ||||||
|  |     hide_tb: bool = True, | ||||||
|     **kwargs, |     **kwargs, | ||||||
|  |     # ^TODO, paramspec for `open_root_actor()` | ||||||
| 
 | 
 | ||||||
| ) -> typing.AsyncGenerator[ActorNursery, None]: | ) -> typing.AsyncGenerator[ActorNursery, None]: | ||||||
|     ''' |     ''' | ||||||
|  | @ -583,7 +591,7 @@ async def open_nursery( | ||||||
|     which cancellation scopes correspond to each spawned subactor set. |     which cancellation scopes correspond to each spawned subactor set. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = True |     __tracebackhide__: bool = hide_tb | ||||||
|     implicit_runtime: bool = False |     implicit_runtime: bool = False | ||||||
|     actor: Actor = current_actor(err_on_no_runtime=False) |     actor: Actor = current_actor(err_on_no_runtime=False) | ||||||
|     an: ActorNursery|None = None |     an: ActorNursery|None = None | ||||||
|  | @ -599,7 +607,10 @@ async def open_nursery( | ||||||
|             # mark us for teardown on exit |             # mark us for teardown on exit | ||||||
|             implicit_runtime: bool = True |             implicit_runtime: bool = True | ||||||
| 
 | 
 | ||||||
|             async with open_root_actor(**kwargs) as actor: |             async with open_root_actor( | ||||||
|  |                 hide_tb=hide_tb, | ||||||
|  |                 **kwargs, | ||||||
|  |             ) as actor: | ||||||
|                 assert actor is current_actor() |                 assert actor is current_actor() | ||||||
| 
 | 
 | ||||||
|                 try: |                 try: | ||||||
|  | @ -637,8 +648,10 @@ async def open_nursery( | ||||||
|         # show frame on any internal runtime-scope error |         # show frame on any internal runtime-scope error | ||||||
|         if ( |         if ( | ||||||
|             an |             an | ||||||
|             and not an.cancelled |             and | ||||||
|             and an._scope_error |             not an.cancelled | ||||||
|  |             and | ||||||
|  |             an._scope_error | ||||||
|         ): |         ): | ||||||
|             __tracebackhide__: bool = False |             __tracebackhide__: bool = False | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -19,10 +19,16 @@ Various helpers/utils for auditing your `tractor` app and/or the | ||||||
| core runtime. | core runtime. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import ( | ||||||
|  |     asynccontextmanager as acm, | ||||||
|  | ) | ||||||
|  | import os | ||||||
| import pathlib | import pathlib | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
|  | from tractor.devx._debug import ( | ||||||
|  |     BoxedMaybeException, | ||||||
|  | ) | ||||||
| from .pytest import ( | from .pytest import ( | ||||||
|     tractor_test as tractor_test |     tractor_test as tractor_test | ||||||
| ) | ) | ||||||
|  | @ -59,7 +65,12 @@ def mk_cmd( | ||||||
|     exs_subpath: str = 'debugging', |     exs_subpath: str = 'debugging', | ||||||
| ) -> str: | ) -> str: | ||||||
|     ''' |     ''' | ||||||
|     Generate a shell command suitable to pass to ``pexpect.spawn()``. |     Generate a shell command suitable to pass to `pexpect.spawn()` | ||||||
|  |     which runs the script as a python program's entrypoint. | ||||||
|  | 
 | ||||||
|  |     In particular ensure we disable the new tb coloring via unsetting | ||||||
|  |     `$PYTHON_COLORS` so that `pexpect` can pattern match without | ||||||
|  |     color-escape-codes. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     script_path: pathlib.Path = ( |     script_path: pathlib.Path = ( | ||||||
|  | @ -67,10 +78,15 @@ def mk_cmd( | ||||||
|         / exs_subpath |         / exs_subpath | ||||||
|         / f'{ex_name}.py' |         / f'{ex_name}.py' | ||||||
|     ) |     ) | ||||||
|     return ' '.join([ |     py_cmd: str = ' '.join([ | ||||||
|         'python', |         'python', | ||||||
|         str(script_path) |         str(script_path) | ||||||
|     ]) |     ]) | ||||||
|  |     # XXX, required for py 3.13+ | ||||||
|  |     # https://docs.python.org/3/using/cmdline.html#using-on-controlling-color | ||||||
|  |     # https://docs.python.org/3/using/cmdline.html#envvar-PYTHON_COLORS | ||||||
|  |     os.environ['PYTHON_COLORS'] = '0' | ||||||
|  |     return py_cmd | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
|  | @ -85,12 +101,13 @@ async def expect_ctxc( | ||||||
|     ''' |     ''' | ||||||
|     if yay: |     if yay: | ||||||
|         try: |         try: | ||||||
|             yield |             yield (maybe_exc := BoxedMaybeException()) | ||||||
|             raise RuntimeError('Never raised ctxc?') |             raise RuntimeError('Never raised ctxc?') | ||||||
|         except tractor.ContextCancelled: |         except tractor.ContextCancelled as ctxc: | ||||||
|  |             maybe_exc.value = ctxc | ||||||
|             if reraise: |             if reraise: | ||||||
|                 raise |                 raise | ||||||
|             else: |             else: | ||||||
|                 return |                 return | ||||||
|     else: |     else: | ||||||
|         yield |         yield (maybe_exc := BoxedMaybeException()) | ||||||
|  |  | ||||||
|  | @ -317,8 +317,6 @@ class Lock: | ||||||
|         we_released: bool = False |         we_released: bool = False | ||||||
|         ctx_in_debug: Context|None = cls.ctx_in_debug |         ctx_in_debug: Context|None = cls.ctx_in_debug | ||||||
|         repl_task: Task|Thread|None = DebugStatus.repl_task |         repl_task: Task|Thread|None = DebugStatus.repl_task | ||||||
|         message: str = '' |  | ||||||
| 
 |  | ||||||
|         try: |         try: | ||||||
|             if not DebugStatus.is_main_trio_thread(): |             if not DebugStatus.is_main_trio_thread(): | ||||||
|                 thread: threading.Thread = threading.current_thread() |                 thread: threading.Thread = threading.current_thread() | ||||||
|  | @ -333,6 +331,10 @@ class Lock: | ||||||
|                 return False |                 return False | ||||||
| 
 | 
 | ||||||
|             task: Task = current_task() |             task: Task = current_task() | ||||||
|  |             message: str = ( | ||||||
|  |                 'TTY NOT RELEASED on behalf of caller\n' | ||||||
|  |                 f'|_{task}\n' | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             # sanity check that if we're the root actor |             # sanity check that if we're the root actor | ||||||
|             # the lock is marked as such. |             # the lock is marked as such. | ||||||
|  | @ -347,11 +349,6 @@ class Lock: | ||||||
|             else: |             else: | ||||||
|                 assert DebugStatus.repl_task is not task |                 assert DebugStatus.repl_task is not task | ||||||
| 
 | 
 | ||||||
|             message: str = ( |  | ||||||
|                 'TTY lock was NOT released on behalf of caller\n' |  | ||||||
|                 f'|_{task}\n' |  | ||||||
|             ) |  | ||||||
| 
 |  | ||||||
|             lock: trio.StrictFIFOLock = cls._debug_lock |             lock: trio.StrictFIFOLock = cls._debug_lock | ||||||
|             owner: Task = lock.statistics().owner |             owner: Task = lock.statistics().owner | ||||||
|             if ( |             if ( | ||||||
|  | @ -366,23 +363,21 @@ class Lock: | ||||||
|                 # correct task, greenback-spawned-task and/or thread |                 # correct task, greenback-spawned-task and/or thread | ||||||
|                 # being set to the `.repl_task` such that the above |                 # being set to the `.repl_task` such that the above | ||||||
|                 # condition matches and we actually release the lock. |                 # condition matches and we actually release the lock. | ||||||
|  |                 # | ||||||
|                 # This is particular of note from `.pause_from_sync()`! |                 # This is particular of note from `.pause_from_sync()`! | ||||||
| 
 |  | ||||||
|             ): |             ): | ||||||
|                 cls._debug_lock.release() |                 cls._debug_lock.release() | ||||||
|                 we_released: bool = True |                 we_released: bool = True | ||||||
|                 if repl_task: |                 if repl_task: | ||||||
|                     message: str = ( |                     message: str = ( | ||||||
|                         'Lock released on behalf of root-actor-local REPL owner\n' |                         'TTY released on behalf of root-actor-local REPL owner\n' | ||||||
|                         f'|_{repl_task}\n' |                         f'|_{repl_task}\n' | ||||||
|                     ) |                     ) | ||||||
|                 else: |                 else: | ||||||
|                     message: str = ( |                     message: str = ( | ||||||
|                         'TTY lock released by us on behalf of remote peer?\n' |                         'TTY released by us on behalf of remote peer?\n' | ||||||
|                         f'|_ctx_in_debug: {ctx_in_debug}\n\n' |                         f'{ctx_in_debug}\n' | ||||||
|                     ) |                     ) | ||||||
|                     # mk_pdb().set_trace() |  | ||||||
|                 # elif owner: |  | ||||||
| 
 | 
 | ||||||
|         except RuntimeError as rte: |         except RuntimeError as rte: | ||||||
|             log.exception( |             log.exception( | ||||||
|  | @ -400,7 +395,8 @@ class Lock: | ||||||
|             req_handler_finished: trio.Event|None = Lock.req_handler_finished |             req_handler_finished: trio.Event|None = Lock.req_handler_finished | ||||||
|             if ( |             if ( | ||||||
|                 not lock_stats.owner |                 not lock_stats.owner | ||||||
|                 and req_handler_finished is None |                 and | ||||||
|  |                 req_handler_finished is None | ||||||
|             ): |             ): | ||||||
|                 message += ( |                 message += ( | ||||||
|                     '-> No new task holds the TTY lock!\n\n' |                     '-> No new task holds the TTY lock!\n\n' | ||||||
|  | @ -418,8 +414,8 @@ class Lock: | ||||||
|                     repl_task |                     repl_task | ||||||
|                 ) |                 ) | ||||||
|                 message += ( |                 message += ( | ||||||
|                     f'A non-caller task still owns this lock on behalf of ' |                     f'A non-caller task still owns this lock on behalf of\n' | ||||||
|                     f'`{behalf_of_task}`\n' |                     f'{behalf_of_task}\n' | ||||||
|                     f'lock owner task: {lock_stats.owner}\n' |                     f'lock owner task: {lock_stats.owner}\n' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  | @ -447,8 +443,6 @@ class Lock: | ||||||
| 
 | 
 | ||||||
|             if message: |             if message: | ||||||
|                 log.devx(message) |                 log.devx(message) | ||||||
|             else: |  | ||||||
|                 import pdbp; pdbp.set_trace() |  | ||||||
| 
 | 
 | ||||||
|         return we_released |         return we_released | ||||||
| 
 | 
 | ||||||
|  | @ -668,10 +662,11 @@ async def lock_stdio_for_peer( | ||||||
|         fail_reason: str = ( |         fail_reason: str = ( | ||||||
|             f'on behalf of peer\n\n' |             f'on behalf of peer\n\n' | ||||||
|             f'x)<=\n' |             f'x)<=\n' | ||||||
|             f'  |_{subactor_task_uid!r}@{ctx.chan.uid!r}\n\n' |             f'   |_{subactor_task_uid!r}@{ctx.chan.uid!r}\n' | ||||||
| 
 |             f'\n' | ||||||
|             'Forcing `Lock.release()` due to acquire failure!\n\n' |             'Forcing `Lock.release()` due to acquire failure!\n\n' | ||||||
|             f'x)=> {ctx}\n' |             f'x)=>\n' | ||||||
|  |             f'   {ctx}' | ||||||
|         ) |         ) | ||||||
|         if isinstance(req_err, trio.Cancelled): |         if isinstance(req_err, trio.Cancelled): | ||||||
|             fail_reason = ( |             fail_reason = ( | ||||||
|  | @ -1179,7 +1174,7 @@ async def request_root_stdio_lock( | ||||||
|     log.devx( |     log.devx( | ||||||
|         'Initing stdio-lock request task with root actor' |         'Initing stdio-lock request task with root actor' | ||||||
|     ) |     ) | ||||||
|     # TODO: likely we can implement this mutex more generally as |     # TODO: can we implement this mutex more generally as | ||||||
|     #      a `._sync.Lock`? |     #      a `._sync.Lock`? | ||||||
|     # -[ ] simply add the wrapping needed for the debugger specifics? |     # -[ ] simply add the wrapping needed for the debugger specifics? | ||||||
|     #   - the `__pld_spec__` impl and maybe better APIs for the client |     #   - the `__pld_spec__` impl and maybe better APIs for the client | ||||||
|  | @ -1190,6 +1185,7 @@ async def request_root_stdio_lock( | ||||||
|     #   - https://docs.python.org/3.8/library/multiprocessing.html#multiprocessing.RLock |     #   - https://docs.python.org/3.8/library/multiprocessing.html#multiprocessing.RLock | ||||||
|     DebugStatus.req_finished = trio.Event() |     DebugStatus.req_finished = trio.Event() | ||||||
|     DebugStatus.req_task = current_task() |     DebugStatus.req_task = current_task() | ||||||
|  |     req_err: BaseException|None = None | ||||||
|     try: |     try: | ||||||
|         from tractor._discovery import get_root |         from tractor._discovery import get_root | ||||||
|         # NOTE: we need this to ensure that this task exits |         # NOTE: we need this to ensure that this task exits | ||||||
|  | @ -1212,6 +1208,7 @@ async def request_root_stdio_lock( | ||||||
|             # ) |             # ) | ||||||
|             DebugStatus.req_cs = req_cs |             DebugStatus.req_cs = req_cs | ||||||
|             req_ctx: Context|None = None |             req_ctx: Context|None = None | ||||||
|  |             ctx_eg: BaseExceptionGroup|None = None | ||||||
|             try: |             try: | ||||||
|                 # TODO: merge into single async with ? |                 # TODO: merge into single async with ? | ||||||
|                 async with get_root() as portal: |                 async with get_root() as portal: | ||||||
|  | @ -1242,7 +1239,12 @@ async def request_root_stdio_lock( | ||||||
|                         ) |                         ) | ||||||
| 
 | 
 | ||||||
|                         # try: |                         # try: | ||||||
|                         assert status.subactor_uid == actor_uid |                         if (locker := status.subactor_uid) != actor_uid: | ||||||
|  |                             raise DebugStateError( | ||||||
|  |                                 f'Root actor locked by another peer !?\n' | ||||||
|  |                                 f'locker: {locker!r}\n' | ||||||
|  |                                 f'actor_uid: {actor_uid}\n' | ||||||
|  |                             ) | ||||||
|                         assert status.cid |                         assert status.cid | ||||||
|                         # except AttributeError: |                         # except AttributeError: | ||||||
|                         #     log.exception('failed pldspec asserts!') |                         #     log.exception('failed pldspec asserts!') | ||||||
|  | @ -1279,10 +1281,11 @@ async def request_root_stdio_lock( | ||||||
|                         f'Exitting {req_ctx.side!r}-side of locking req_ctx\n' |                         f'Exitting {req_ctx.side!r}-side of locking req_ctx\n' | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|             except ( |             except* ( | ||||||
|                 tractor.ContextCancelled, |                 tractor.ContextCancelled, | ||||||
|                 trio.Cancelled, |                 trio.Cancelled, | ||||||
|             ): |             ) as _taskc_eg: | ||||||
|  |                 ctx_eg = _taskc_eg | ||||||
|                 log.cancel( |                 log.cancel( | ||||||
|                     'Debug lock request was CANCELLED?\n\n' |                     'Debug lock request was CANCELLED?\n\n' | ||||||
|                     f'<=c) {req_ctx}\n' |                     f'<=c) {req_ctx}\n' | ||||||
|  | @ -1291,21 +1294,23 @@ async def request_root_stdio_lock( | ||||||
|                 ) |                 ) | ||||||
|                 raise |                 raise | ||||||
| 
 | 
 | ||||||
|             except ( |             except* ( | ||||||
|                 BaseException, |                 BaseException, | ||||||
|             ) as ctx_err: |             ) as _ctx_eg: | ||||||
|  |                 ctx_eg = _ctx_eg | ||||||
|                 message: str = ( |                 message: str = ( | ||||||
|                     'Failed during debug request dialog with root actor?\n\n' |                     'Failed during debug request dialog with root actor?\n' | ||||||
|                 ) |                 ) | ||||||
|                 if (req_ctx := DebugStatus.req_ctx): |                 if (req_ctx := DebugStatus.req_ctx): | ||||||
|                     message += ( |                     message += ( | ||||||
|                         f'<=x) {req_ctx}\n\n' |                         f'<=x)\n' | ||||||
|  |                         f' |_{req_ctx}\n' | ||||||
|                         f'Cancelling IPC ctx!\n' |                         f'Cancelling IPC ctx!\n' | ||||||
|                     ) |                     ) | ||||||
|                     try: |                     try: | ||||||
|                         await req_ctx.cancel() |                         await req_ctx.cancel() | ||||||
|                     except trio.ClosedResourceError  as terr: |                     except trio.ClosedResourceError  as terr: | ||||||
|                         ctx_err.add_note( |                         ctx_eg.add_note( | ||||||
|                             # f'Failed with {type(terr)!r} x)> `req_ctx.cancel()` ' |                             # f'Failed with {type(terr)!r} x)> `req_ctx.cancel()` ' | ||||||
|                             f'Failed with `req_ctx.cancel()` <x) {type(terr)!r} ' |                             f'Failed with `req_ctx.cancel()` <x) {type(terr)!r} ' | ||||||
|                         ) |                         ) | ||||||
|  | @ -1314,21 +1319,45 @@ async def request_root_stdio_lock( | ||||||
|                     message += 'Failed in `Portal.open_context()` call ??\n' |                     message += 'Failed in `Portal.open_context()` call ??\n' | ||||||
| 
 | 
 | ||||||
|                 log.exception(message) |                 log.exception(message) | ||||||
|                 ctx_err.add_note(message) |                 ctx_eg.add_note(message) | ||||||
|                 raise ctx_err |                 raise ctx_eg | ||||||
| 
 | 
 | ||||||
|     except ( |     except BaseException as _req_err: | ||||||
|  |         req_err = _req_err | ||||||
|  | 
 | ||||||
|  |         # XXX NOTE, since new `trio` enforces strict egs by default | ||||||
|  |         # we have to always handle the eg explicitly given the | ||||||
|  |         # `Portal.open_context()` call above (which implicitly opens | ||||||
|  |         # a nursery). | ||||||
|  |         match req_err: | ||||||
|  |             case BaseExceptionGroup(): | ||||||
|  |                 # for an eg of just one taskc, just unpack and raise | ||||||
|  |                 # since we want to propagate a plane ol' `Cancelled` | ||||||
|  |                 # up from the `.pause()` call. | ||||||
|  |                 excs: list[BaseException] = req_err.exceptions | ||||||
|  |                 if ( | ||||||
|  |                     len(excs) == 1 | ||||||
|  |                     and | ||||||
|  |                     type(exc := excs[0]) in ( | ||||||
|                         tractor.ContextCancelled, |                         tractor.ContextCancelled, | ||||||
|                         trio.Cancelled, |                         trio.Cancelled, | ||||||
|  |                     ) | ||||||
|                 ): |                 ): | ||||||
|                     log.cancel( |                     log.cancel( | ||||||
|                         'Debug lock request CANCELLED?\n' |                         'Debug lock request CANCELLED?\n' | ||||||
|                         f'{req_ctx}\n' |                         f'{req_ctx}\n' | ||||||
|                     ) |                     ) | ||||||
|         raise |                     raise exc | ||||||
|  |             case ( | ||||||
|  |                 tractor.ContextCancelled(), | ||||||
|  |                 trio.Cancelled(), | ||||||
|  |             ): | ||||||
|  |                 log.cancel( | ||||||
|  |                     'Debug lock request CANCELLED?\n' | ||||||
|  |                     f'{req_ctx}\n' | ||||||
|  |                 ) | ||||||
|  |                 raise exc | ||||||
| 
 | 
 | ||||||
|     except BaseException as req_err: |  | ||||||
|         # log.error('Failed to request root stdio-lock?') |  | ||||||
|         DebugStatus.req_err = req_err |         DebugStatus.req_err = req_err | ||||||
|         DebugStatus.release() |         DebugStatus.release() | ||||||
| 
 | 
 | ||||||
|  | @ -1406,7 +1435,7 @@ def any_connected_locker_child() -> bool: | ||||||
|     actor: Actor = current_actor() |     actor: Actor = current_actor() | ||||||
| 
 | 
 | ||||||
|     if not is_root_process(): |     if not is_root_process(): | ||||||
|         raise RuntimeError('This is a root-actor only API!') |         raise InternalError('This is a root-actor only API!') | ||||||
| 
 | 
 | ||||||
|     if ( |     if ( | ||||||
|         (ctx := Lock.ctx_in_debug) |         (ctx := Lock.ctx_in_debug) | ||||||
|  | @ -2143,11 +2172,12 @@ async def _pause( | ||||||
|     # `_enter_repl_sync()` into a common @cm? |     # `_enter_repl_sync()` into a common @cm? | ||||||
|     except BaseException as _pause_err: |     except BaseException as _pause_err: | ||||||
|         pause_err: BaseException = _pause_err |         pause_err: BaseException = _pause_err | ||||||
|  |         _repl_fail_report: str|None = _repl_fail_msg | ||||||
|         if isinstance(pause_err, bdb.BdbQuit): |         if isinstance(pause_err, bdb.BdbQuit): | ||||||
|             log.devx( |             log.devx( | ||||||
|                 'REPL for pdb was explicitly quit!\n' |                 'REPL for pdb was explicitly quit!\n' | ||||||
|             ) |             ) | ||||||
|             _repl_fail_msg = None |             _repl_fail_report = None | ||||||
| 
 | 
 | ||||||
|         # when the actor is mid-runtime cancellation the |         # when the actor is mid-runtime cancellation the | ||||||
|         # `Actor._service_n` might get closed before we can spawn |         # `Actor._service_n` might get closed before we can spawn | ||||||
|  | @ -2167,16 +2197,16 @@ async def _pause( | ||||||
|             return |             return | ||||||
| 
 | 
 | ||||||
|         elif isinstance(pause_err, trio.Cancelled): |         elif isinstance(pause_err, trio.Cancelled): | ||||||
|             _repl_fail_msg = ( |             _repl_fail_report += ( | ||||||
|                 'You called `tractor.pause()` from an already cancelled scope!\n\n' |                 'You called `tractor.pause()` from an already cancelled scope!\n\n' | ||||||
|                 'Consider `await tractor.pause(shield=True)` to make it work B)\n' |                 'Consider `await tractor.pause(shield=True)` to make it work B)\n' | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|             _repl_fail_msg += f'on behalf of {repl_task} ??\n' |             _repl_fail_report += f'on behalf of {repl_task} ??\n' | ||||||
| 
 | 
 | ||||||
|         if _repl_fail_msg: |         if _repl_fail_report: | ||||||
|             log.exception(_repl_fail_msg) |             log.exception(_repl_fail_report) | ||||||
| 
 | 
 | ||||||
|         if not actor.is_infected_aio(): |         if not actor.is_infected_aio(): | ||||||
|             DebugStatus.release(cancel_req_task=True) |             DebugStatus.release(cancel_req_task=True) | ||||||
|  | @ -2257,6 +2287,13 @@ def _set_trace( | ||||||
|     repl.set_trace(frame=caller_frame) |     repl.set_trace(frame=caller_frame) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # XXX TODO! XXX, ensure `pytest -s` doesn't just | ||||||
|  | # hang on this being called in a test.. XD | ||||||
|  | # -[ ] maybe something in our test suite or is there | ||||||
|  | #     some way we can detect output capture is enabled | ||||||
|  | #     from the process itself? | ||||||
|  | # |_ronny: ? | ||||||
|  | # | ||||||
| async def pause( | async def pause( | ||||||
|     *, |     *, | ||||||
|     hide_tb: bool = True, |     hide_tb: bool = True, | ||||||
|  | @ -3051,7 +3088,8 @@ async def maybe_wait_for_debugger( | ||||||
| 
 | 
 | ||||||
|     if ( |     if ( | ||||||
|         not debug_mode() |         not debug_mode() | ||||||
|         and not child_in_debug |         and | ||||||
|  |         not child_in_debug | ||||||
|     ): |     ): | ||||||
|         return False |         return False | ||||||
| 
 | 
 | ||||||
|  | @ -3109,7 +3147,7 @@ async def maybe_wait_for_debugger( | ||||||
|                 logmeth( |                 logmeth( | ||||||
|                     msg |                     msg | ||||||
|                     + |                     + | ||||||
|                     '\nRoot is waiting on tty lock to release from\n\n' |                     '\n^^ Root is waiting on tty lock release.. ^^\n' | ||||||
|                     # f'{caller_frame_info}\n' |                     # f'{caller_frame_info}\n' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  | @ -3163,6 +3201,15 @@ async def maybe_wait_for_debugger( | ||||||
|     return False |     return False | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | class BoxedMaybeException(Struct): | ||||||
|  |     ''' | ||||||
|  |     Box a maybe-exception for post-crash introspection usage | ||||||
|  |     from the body of a `open_crash_handler()` scope. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     value: BaseException|None = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| # TODO: better naming and what additionals? | # TODO: better naming and what additionals? | ||||||
| # - [ ] optional runtime plugging? | # - [ ] optional runtime plugging? | ||||||
| # - [ ] detection for sync vs. async code? | # - [ ] detection for sync vs. async code? | ||||||
|  | @ -3172,11 +3219,11 @@ async def maybe_wait_for_debugger( | ||||||
| @cm | @cm | ||||||
| def open_crash_handler( | def open_crash_handler( | ||||||
|     catch: set[BaseException] = { |     catch: set[BaseException] = { | ||||||
|         # Exception, |  | ||||||
|         BaseException, |         BaseException, | ||||||
|     }, |     }, | ||||||
|     ignore: set[BaseException] = { |     ignore: set[BaseException] = { | ||||||
|         KeyboardInterrupt, |         KeyboardInterrupt, | ||||||
|  |         trio.Cancelled, | ||||||
|     }, |     }, | ||||||
|     tb_hide: bool = True, |     tb_hide: bool = True, | ||||||
| ): | ): | ||||||
|  | @ -3193,9 +3240,6 @@ def open_crash_handler( | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = tb_hide |     __tracebackhide__: bool = tb_hide | ||||||
| 
 | 
 | ||||||
|     class BoxedMaybeException(Struct): |  | ||||||
|         value: BaseException|None = None |  | ||||||
| 
 |  | ||||||
|     # TODO, yield a `outcome.Error`-like boxed type? |     # TODO, yield a `outcome.Error`-like boxed type? | ||||||
|     # -[~] use `outcome.Value/Error` X-> frozen! |     # -[~] use `outcome.Value/Error` X-> frozen! | ||||||
|     # -[x] write our own..? |     # -[x] write our own..? | ||||||
|  | @ -3237,6 +3281,8 @@ def open_crash_handler( | ||||||
| def maybe_open_crash_handler( | def maybe_open_crash_handler( | ||||||
|     pdb: bool = False, |     pdb: bool = False, | ||||||
|     tb_hide: bool = True, |     tb_hide: bool = True, | ||||||
|  | 
 | ||||||
|  |     **kwargs, | ||||||
| ): | ): | ||||||
|     ''' |     ''' | ||||||
|     Same as `open_crash_handler()` but with bool input flag |     Same as `open_crash_handler()` but with bool input flag | ||||||
|  | @ -3247,9 +3293,11 @@ def maybe_open_crash_handler( | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = tb_hide |     __tracebackhide__: bool = tb_hide | ||||||
| 
 | 
 | ||||||
|     rtctx = nullcontext |     rtctx = nullcontext( | ||||||
|  |         enter_result=BoxedMaybeException() | ||||||
|  |     ) | ||||||
|     if pdb: |     if pdb: | ||||||
|         rtctx = open_crash_handler |         rtctx = open_crash_handler(**kwargs) | ||||||
| 
 | 
 | ||||||
|     with rtctx(): |     with rtctx as boxed_maybe_exc: | ||||||
|         yield |         yield boxed_maybe_exc | ||||||
|  |  | ||||||
|  | @ -33,6 +33,7 @@ from ._codec import ( | ||||||
| 
 | 
 | ||||||
|     apply_codec as apply_codec, |     apply_codec as apply_codec, | ||||||
|     mk_codec as mk_codec, |     mk_codec as mk_codec, | ||||||
|  |     mk_dec as mk_dec, | ||||||
|     MsgCodec as MsgCodec, |     MsgCodec as MsgCodec, | ||||||
|     MsgDec as MsgDec, |     MsgDec as MsgDec, | ||||||
|     current_codec as current_codec, |     current_codec as current_codec, | ||||||
|  |  | ||||||
|  | @ -61,6 +61,7 @@ from tractor.msg.pretty_struct import Struct | ||||||
| from tractor.msg.types import ( | from tractor.msg.types import ( | ||||||
|     mk_msg_spec, |     mk_msg_spec, | ||||||
|     MsgType, |     MsgType, | ||||||
|  |     PayloadMsg, | ||||||
| ) | ) | ||||||
| from tractor.log import get_logger | from tractor.log import get_logger | ||||||
| 
 | 
 | ||||||
|  | @ -80,6 +81,7 @@ class MsgDec(Struct): | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     _dec: msgpack.Decoder |     _dec: msgpack.Decoder | ||||||
|  |     # _ext_types_box: Struct|None = None | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|     def dec(self) -> msgpack.Decoder: |     def dec(self) -> msgpack.Decoder: | ||||||
|  | @ -179,21 +181,124 @@ class MsgDec(Struct): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def mk_dec( | def mk_dec( | ||||||
|     spec: Union[Type[Struct]]|Any = Any, |     spec: Union[Type[Struct]]|Type|None, | ||||||
|  | 
 | ||||||
|  |     # NOTE, required for ad-hoc type extensions to the underlying | ||||||
|  |     # serialization proto (which is default `msgpack`), | ||||||
|  |     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||||
|     dec_hook: Callable|None = None, |     dec_hook: Callable|None = None, | ||||||
|  |     ext_types: list[Type]|None = None, | ||||||
| 
 | 
 | ||||||
| ) -> MsgDec: | ) -> MsgDec: | ||||||
|     ''' |     ''' | ||||||
|     Create an IPC msg decoder, normally used as the |     Create an IPC msg decoder, a slightly higher level wrapper around | ||||||
|     `PayloadMsg.pld: PayloadT` field decoder inside a `PldRx`. |     a `msgspec.msgpack.Decoder` which provides, | ||||||
|  | 
 | ||||||
|  |     - easier introspection of the underlying type spec via | ||||||
|  |       the `.spec` and `.spec_str` attrs, | ||||||
|  |     - `.hook` access to the `Decoder.dec_hook()`, | ||||||
|  |     - automatic custom extension-types decode support when | ||||||
|  |       `dec_hook()` is provided such that any `PayloadMsg.pld` tagged | ||||||
|  |       as a type from from `ext_types` (presuming the `MsgCodec.encode()` also used | ||||||
|  |       a `.enc_hook()`) is processed and constructed by a `PldRx` implicitily. | ||||||
|  | 
 | ||||||
|  |     NOTE, as mentioned a `MsgDec` is normally used for `PayloadMsg.pld: PayloadT` field | ||||||
|  |     decoding inside an IPC-ctx-oriented `PldRx`. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     if ( | ||||||
|  |         spec is None | ||||||
|  |         and | ||||||
|  |         ext_types is None | ||||||
|  |     ): | ||||||
|  |         raise TypeError( | ||||||
|  |             f'MIssing type-`spec` for msg decoder!\n' | ||||||
|  |             f'\n' | ||||||
|  |             f'`spec=None` is **only** permitted is if custom extension types ' | ||||||
|  |             f'are provided via `ext_types`, meaning it must be non-`None`.\n' | ||||||
|  |             f'\n' | ||||||
|  |             f'In this case it is presumed that only the `ext_types`, ' | ||||||
|  |             f'which much be handled by a paired `dec_hook()`, ' | ||||||
|  |             f'will be permitted within the payload type-`spec`!\n' | ||||||
|  |             f'\n' | ||||||
|  |             f'spec = {spec!r}\n' | ||||||
|  |             f'dec_hook = {dec_hook!r}\n' | ||||||
|  |             f'ext_types = {ext_types!r}\n' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     if dec_hook: | ||||||
|  |         if ext_types is None: | ||||||
|  |             raise TypeError( | ||||||
|  |                 f'If extending the serializable types with a custom decode hook (`dec_hook()`), ' | ||||||
|  |                 f'you must also provide the expected type set that the hook will handle ' | ||||||
|  |                 f'via a `ext_types: Union[Type]|None = None` argument!\n' | ||||||
|  |                 f'\n' | ||||||
|  |                 f'dec_hook = {dec_hook!r}\n' | ||||||
|  |                 f'ext_types = {ext_types!r}\n' | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         # XXX, i *thought* we would require a boxing struct as per docs, | ||||||
|  |         # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||||
|  |         # |_ see comment, | ||||||
|  |         #  > Note that typed deserialization is required for | ||||||
|  |         #  > successful roundtripping here, so we pass `MyMessage` to | ||||||
|  |         #  > `Decoder`. | ||||||
|  |         # | ||||||
|  |         # BUT, turns out as long as you spec a union with `Raw` it | ||||||
|  |         # will work? kk B) | ||||||
|  |         # | ||||||
|  |         # maybe_box_struct = mk_boxed_ext_struct(ext_types) | ||||||
|  |         spec = Raw | Union[*ext_types] | ||||||
|  | 
 | ||||||
|     return MsgDec( |     return MsgDec( | ||||||
|         _dec=msgpack.Decoder( |         _dec=msgpack.Decoder( | ||||||
|             type=spec,  # like `MsgType[Any]` |             type=spec,  # like `MsgType[Any]` | ||||||
|             dec_hook=dec_hook, |             dec_hook=dec_hook, | ||||||
|  |         ), | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO? remove since didn't end up needing this? | ||||||
|  | def mk_boxed_ext_struct( | ||||||
|  |     ext_types: list[Type], | ||||||
|  | ) -> Struct: | ||||||
|  |     # NOTE, originally was to wrap non-msgpack-supported "extension | ||||||
|  |     # types" in a field-typed boxing struct, see notes around the | ||||||
|  |     # `dec_hook()` branch in `mk_dec()`. | ||||||
|  |     ext_types_union = Union[*ext_types] | ||||||
|  |     repr_ext_types_union: str = ( | ||||||
|  |         str(ext_types_union) | ||||||
|  |         or | ||||||
|  |         "|".join(ext_types) | ||||||
|  |     ) | ||||||
|  |     BoxedExtType = msgspec.defstruct( | ||||||
|  |         f'BoxedExts[{repr_ext_types_union}]', | ||||||
|  |         fields=[ | ||||||
|  |             ('boxed', ext_types_union), | ||||||
|  |         ], | ||||||
|  |     ) | ||||||
|  |     return BoxedExtType | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def unpack_spec_types( | ||||||
|  |     spec: Union[Type]|Type, | ||||||
|  | ) -> set[Type]: | ||||||
|  |     ''' | ||||||
|  |     Given an input type-`spec`, either a lone type | ||||||
|  |     or a `Union` of types (like `str|int|MyThing`), | ||||||
|  |     return a set of individual types. | ||||||
|  | 
 | ||||||
|  |     When `spec` is not a type-union returns `{spec,}`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     spec_subtypes: set[Union[Type]] = set( | ||||||
|  |          getattr( | ||||||
|  |              spec, | ||||||
|  |              '__args__', | ||||||
|  |              {spec,}, | ||||||
|          ) |          ) | ||||||
|     ) |     ) | ||||||
|  |     return spec_subtypes | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def mk_msgspec_table( | def mk_msgspec_table( | ||||||
|  | @ -273,6 +378,8 @@ class MsgCodec(Struct): | ||||||
|     _dec: msgpack.Decoder |     _dec: msgpack.Decoder | ||||||
|     _pld_spec: Type[Struct]|Raw|Any |     _pld_spec: Type[Struct]|Raw|Any | ||||||
| 
 | 
 | ||||||
|  |     # _ext_types_box: Struct|None = None | ||||||
|  | 
 | ||||||
|     def __repr__(self) -> str: |     def __repr__(self) -> str: | ||||||
|         speclines: str = textwrap.indent( |         speclines: str = textwrap.indent( | ||||||
|             pformat_msgspec(codec=self), |             pformat_msgspec(codec=self), | ||||||
|  | @ -339,12 +446,15 @@ class MsgCodec(Struct): | ||||||
| 
 | 
 | ||||||
|     def encode( |     def encode( | ||||||
|         self, |         self, | ||||||
|         py_obj: Any, |         py_obj: Any|PayloadMsg, | ||||||
| 
 | 
 | ||||||
|         use_buf: bool = False, |         use_buf: bool = False, | ||||||
|         # ^-XXX-^ uhh why am i getting this? |         # ^-XXX-^ uhh why am i getting this? | ||||||
|         # |_BufferError: Existing exports of data: object cannot be re-sized |         # |_BufferError: Existing exports of data: object cannot be re-sized | ||||||
| 
 | 
 | ||||||
|  |         as_ext_type: bool = False, | ||||||
|  |         hide_tb: bool = True, | ||||||
|  | 
 | ||||||
|     ) -> bytes: |     ) -> bytes: | ||||||
|         ''' |         ''' | ||||||
|         Encode input python objects to `msgpack` bytes for |         Encode input python objects to `msgpack` bytes for | ||||||
|  | @ -354,11 +464,46 @@ class MsgCodec(Struct): | ||||||
|         https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer |         https://jcristharif.com/msgspec/perf-tips.html#reusing-an-output-buffer | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|  |         __tracebackhide__: bool = hide_tb | ||||||
|         if use_buf: |         if use_buf: | ||||||
|             self._enc.encode_into(py_obj, self._buf) |             self._enc.encode_into(py_obj, self._buf) | ||||||
|             return self._buf |             return self._buf | ||||||
|         else: | 
 | ||||||
|         return self._enc.encode(py_obj) |         return self._enc.encode(py_obj) | ||||||
|  |         # try: | ||||||
|  |         #     return self._enc.encode(py_obj) | ||||||
|  |         # except TypeError as typerr: | ||||||
|  |         #     typerr.add_note( | ||||||
|  |         #         '|_src error from `msgspec`' | ||||||
|  |         #         # f'|_{self._enc.encode!r}' | ||||||
|  |         #     ) | ||||||
|  |         #     raise typerr | ||||||
|  | 
 | ||||||
|  |         # TODO! REMOVE once i'm confident we won't ever need it! | ||||||
|  |         # | ||||||
|  |         # box: Struct = self._ext_types_box | ||||||
|  |         # if ( | ||||||
|  |         #     as_ext_type | ||||||
|  |         #     or | ||||||
|  |         #     ( | ||||||
|  |         #         # XXX NOTE, auto-detect if the input type | ||||||
|  |         #         box | ||||||
|  |         #         and | ||||||
|  |         #         (ext_types := unpack_spec_types( | ||||||
|  |         #             spec=box.__annotations__['boxed']) | ||||||
|  |         #         ) | ||||||
|  |         #     ) | ||||||
|  |         # ): | ||||||
|  |         #     match py_obj: | ||||||
|  |         #         # case PayloadMsg(pld=pld) if ( | ||||||
|  |         #         #     type(pld) in ext_types | ||||||
|  |         #         # ): | ||||||
|  |         #         #     py_obj.pld = box(boxed=py_obj) | ||||||
|  |         #         #     breakpoint() | ||||||
|  |         #         case _ if ( | ||||||
|  |         #             type(py_obj) in ext_types | ||||||
|  |         #         ): | ||||||
|  |         #             py_obj = box(boxed=py_obj) | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|     def dec(self) -> msgpack.Decoder: |     def dec(self) -> msgpack.Decoder: | ||||||
|  | @ -378,21 +523,30 @@ class MsgCodec(Struct): | ||||||
|         return self._dec.decode(msg) |         return self._dec.decode(msg) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # [x] TODO: a sub-decoder system as well? => No! | # ?TODO? time to remove this finally? | ||||||
|  | # | ||||||
|  | # -[x] TODO: a sub-decoder system as well? | ||||||
|  | # => No! already re-architected to include a "payload-receiver" | ||||||
|  | #   now found in `._ops`. | ||||||
| # | # | ||||||
| # -[x] do we still want to try and support the sub-decoder with | # -[x] do we still want to try and support the sub-decoder with | ||||||
| # `.Raw` technique in the case that the `Generic` approach gives | # `.Raw` technique in the case that the `Generic` approach gives | ||||||
| # future grief? | # future grief? | ||||||
| # => NO, since we went with the `PldRx` approach instead B) | # => well YES but NO, since we went with the `PldRx` approach | ||||||
|  | #   instead! | ||||||
| # | # | ||||||
| # IF however you want to see the code that was staged for this | # IF however you want to see the code that was staged for this | ||||||
| # from wayyy back, see the pure removal commit. | # from wayyy back, see the pure removal commit. | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def mk_codec( | def mk_codec( | ||||||
|     # struct type unions set for `Decoder` |     ipc_pld_spec: Union[Type[Struct]]|Any|Raw = Raw, | ||||||
|     # https://jcristharif.com/msgspec/structs.html#tagged-unions |     # tagged-struct-types-union set for `Decoder`ing of payloads, as | ||||||
|     ipc_pld_spec: Union[Type[Struct]]|Any = Any, |     # per https://jcristharif.com/msgspec/structs.html#tagged-unions. | ||||||
|  |     # NOTE that the default `Raw` here **is very intentional** since | ||||||
|  |     # the `PldRx._pld_dec: MsgDec` is responsible for per ipc-ctx-task | ||||||
|  |     # decoding of msg-specs defined by the user as part of **their** | ||||||
|  |     # `tractor` "app's" type-limited IPC msg-spec. | ||||||
| 
 | 
 | ||||||
|     # TODO: offering a per-msg(-field) type-spec such that |     # TODO: offering a per-msg(-field) type-spec such that | ||||||
|     # the fields can be dynamically NOT decoded and left as `Raw` |     # the fields can be dynamically NOT decoded and left as `Raw` | ||||||
|  | @ -405,13 +559,18 @@ def mk_codec( | ||||||
| 
 | 
 | ||||||
|     libname: str = 'msgspec', |     libname: str = 'msgspec', | ||||||
| 
 | 
 | ||||||
|     # proxy as `Struct(**kwargs)` for ad-hoc type extensions |     # settings for encoding-to-send extension-types, | ||||||
|     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types |     # https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||||
|     # ------ - ------ |     # dec_hook: Callable|None = None, | ||||||
|     dec_hook: Callable|None = None, |  | ||||||
|     enc_hook: Callable|None = None, |     enc_hook: Callable|None = None, | ||||||
|     # ------ - ------ |     ext_types: list[Type]|None = None, | ||||||
|  | 
 | ||||||
|  |     # optionally provided msg-decoder from which we pull its, | ||||||
|  |     # |_.dec_hook() | ||||||
|  |     # |_.type | ||||||
|  |     ext_dec: MsgDec|None = None | ||||||
|     # |     # | ||||||
|  |     # ?TODO? other params we might want to support | ||||||
|     # Encoder: |     # Encoder: | ||||||
|     # write_buffer_size=write_buffer_size, |     # write_buffer_size=write_buffer_size, | ||||||
|     # |     # | ||||||
|  | @ -425,26 +584,44 @@ def mk_codec( | ||||||
|     `msgspec` ;). |     `msgspec` ;). | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # (manually) generate a msg-payload-spec for all relevant |     pld_spec = ipc_pld_spec | ||||||
|     # god-boxing-msg subtypes, parameterizing the `PayloadMsg.pld: PayloadT` |     if enc_hook: | ||||||
|     # for the decoder such that all sub-type msgs in our SCIPP |         if not ext_types: | ||||||
|     # will automatically decode to a type-"limited" payload (`Struct`) |             raise TypeError( | ||||||
|     # object (set). |                 f'If extending the serializable types with a custom encode hook (`enc_hook()`), ' | ||||||
|  |                 f'you must also provide the expected type set that the hook will handle ' | ||||||
|  |                 f'via a `ext_types: Union[Type]|None = None` argument!\n' | ||||||
|  |                 f'\n' | ||||||
|  |                 f'enc_hook = {enc_hook!r}\n' | ||||||
|  |                 f'ext_types = {ext_types!r}\n' | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |     dec_hook: Callable|None = None | ||||||
|  |     if ext_dec: | ||||||
|  |         dec: msgspec.Decoder = ext_dec.dec | ||||||
|  |         dec_hook = dec.dec_hook | ||||||
|  |         pld_spec |= dec.type | ||||||
|  |         if ext_types: | ||||||
|  |             pld_spec |= Union[*ext_types] | ||||||
|  | 
 | ||||||
|  |     # (manually) generate a msg-spec (how appropes) for all relevant | ||||||
|  |     # payload-boxing-struct-msg-types, parameterizing the | ||||||
|  |     # `PayloadMsg.pld: PayloadT` for the decoder such that all msgs | ||||||
|  |     # in our SC-RPC-protocol will automatically decode to | ||||||
|  |     # a type-"limited" payload (`Struct`) object (set). | ||||||
|     ( |     ( | ||||||
|         ipc_msg_spec, |         ipc_msg_spec, | ||||||
|         msg_types, |         msg_types, | ||||||
|     ) = mk_msg_spec( |     ) = mk_msg_spec( | ||||||
|         payload_type_union=ipc_pld_spec, |         payload_type_union=pld_spec, | ||||||
|     ) |     ) | ||||||
|     assert len(ipc_msg_spec.__args__) == len(msg_types) |  | ||||||
|     assert ipc_msg_spec |  | ||||||
| 
 | 
 | ||||||
|     # TODO: use this shim instead? |     msg_spec_types: set[Type] = unpack_spec_types(ipc_msg_spec) | ||||||
|     # bc.. unification, err somethin? |     assert ( | ||||||
|     # dec: MsgDec = mk_dec( |         len(ipc_msg_spec.__args__) == len(msg_types) | ||||||
|     #     spec=ipc_msg_spec, |         and | ||||||
|     #     dec_hook=dec_hook, |         len(msg_spec_types) == len(msg_types) | ||||||
|     # ) |     ) | ||||||
| 
 | 
 | ||||||
|     dec = msgpack.Decoder( |     dec = msgpack.Decoder( | ||||||
|         type=ipc_msg_spec, |         type=ipc_msg_spec, | ||||||
|  | @ -453,22 +630,29 @@ def mk_codec( | ||||||
|     enc = msgpack.Encoder( |     enc = msgpack.Encoder( | ||||||
|        enc_hook=enc_hook, |        enc_hook=enc_hook, | ||||||
|     ) |     ) | ||||||
| 
 |  | ||||||
|     codec = MsgCodec( |     codec = MsgCodec( | ||||||
|         _enc=enc, |         _enc=enc, | ||||||
|         _dec=dec, |         _dec=dec, | ||||||
|         _pld_spec=ipc_pld_spec, |         _pld_spec=pld_spec, | ||||||
|     ) |     ) | ||||||
| 
 |  | ||||||
|     # sanity on expected backend support |     # sanity on expected backend support | ||||||
|     assert codec.lib.__name__ == libname |     assert codec.lib.__name__ == libname | ||||||
| 
 |  | ||||||
|     return codec |     return codec | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # instance of the default `msgspec.msgpack` codec settings, i.e. | # instance of the default `msgspec.msgpack` codec settings, i.e. | ||||||
| # no custom structs, hooks or other special types. | # no custom structs, hooks or other special types. | ||||||
| _def_msgspec_codec: MsgCodec = mk_codec(ipc_pld_spec=Any) | # | ||||||
|  | # XXX NOTE XXX, this will break our `Context.start()` call! | ||||||
|  | # | ||||||
|  | # * by default we roundtrip the started pld-`value` and if you apply | ||||||
|  | #   this codec (globally anyway with `apply_codec()`) then the | ||||||
|  | #   `roundtripped` value will include a non-`.pld: Raw` which will | ||||||
|  | #   then type-error on the consequent `._ops.validte_payload_msg()`.. | ||||||
|  | # | ||||||
|  | _def_msgspec_codec: MsgCodec = mk_codec( | ||||||
|  |     ipc_pld_spec=Any, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| # The built-in IPC `Msg` spec. | # The built-in IPC `Msg` spec. | ||||||
| # Our composing "shuttle" protocol which allows `tractor`-app code | # Our composing "shuttle" protocol which allows `tractor`-app code | ||||||
|  | @ -476,13 +660,13 @@ _def_msgspec_codec: MsgCodec = mk_codec(ipc_pld_spec=Any) | ||||||
| # https://jcristharif.com/msgspec/supported-types.html | # https://jcristharif.com/msgspec/supported-types.html | ||||||
| # | # | ||||||
| _def_tractor_codec: MsgCodec = mk_codec( | _def_tractor_codec: MsgCodec = mk_codec( | ||||||
|     # TODO: use this for debug mode locking prot? |     ipc_pld_spec=Raw,  # XXX should be default righ!? | ||||||
|     # ipc_pld_spec=Any, |  | ||||||
|     ipc_pld_spec=Raw, |  | ||||||
| ) | ) | ||||||
| # TODO: IDEALLY provides for per-`trio.Task` specificity of the | 
 | ||||||
|  | # -[x] TODO, IDEALLY provides for per-`trio.Task` specificity of the | ||||||
| # IPC msging codec used by the transport layer when doing | # IPC msging codec used by the transport layer when doing | ||||||
| # `Channel.send()/.recv()` of wire data. | # `Channel.send()/.recv()` of wire data. | ||||||
|  | # => impled as our `PldRx` which is `Context` scoped B) | ||||||
| 
 | 
 | ||||||
| # ContextVar-TODO: DIDN'T WORK, kept resetting in every new task to default!? | # ContextVar-TODO: DIDN'T WORK, kept resetting in every new task to default!? | ||||||
| # _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar( | # _ctxvar_MsgCodec: ContextVar[MsgCodec] = ContextVar( | ||||||
|  | @ -559,17 +743,6 @@ def apply_codec( | ||||||
|     ) |     ) | ||||||
|     token: Token = var.set(codec) |     token: Token = var.set(codec) | ||||||
| 
 | 
 | ||||||
|     # ?TODO? for TreeVar approach which copies from the |  | ||||||
|     # cancel-scope of the prior value, NOT the prior task |  | ||||||
|     # See the docs: |  | ||||||
|     # - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables |  | ||||||
|     # - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py |  | ||||||
|     #   ^- see docs for @cm `.being()` API |  | ||||||
|     # with _ctxvar_MsgCodec.being(codec): |  | ||||||
|     #     new = _ctxvar_MsgCodec.get() |  | ||||||
|     #     assert new is codec |  | ||||||
|     #     yield codec |  | ||||||
| 
 |  | ||||||
|     try: |     try: | ||||||
|         yield var.get() |         yield var.get() | ||||||
|     finally: |     finally: | ||||||
|  | @ -580,6 +753,19 @@ def apply_codec( | ||||||
|         ) |         ) | ||||||
|         assert var.get() is orig |         assert var.get() is orig | ||||||
| 
 | 
 | ||||||
|  |     # ?TODO? for TreeVar approach which copies from the | ||||||
|  |     # cancel-scope of the prior value, NOT the prior task | ||||||
|  |     # | ||||||
|  |     # See the docs: | ||||||
|  |     # - https://tricycle.readthedocs.io/en/latest/reference.html#tree-variables | ||||||
|  |     # - https://github.com/oremanj/tricycle/blob/master/tricycle/_tests/test_tree_var.py | ||||||
|  |     #   ^- see docs for @cm `.being()` API | ||||||
|  |     # | ||||||
|  |     # with _ctxvar_MsgCodec.being(codec): | ||||||
|  |     #     new = _ctxvar_MsgCodec.get() | ||||||
|  |     #     assert new is codec | ||||||
|  |     #     yield codec | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| def current_codec() -> MsgCodec: | def current_codec() -> MsgCodec: | ||||||
|     ''' |     ''' | ||||||
|  | @ -599,6 +785,7 @@ def limit_msg_spec( | ||||||
|     # -> related to the `MsgCodec._payload_decs` stuff above.. |     # -> related to the `MsgCodec._payload_decs` stuff above.. | ||||||
|     # tagged_structs: list[Struct]|None = None, |     # tagged_structs: list[Struct]|None = None, | ||||||
| 
 | 
 | ||||||
|  |     hide_tb: bool = True, | ||||||
|     **codec_kwargs, |     **codec_kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> MsgCodec: | ) -> MsgCodec: | ||||||
|  | @ -609,7 +796,7 @@ def limit_msg_spec( | ||||||
|     for all IPC contexts in use by the current `trio.Task`. |     for all IPC contexts in use by the current `trio.Task`. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = True |     __tracebackhide__: bool = hide_tb | ||||||
|     curr_codec: MsgCodec = current_codec() |     curr_codec: MsgCodec = current_codec() | ||||||
|     msgspec_codec: MsgCodec = mk_codec( |     msgspec_codec: MsgCodec = mk_codec( | ||||||
|         ipc_pld_spec=payload_spec, |         ipc_pld_spec=payload_spec, | ||||||
|  |  | ||||||
|  | @ -0,0 +1,94 @@ | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Type-extension-utils for codec-ing (python) objects not | ||||||
|  | covered by the `msgspec.msgpack` protocol. | ||||||
|  | 
 | ||||||
|  | See the various API docs from `msgspec`. | ||||||
|  | 
 | ||||||
|  | extending from native types, | ||||||
|  | - https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types | ||||||
|  | 
 | ||||||
|  | converters, | ||||||
|  | - https://jcristharif.com/msgspec/converters.html | ||||||
|  | - https://jcristharif.com/msgspec/api.html#msgspec.convert | ||||||
|  | 
 | ||||||
|  | `Raw` fields, | ||||||
|  | - https://jcristharif.com/msgspec/api.html#raw | ||||||
|  | - support for `.convert()` and `Raw`, | ||||||
|  |   |_ https://jcristharif.com/msgspec/changelog.html | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from types import ( | ||||||
|  |     ModuleType, | ||||||
|  | ) | ||||||
|  | import typing | ||||||
|  | from typing import ( | ||||||
|  |     Type, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | def dec_type_union( | ||||||
|  |     type_names: list[str], | ||||||
|  |     mods: list[ModuleType] = [] | ||||||
|  | ) -> Type|Union[Type]: | ||||||
|  |     ''' | ||||||
|  |     Look up types by name, compile into a list and then create and | ||||||
|  |     return a `typing.Union` from the full set. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # import importlib | ||||||
|  |     types: list[Type] = [] | ||||||
|  |     for type_name in type_names: | ||||||
|  |         for mod in [ | ||||||
|  |             typing, | ||||||
|  |             # importlib.import_module(__name__), | ||||||
|  |         ] + mods: | ||||||
|  |             if type_ref := getattr( | ||||||
|  |                 mod, | ||||||
|  |                 type_name, | ||||||
|  |                 False, | ||||||
|  |             ): | ||||||
|  |                 types.append(type_ref) | ||||||
|  | 
 | ||||||
|  |     # special case handling only.. | ||||||
|  |     # ipc_pld_spec: Union[Type] = eval( | ||||||
|  |     #     pld_spec_str, | ||||||
|  |     #     {},  # globals | ||||||
|  |     #     {'typing': typing},  # locals | ||||||
|  |     # ) | ||||||
|  | 
 | ||||||
|  |     return Union[*types] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def enc_type_union( | ||||||
|  |     union_or_type: Union[Type]|Type, | ||||||
|  | ) -> list[str]: | ||||||
|  |     ''' | ||||||
|  |     Encode a type-union or single type to a list of type-name-strings | ||||||
|  |     ready for IPC interchange. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     type_strs: list[str] = [] | ||||||
|  |     for typ in getattr( | ||||||
|  |         union_or_type, | ||||||
|  |         '__args__', | ||||||
|  |         {union_or_type,}, | ||||||
|  |     ): | ||||||
|  |         type_strs.append(typ.__qualname__) | ||||||
|  | 
 | ||||||
|  |     return type_strs | ||||||
|  | @ -50,7 +50,9 @@ from tractor._exceptions import ( | ||||||
|     _mk_recv_mte, |     _mk_recv_mte, | ||||||
|     pack_error, |     pack_error, | ||||||
| ) | ) | ||||||
| from tractor._state import current_ipc_ctx | from tractor._state import ( | ||||||
|  |     current_ipc_ctx, | ||||||
|  | ) | ||||||
| from ._codec import ( | from ._codec import ( | ||||||
|     mk_dec, |     mk_dec, | ||||||
|     MsgDec, |     MsgDec, | ||||||
|  | @ -78,7 +80,7 @@ if TYPE_CHECKING: | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _def_any_pldec: MsgDec[Any] = mk_dec() | _def_any_pldec: MsgDec[Any] = mk_dec(spec=Any) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class PldRx(Struct): | class PldRx(Struct): | ||||||
|  | @ -108,33 +110,11 @@ class PldRx(Struct): | ||||||
|     # TODO: better to bind it here? |     # TODO: better to bind it here? | ||||||
|     # _rx_mc: trio.MemoryReceiveChannel |     # _rx_mc: trio.MemoryReceiveChannel | ||||||
|     _pld_dec: MsgDec |     _pld_dec: MsgDec | ||||||
|     _ctx: Context|None = None |  | ||||||
|     _ipc: Context|MsgStream|None = None |  | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|     def pld_dec(self) -> MsgDec: |     def pld_dec(self) -> MsgDec: | ||||||
|         return self._pld_dec |         return self._pld_dec | ||||||
| 
 | 
 | ||||||
|     # TODO: a better name? |  | ||||||
|     # -[ ] when would this be used as it avoids needingn to pass the |  | ||||||
|     #   ipc prim to every method |  | ||||||
|     @cm |  | ||||||
|     def wraps_ipc( |  | ||||||
|         self, |  | ||||||
|         ipc_prim: Context|MsgStream, |  | ||||||
| 
 |  | ||||||
|     ) -> PldRx: |  | ||||||
|         ''' |  | ||||||
|         Apply this payload receiver to an IPC primitive type, one |  | ||||||
|         of `Context` or `MsgStream`. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         self._ipc = ipc_prim |  | ||||||
|         try: |  | ||||||
|             yield self |  | ||||||
|         finally: |  | ||||||
|             self._ipc = None |  | ||||||
| 
 |  | ||||||
|     @cm |     @cm | ||||||
|     def limit_plds( |     def limit_plds( | ||||||
|         self, |         self, | ||||||
|  | @ -148,6 +128,10 @@ class PldRx(Struct): | ||||||
|         exit. |         exit. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|  |         # TODO, ensure we pull the current `MsgCodec`'s custom | ||||||
|  |         # dec/enc_hook settings as well ? | ||||||
|  |         # -[ ] see `._codec.mk_codec()` inputs | ||||||
|  |         # | ||||||
|         orig_dec: MsgDec = self._pld_dec |         orig_dec: MsgDec = self._pld_dec | ||||||
|         limit_dec: MsgDec = mk_dec( |         limit_dec: MsgDec = mk_dec( | ||||||
|             spec=spec, |             spec=spec, | ||||||
|  | @ -163,7 +147,7 @@ class PldRx(Struct): | ||||||
|     def dec(self) -> msgpack.Decoder: |     def dec(self) -> msgpack.Decoder: | ||||||
|         return self._pld_dec.dec |         return self._pld_dec.dec | ||||||
| 
 | 
 | ||||||
|     def recv_pld_nowait( |     def recv_msg_nowait( | ||||||
|         self, |         self, | ||||||
|         # TODO: make this `MsgStream` compat as well, see above^ |         # TODO: make this `MsgStream` compat as well, see above^ | ||||||
|         # ipc_prim: Context|MsgStream, |         # ipc_prim: Context|MsgStream, | ||||||
|  | @ -174,34 +158,95 @@ class PldRx(Struct): | ||||||
|         hide_tb: bool = False, |         hide_tb: bool = False, | ||||||
|         **dec_pld_kwargs, |         **dec_pld_kwargs, | ||||||
| 
 | 
 | ||||||
|     ) -> Any|Raw: |     ) -> tuple[ | ||||||
|  |         MsgType[PayloadT], | ||||||
|  |         PayloadT, | ||||||
|  |     ]: | ||||||
|  |         ''' | ||||||
|  |         Attempt to non-blocking receive a message from the `._rx_chan` and | ||||||
|  |         unwrap it's payload delivering the pair to the caller. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|         __tracebackhide__: bool = hide_tb |         __tracebackhide__: bool = hide_tb | ||||||
| 
 | 
 | ||||||
|         msg: MsgType = ( |         msg: MsgType = ( | ||||||
|             ipc_msg |             ipc_msg | ||||||
|             or |             or | ||||||
| 
 |  | ||||||
|             # sync-rx msg from underlying IPC feeder (mem-)chan |             # sync-rx msg from underlying IPC feeder (mem-)chan | ||||||
|             ipc._rx_chan.receive_nowait() |             ipc._rx_chan.receive_nowait() | ||||||
|         ) |         ) | ||||||
|         return self.decode_pld( |         pld: PayloadT = self.decode_pld( | ||||||
|             msg, |             msg, | ||||||
|             ipc=ipc, |             ipc=ipc, | ||||||
|             expect_msg=expect_msg, |             expect_msg=expect_msg, | ||||||
|             hide_tb=hide_tb, |             hide_tb=hide_tb, | ||||||
|             **dec_pld_kwargs, |             **dec_pld_kwargs, | ||||||
|         ) |         ) | ||||||
|  |         return ( | ||||||
|  |             msg, | ||||||
|  |             pld, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     async def recv_msg( | ||||||
|  |         self, | ||||||
|  |         ipc: Context|MsgStream, | ||||||
|  |         expect_msg: MsgType, | ||||||
|  | 
 | ||||||
|  |         # NOTE: ONLY for handling `Stop`-msgs that arrive during | ||||||
|  |         # a call to `drain_to_final_msg()` above! | ||||||
|  |         passthrough_non_pld_msgs: bool = True, | ||||||
|  |         hide_tb: bool = True, | ||||||
|  | 
 | ||||||
|  |         **decode_pld_kwargs, | ||||||
|  | 
 | ||||||
|  |     ) -> tuple[MsgType, PayloadT]: | ||||||
|  |         ''' | ||||||
|  |         Retrieve the next avail IPC msg, decode its payload, and | ||||||
|  |         return the (msg, pld) pair. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         __tracebackhide__: bool = hide_tb | ||||||
|  |         msg: MsgType = await ipc._rx_chan.receive() | ||||||
|  |         match msg: | ||||||
|  |             case Return()|Error(): | ||||||
|  |                 log.runtime( | ||||||
|  |                     f'Rxed final outcome msg\n' | ||||||
|  |                     f'{msg}\n' | ||||||
|  |                 ) | ||||||
|  |             case Stop(): | ||||||
|  |                 log.runtime( | ||||||
|  |                     f'Rxed stream stopped msg\n' | ||||||
|  |                     f'{msg}\n' | ||||||
|  |                 ) | ||||||
|  |                 if passthrough_non_pld_msgs: | ||||||
|  |                     return msg, None | ||||||
|  | 
 | ||||||
|  |         # TODO: is there some way we can inject the decoded | ||||||
|  |         # payload into an existing output buffer for the original | ||||||
|  |         # msg instance? | ||||||
|  |         pld: PayloadT = self.decode_pld( | ||||||
|  |             msg, | ||||||
|  |             ipc=ipc, | ||||||
|  |             expect_msg=expect_msg, | ||||||
|  |             hide_tb=hide_tb, | ||||||
|  | 
 | ||||||
|  |             **decode_pld_kwargs, | ||||||
|  |         ) | ||||||
|  |         return ( | ||||||
|  |             msg, | ||||||
|  |             pld, | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|     async def recv_pld( |     async def recv_pld( | ||||||
|         self, |         self, | ||||||
|         ipc: Context|MsgStream, |         ipc: Context|MsgStream, | ||||||
|         ipc_msg: MsgType|None = None, |         ipc_msg: MsgType[PayloadT]|None = None, | ||||||
|         expect_msg: Type[MsgType]|None = None, |         expect_msg: Type[MsgType]|None = None, | ||||||
|         hide_tb: bool = True, |         hide_tb: bool = True, | ||||||
| 
 | 
 | ||||||
|         **dec_pld_kwargs, |         **dec_pld_kwargs, | ||||||
| 
 | 
 | ||||||
|     ) -> Any|Raw: |     ) -> PayloadT: | ||||||
|         ''' |         ''' | ||||||
|         Receive a `MsgType`, then decode and return its `.pld` field. |         Receive a `MsgType`, then decode and return its `.pld` field. | ||||||
| 
 | 
 | ||||||
|  | @ -213,6 +258,13 @@ class PldRx(Struct): | ||||||
|             # async-rx msg from underlying IPC feeder (mem-)chan |             # async-rx msg from underlying IPC feeder (mem-)chan | ||||||
|             await ipc._rx_chan.receive() |             await ipc._rx_chan.receive() | ||||||
|         ) |         ) | ||||||
|  |         if ( | ||||||
|  |             type(msg) is Return | ||||||
|  |         ): | ||||||
|  |             log.info( | ||||||
|  |                 f'Rxed final result msg\n' | ||||||
|  |                 f'{msg}\n' | ||||||
|  |             ) | ||||||
|         return self.decode_pld( |         return self.decode_pld( | ||||||
|             msg=msg, |             msg=msg, | ||||||
|             ipc=ipc, |             ipc=ipc, | ||||||
|  | @ -258,6 +310,9 @@ class PldRx(Struct): | ||||||
|                         f'|_pld={pld!r}\n' |                         f'|_pld={pld!r}\n' | ||||||
|                     ) |                     ) | ||||||
|                     return pld |                     return pld | ||||||
|  |                 except TypeError as typerr: | ||||||
|  |                     __tracebackhide__: bool = False | ||||||
|  |                     raise typerr | ||||||
| 
 | 
 | ||||||
|                 # XXX pld-value type failure |                 # XXX pld-value type failure | ||||||
|                 except ValidationError as valerr: |                 except ValidationError as valerr: | ||||||
|  | @ -398,45 +453,6 @@ class PldRx(Struct): | ||||||
|             __tracebackhide__: bool = False |             __tracebackhide__: bool = False | ||||||
|             raise |             raise | ||||||
| 
 | 
 | ||||||
|     dec_msg = decode_pld |  | ||||||
| 
 |  | ||||||
|     async def recv_msg_w_pld( |  | ||||||
|         self, |  | ||||||
|         ipc: Context|MsgStream, |  | ||||||
|         expect_msg: MsgType, |  | ||||||
| 
 |  | ||||||
|         # NOTE: generally speaking only for handling `Stop`-msgs that |  | ||||||
|         # arrive during a call to `drain_to_final_msg()` above! |  | ||||||
|         passthrough_non_pld_msgs: bool = True, |  | ||||||
|         hide_tb: bool = True, |  | ||||||
|         **kwargs, |  | ||||||
| 
 |  | ||||||
|     ) -> tuple[MsgType, PayloadT]: |  | ||||||
|         ''' |  | ||||||
|         Retrieve the next avail IPC msg, decode it's payload, and return |  | ||||||
|         the pair of refs. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         __tracebackhide__: bool = hide_tb |  | ||||||
|         msg: MsgType = await ipc._rx_chan.receive() |  | ||||||
| 
 |  | ||||||
|         if passthrough_non_pld_msgs: |  | ||||||
|             match msg: |  | ||||||
|                 case Stop(): |  | ||||||
|                     return msg, None |  | ||||||
| 
 |  | ||||||
|         # TODO: is there some way we can inject the decoded |  | ||||||
|         # payload into an existing output buffer for the original |  | ||||||
|         # msg instance? |  | ||||||
|         pld: PayloadT = self.decode_pld( |  | ||||||
|             msg, |  | ||||||
|             ipc=ipc, |  | ||||||
|             expect_msg=expect_msg, |  | ||||||
|             hide_tb=hide_tb, |  | ||||||
|             **kwargs, |  | ||||||
|         ) |  | ||||||
|         return msg, pld |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| @cm | @cm | ||||||
| def limit_plds( | def limit_plds( | ||||||
|  | @ -452,11 +468,16 @@ def limit_plds( | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = True |     __tracebackhide__: bool = True | ||||||
|  |     curr_ctx: Context|None = current_ipc_ctx() | ||||||
|  |     if curr_ctx is None: | ||||||
|  |         raise RuntimeError( | ||||||
|  |             'No IPC `Context` is active !?\n' | ||||||
|  |             'Did you open `limit_plds()` from outside ' | ||||||
|  |             'a `Portal.open_context()` scope-block?' | ||||||
|  |         ) | ||||||
|     try: |     try: | ||||||
|         curr_ctx: Context = current_ipc_ctx() |  | ||||||
|         rx: PldRx = curr_ctx._pld_rx |         rx: PldRx = curr_ctx._pld_rx | ||||||
|         orig_pldec: MsgDec = rx.pld_dec |         orig_pldec: MsgDec = rx.pld_dec | ||||||
| 
 |  | ||||||
|         with rx.limit_plds( |         with rx.limit_plds( | ||||||
|             spec=spec, |             spec=spec, | ||||||
|             **dec_kwargs, |             **dec_kwargs, | ||||||
|  | @ -466,6 +487,11 @@ def limit_plds( | ||||||
|                 f'{pldec}\n' |                 f'{pldec}\n' | ||||||
|             ) |             ) | ||||||
|             yield pldec |             yield pldec | ||||||
|  | 
 | ||||||
|  |     except BaseException: | ||||||
|  |         __tracebackhide__: bool = False | ||||||
|  |         raise | ||||||
|  | 
 | ||||||
|     finally: |     finally: | ||||||
|         log.runtime( |         log.runtime( | ||||||
|             'Reverted to previous payload-decoder\n\n' |             'Reverted to previous payload-decoder\n\n' | ||||||
|  | @ -519,8 +545,8 @@ async def maybe_limit_plds( | ||||||
| async def drain_to_final_msg( | async def drain_to_final_msg( | ||||||
|     ctx: Context, |     ctx: Context, | ||||||
| 
 | 
 | ||||||
|     hide_tb: bool = True, |  | ||||||
|     msg_limit: int = 6, |     msg_limit: int = 6, | ||||||
|  |     hide_tb: bool = True, | ||||||
| 
 | 
 | ||||||
| ) -> tuple[ | ) -> tuple[ | ||||||
|     Return|None, |     Return|None, | ||||||
|  | @ -549,8 +575,8 @@ async def drain_to_final_msg( | ||||||
|     even after ctx closure and the `.open_context()` block exit. |     even after ctx closure and the `.open_context()` block exit. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     __tracebackhide__: bool = hide_tb |  | ||||||
|     raise_overrun: bool = not ctx._allow_overruns |     raise_overrun: bool = not ctx._allow_overruns | ||||||
|  |     parent_never_opened_stream: bool = ctx._stream is None | ||||||
| 
 | 
 | ||||||
|     # wait for a final context result by collecting (but |     # wait for a final context result by collecting (but | ||||||
|     # basically ignoring) any bi-dir-stream msgs still in transit |     # basically ignoring) any bi-dir-stream msgs still in transit | ||||||
|  | @ -559,13 +585,14 @@ async def drain_to_final_msg( | ||||||
|     result_msg: Return|Error|None = None |     result_msg: Return|Error|None = None | ||||||
|     while not ( |     while not ( | ||||||
|         ctx.maybe_error |         ctx.maybe_error | ||||||
|         and not ctx._final_result_is_set() |         and | ||||||
|  |         not ctx._final_result_is_set() | ||||||
|     ): |     ): | ||||||
|         try: |         try: | ||||||
|             # receive all msgs, scanning for either a final result |             # receive all msgs, scanning for either a final result | ||||||
|             # or error; the underlying call should never raise any |             # or error; the underlying call should never raise any | ||||||
|             # remote error directly! |             # remote error directly! | ||||||
|             msg, pld = await ctx._pld_rx.recv_msg_w_pld( |             msg, pld = await ctx._pld_rx.recv_msg( | ||||||
|                 ipc=ctx, |                 ipc=ctx, | ||||||
|                 expect_msg=Return, |                 expect_msg=Return, | ||||||
|                 raise_error=False, |                 raise_error=False, | ||||||
|  | @ -612,6 +639,11 @@ async def drain_to_final_msg( | ||||||
|                     ) |                     ) | ||||||
|                     __tracebackhide__: bool = False |                     __tracebackhide__: bool = False | ||||||
| 
 | 
 | ||||||
|  |             else: | ||||||
|  |                 log.cancel( | ||||||
|  |                     f'IPC ctx cancelled externally during result drain ?\n' | ||||||
|  |                     f'{ctx}' | ||||||
|  |                 ) | ||||||
|             # CASE 2: mask the local cancelled-error(s) |             # CASE 2: mask the local cancelled-error(s) | ||||||
|             # only when we are sure the remote error is |             # only when we are sure the remote error is | ||||||
|             # the source cause of this local task's |             # the source cause of this local task's | ||||||
|  | @ -643,17 +675,24 @@ async def drain_to_final_msg( | ||||||
|             case Yield(): |             case Yield(): | ||||||
|                 pre_result_drained.append(msg) |                 pre_result_drained.append(msg) | ||||||
|                 if ( |                 if ( | ||||||
|  |                     not parent_never_opened_stream | ||||||
|  |                     and ( | ||||||
|                         (ctx._stream.closed |                         (ctx._stream.closed | ||||||
|                      and (reason := 'stream was already closed') |                          and | ||||||
|                     ) |                          (reason := 'stream was already closed') | ||||||
|                     or (ctx.cancel_acked |                         ) or | ||||||
|                         and (reason := 'ctx cancelled other side') |                         (ctx.cancel_acked | ||||||
|  |                             and | ||||||
|  |                             (reason := 'ctx cancelled other side') | ||||||
|                         ) |                         ) | ||||||
|                         or (ctx._cancel_called |                         or (ctx._cancel_called | ||||||
|                         and (reason := 'ctx called `.cancel()`') |                             and | ||||||
|  |                             (reason := 'ctx called `.cancel()`') | ||||||
|                         ) |                         ) | ||||||
|                         or (len(pre_result_drained) > msg_limit |                         or (len(pre_result_drained) > msg_limit | ||||||
|                         and (reason := f'"yield" limit={msg_limit}') |                             and | ||||||
|  |                             (reason := f'"yield" limit={msg_limit}') | ||||||
|  |                         ) | ||||||
|                     ) |                     ) | ||||||
|                 ): |                 ): | ||||||
|                     log.cancel( |                     log.cancel( | ||||||
|  | @ -671,7 +710,7 @@ async def drain_to_final_msg( | ||||||
|                 # drain up to the `msg_limit` hoping to get |                 # drain up to the `msg_limit` hoping to get | ||||||
|                 # a final result or error/ctxc. |                 # a final result or error/ctxc. | ||||||
|                 else: |                 else: | ||||||
|                     log.warning( |                     report: str = ( | ||||||
|                         'Ignoring "yield" msg during `ctx.result()` drain..\n' |                         'Ignoring "yield" msg during `ctx.result()` drain..\n' | ||||||
|                         f'<= {ctx.chan.uid}\n' |                         f'<= {ctx.chan.uid}\n' | ||||||
|                         f'  |_{ctx._nsf}()\n\n' |                         f'  |_{ctx._nsf}()\n\n' | ||||||
|  | @ -680,6 +719,14 @@ async def drain_to_final_msg( | ||||||
| 
 | 
 | ||||||
|                         f'{pretty_struct.pformat(msg)}\n' |                         f'{pretty_struct.pformat(msg)}\n' | ||||||
|                     ) |                     ) | ||||||
|  |                     if parent_never_opened_stream: | ||||||
|  |                         report = ( | ||||||
|  |                             f'IPC ctx never opened stream on {ctx.side!r}-side!\n' | ||||||
|  |                             f'\n' | ||||||
|  |                             # f'{ctx}\n' | ||||||
|  |                         ) + report | ||||||
|  | 
 | ||||||
|  |                     log.warning(report) | ||||||
|                     continue |                     continue | ||||||
| 
 | 
 | ||||||
|             # stream terminated, but no result yet.. |             # stream terminated, but no result yet.. | ||||||
|  | @ -771,6 +818,7 @@ async def drain_to_final_msg( | ||||||
|             f'{ctx.outcome}\n' |             f'{ctx.outcome}\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |     __tracebackhide__: bool = hide_tb | ||||||
|     return ( |     return ( | ||||||
|         result_msg, |         result_msg, | ||||||
|         pre_result_drained, |         pre_result_drained, | ||||||
|  | @ -796,8 +844,14 @@ def validate_payload_msg( | ||||||
|     __tracebackhide__: bool = hide_tb |     __tracebackhide__: bool = hide_tb | ||||||
|     codec: MsgCodec = current_codec() |     codec: MsgCodec = current_codec() | ||||||
|     msg_bytes: bytes = codec.encode(pld_msg) |     msg_bytes: bytes = codec.encode(pld_msg) | ||||||
|  |     roundtripped: Started|None = None | ||||||
|     try: |     try: | ||||||
|         roundtripped: Started = codec.decode(msg_bytes) |         roundtripped: Started = codec.decode(msg_bytes) | ||||||
|  |     except TypeError as typerr: | ||||||
|  |         __tracebackhide__: bool = False | ||||||
|  |         raise typerr | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|         ctx: Context = getattr(ipc, 'ctx', ipc) |         ctx: Context = getattr(ipc, 'ctx', ipc) | ||||||
|         pld: PayloadT = ctx.pld_rx.decode_pld( |         pld: PayloadT = ctx.pld_rx.decode_pld( | ||||||
|             msg=roundtripped, |             msg=roundtripped, | ||||||
|  | @ -822,6 +876,11 @@ def validate_payload_msg( | ||||||
|             ) |             ) | ||||||
|             raise ValidationError(complaint) |             raise ValidationError(complaint) | ||||||
| 
 | 
 | ||||||
|  |     # usually due to `.decode()` input type | ||||||
|  |     except TypeError as typerr: | ||||||
|  |         __tracebackhide__: bool = False | ||||||
|  |         raise typerr | ||||||
|  | 
 | ||||||
|     # raise any msg type error NO MATTER WHAT! |     # raise any msg type error NO MATTER WHAT! | ||||||
|     except ValidationError as verr: |     except ValidationError as verr: | ||||||
|         try: |         try: | ||||||
|  | @ -832,9 +891,13 @@ def validate_payload_msg( | ||||||
|                 verb_header='Trying to send ', |                 verb_header='Trying to send ', | ||||||
|                 is_invalid_payload=True, |                 is_invalid_payload=True, | ||||||
|             ) |             ) | ||||||
|         except BaseException: |         except BaseException as _be: | ||||||
|  |             if not roundtripped: | ||||||
|  |                 raise verr | ||||||
|  | 
 | ||||||
|  |             be = _be | ||||||
|             __tracebackhide__: bool = False |             __tracebackhide__: bool = False | ||||||
|             raise |             raise be | ||||||
| 
 | 
 | ||||||
|         if not raise_mte: |         if not raise_mte: | ||||||
|             return mte |             return mte | ||||||
|  |  | ||||||
|  | @ -599,15 +599,15 @@ def mk_msg_spec( | ||||||
|         Msg[payload_type_union], |         Msg[payload_type_union], | ||||||
|         Generic[PayloadT], |         Generic[PayloadT], | ||||||
|     ) |     ) | ||||||
|     defstruct_bases: tuple = ( |     # defstruct_bases: tuple = ( | ||||||
|         Msg, # [payload_type_union], |     #     Msg, # [payload_type_union], | ||||||
|         # Generic[PayloadT], |     #     # Generic[PayloadT], | ||||||
|         # ^-XXX-^: not allowed? lul.. |     #     # ^-XXX-^: not allowed? lul.. | ||||||
|     ) |     # ) | ||||||
|     ipc_msg_types: list[Msg] = [] |     ipc_msg_types: list[Msg] = [] | ||||||
| 
 | 
 | ||||||
|     idx_msg_types: list[Msg] = [] |     idx_msg_types: list[Msg] = [] | ||||||
|     defs_msg_types: list[Msg] = [] |     # defs_msg_types: list[Msg] = [] | ||||||
|     nc_msg_types: list[Msg] = [] |     nc_msg_types: list[Msg] = [] | ||||||
| 
 | 
 | ||||||
|     for msgtype in __msg_types__: |     for msgtype in __msg_types__: | ||||||
|  | @ -625,7 +625,7 @@ def mk_msg_spec( | ||||||
|         # TODO: wait why do we need the dynamic version here? |         # TODO: wait why do we need the dynamic version here? | ||||||
|         # XXX ANSWER XXX -> BC INHERITANCE.. don't work w generics.. |         # XXX ANSWER XXX -> BC INHERITANCE.. don't work w generics.. | ||||||
|         # |         # | ||||||
|         # NOTE previously bc msgtypes WERE NOT inheritting |         # NOTE previously bc msgtypes WERE NOT inheriting | ||||||
|         # directly the `Generic[PayloadT]` type, the manual method |         # directly the `Generic[PayloadT]` type, the manual method | ||||||
|         # of generic-paraming with `.__class_getitem__()` wasn't |         # of generic-paraming with `.__class_getitem__()` wasn't | ||||||
|         # working.. |         # working.. | ||||||
|  | @ -662,38 +662,35 @@ def mk_msg_spec( | ||||||
| 
 | 
 | ||||||
|         # with `msgspec.structs.defstruct` |         # with `msgspec.structs.defstruct` | ||||||
|         # XXX ALSO DOESN'T WORK |         # XXX ALSO DOESN'T WORK | ||||||
|         defstruct_msgtype = defstruct( |         # defstruct_msgtype = defstruct( | ||||||
|             name=msgtype.__name__, |         #     name=msgtype.__name__, | ||||||
|             fields=[ |         #     fields=[ | ||||||
|                 ('cid', str), |         #         ('cid', str), | ||||||
| 
 | 
 | ||||||
|                 # XXX doesn't seem to work.. |         #         # XXX doesn't seem to work.. | ||||||
|                 # ('pld', PayloadT), |         #         # ('pld', PayloadT), | ||||||
| 
 |  | ||||||
|                 ('pld', payload_type_union), |  | ||||||
|             ], |  | ||||||
|             bases=defstruct_bases, |  | ||||||
|         ) |  | ||||||
|         defs_msg_types.append(defstruct_msgtype) |  | ||||||
| 
 | 
 | ||||||
|  |         #         ('pld', payload_type_union), | ||||||
|  |         #     ], | ||||||
|  |         #     bases=defstruct_bases, | ||||||
|  |         # ) | ||||||
|  |         # defs_msg_types.append(defstruct_msgtype) | ||||||
|         # assert index_paramed_msg_type == manual_paramed_msg_subtype |         # assert index_paramed_msg_type == manual_paramed_msg_subtype | ||||||
| 
 |  | ||||||
|         # paramed_msg_type = manual_paramed_msg_subtype |         # paramed_msg_type = manual_paramed_msg_subtype | ||||||
| 
 |  | ||||||
|         # ipc_payload_msgs_type_union |= index_paramed_msg_type |         # ipc_payload_msgs_type_union |= index_paramed_msg_type | ||||||
| 
 | 
 | ||||||
|     idx_spec: Union[Type[Msg]] = Union[*idx_msg_types] |     idx_spec: Union[Type[Msg]] = Union[*idx_msg_types] | ||||||
|     def_spec: Union[Type[Msg]] = Union[*defs_msg_types] |     # def_spec: Union[Type[Msg]] = Union[*defs_msg_types] | ||||||
|     nc_spec: Union[Type[Msg]] = Union[*nc_msg_types] |     nc_spec: Union[Type[Msg]] = Union[*nc_msg_types] | ||||||
| 
 | 
 | ||||||
|     specs: dict[str, Union[Type[Msg]]] = { |     specs: dict[str, Union[Type[Msg]]] = { | ||||||
|         'indexed_generics': idx_spec, |         'indexed_generics': idx_spec, | ||||||
|         'defstruct': def_spec, |         # 'defstruct': def_spec, | ||||||
|         'types_new_class': nc_spec, |         'types_new_class': nc_spec, | ||||||
|     } |     } | ||||||
|     msgtypes_table: dict[str, list[Msg]] = { |     msgtypes_table: dict[str, list[Msg]] = { | ||||||
|         'indexed_generics': idx_msg_types, |         'indexed_generics': idx_msg_types, | ||||||
|         'defstruct': defs_msg_types, |         # 'defstruct': defs_msg_types, | ||||||
|         'types_new_class': nc_msg_types, |         'types_new_class': nc_msg_types, | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -23,12 +23,10 @@ import asyncio | ||||||
| from asyncio.exceptions import ( | from asyncio.exceptions import ( | ||||||
|     CancelledError, |     CancelledError, | ||||||
| ) | ) | ||||||
| from asyncio import ( |  | ||||||
|     QueueShutDown, |  | ||||||
| ) |  | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| from dataclasses import dataclass | from dataclasses import dataclass | ||||||
| import inspect | import inspect | ||||||
|  | import platform | ||||||
| import traceback | import traceback | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|  | @ -79,6 +77,20 @@ __all__ = [ | ||||||
|     'run_as_asyncio_guest', |     'run_as_asyncio_guest', | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | if (_py_313 := ( | ||||||
|  |         ('3', '13') | ||||||
|  |         == | ||||||
|  |         platform.python_version_tuple()[:-1] | ||||||
|  |     ) | ||||||
|  | ): | ||||||
|  |     # 3.13+ only.. lel. | ||||||
|  |     # https://docs.python.org/3.13/library/asyncio-queue.html#asyncio.QueueShutDown | ||||||
|  |     from asyncio import ( | ||||||
|  |         QueueShutDown, | ||||||
|  |     ) | ||||||
|  | else: | ||||||
|  |     QueueShutDown = False | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| # TODO, generally speaking we can generalize this abstraction, a "SC linked | # TODO, generally speaking we can generalize this abstraction, a "SC linked | ||||||
| # parent->child task pair", as the same "supervision scope primitive" | # parent->child task pair", as the same "supervision scope primitive" | ||||||
|  | @ -348,7 +360,6 @@ def _run_asyncio_task( | ||||||
|     trio_task: trio.Task = trio.lowlevel.current_task() |     trio_task: trio.Task = trio.lowlevel.current_task() | ||||||
|     trio_cs = trio.CancelScope() |     trio_cs = trio.CancelScope() | ||||||
|     aio_task_complete = trio.Event() |     aio_task_complete = trio.Event() | ||||||
|     aio_err: BaseException|None = None |  | ||||||
| 
 | 
 | ||||||
|     chan = LinkedTaskChannel( |     chan = LinkedTaskChannel( | ||||||
|         _to_aio=aio_q,  # asyncio.Queue |         _to_aio=aio_q,  # asyncio.Queue | ||||||
|  | @ -392,7 +403,7 @@ def _run_asyncio_task( | ||||||
|             if ( |             if ( | ||||||
|                 result != orig |                 result != orig | ||||||
|                 and |                 and | ||||||
|                 aio_err is None |                 chan._aio_err is None | ||||||
|                 and |                 and | ||||||
| 
 | 
 | ||||||
|                 # in the `open_channel_from()` case we don't |                 # in the `open_channel_from()` case we don't | ||||||
|  | @ -576,7 +587,11 @@ def _run_asyncio_task( | ||||||
|             # normally suppressed unless the trio.Task also errors |             # normally suppressed unless the trio.Task also errors | ||||||
|             # |             # | ||||||
|             # ?TODO, is this even needed (does it happen) now? |             # ?TODO, is this even needed (does it happen) now? | ||||||
|             elif isinstance(aio_err, QueueShutDown): |             elif ( | ||||||
|  |                 _py_313 | ||||||
|  |                 and | ||||||
|  |                 isinstance(aio_err, QueueShutDown) | ||||||
|  |             ): | ||||||
|                 # import pdbp; pdbp.set_trace() |                 # import pdbp; pdbp.set_trace() | ||||||
|                 trio_err = AsyncioTaskExited( |                 trio_err = AsyncioTaskExited( | ||||||
|                     'Task exited before `trio` side' |                     'Task exited before `trio` side' | ||||||
|  | @ -956,8 +971,9 @@ async def translate_aio_errors( | ||||||
|             # or an error, we ensure the aio-side gets signalled via |             # or an error, we ensure the aio-side gets signalled via | ||||||
|             # an explicit exception and its `Queue` is shutdown. |             # an explicit exception and its `Queue` is shutdown. | ||||||
|             if ya_trio_exited: |             if ya_trio_exited: | ||||||
|                 # raise `QueueShutDown` on next `Queue.get()` call on |                 # XXX py3.13+ ONLY.. | ||||||
|                 # aio side. |                 # raise `QueueShutDown` on next `Queue.get/put()` | ||||||
|  |                 if _py_313: | ||||||
|                     chan._to_aio.shutdown() |                     chan._to_aio.shutdown() | ||||||
| 
 | 
 | ||||||
|                 # pump this event-loop (well `Runner` but ya) |                 # pump this event-loop (well `Runner` but ya) | ||||||
|  |  | ||||||
|  | @ -29,3 +29,6 @@ from ._broadcast import ( | ||||||
|     BroadcastReceiver as BroadcastReceiver, |     BroadcastReceiver as BroadcastReceiver, | ||||||
|     Lagged as Lagged, |     Lagged as Lagged, | ||||||
| ) | ) | ||||||
|  | from ._beg import ( | ||||||
|  |     collapse_eg as collapse_eg, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | @ -0,0 +1,58 @@ | ||||||
|  | # tractor: structured concurrent "actors". | ||||||
|  | # Copyright 2018-eternity Tyler Goodlet. | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | `BaseExceptionGroup` related utils and helpers pertaining to | ||||||
|  | first-class-`trio` from a historical perspective B) | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import ( | ||||||
|  |     asynccontextmanager as acm, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def maybe_collapse_eg( | ||||||
|  |     beg: BaseExceptionGroup, | ||||||
|  | ) -> BaseException: | ||||||
|  |     ''' | ||||||
|  |     If the input beg can collapse to a single non-eg sub-exception, | ||||||
|  |     return it instead. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     if len(excs := beg.exceptions) == 1: | ||||||
|  |         return excs[0] | ||||||
|  | 
 | ||||||
|  |     return beg | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def collapse_eg(): | ||||||
|  |     ''' | ||||||
|  |     If `BaseExceptionGroup` raised in the body scope is | ||||||
|  |     "collapse-able" (in the same way that | ||||||
|  |     `trio.open_nursery(strict_exception_groups=False)` works) then | ||||||
|  |     only raise the lone emedded non-eg in in place. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     try: | ||||||
|  |         yield | ||||||
|  |     except* BaseException as beg: | ||||||
|  |         if ( | ||||||
|  |             exc := maybe_collapse_eg(beg) | ||||||
|  |         ) is not beg: | ||||||
|  |             raise exc | ||||||
|  | 
 | ||||||
|  |         raise beg | ||||||
|  | @ -15,7 +15,7 @@ | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| ``tokio`` style broadcast channel. | `tokio` style broadcast channel. | ||||||
| https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html | https://docs.rs/tokio/1.11.0/tokio/sync/broadcast/index.html | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
|  |  | ||||||
|  | @ -57,6 +57,8 @@ async def maybe_open_nursery( | ||||||
|     shield: bool = False, |     shield: bool = False, | ||||||
|     lib: ModuleType = trio, |     lib: ModuleType = trio, | ||||||
| 
 | 
 | ||||||
|  |     **kwargs,  # proxy thru | ||||||
|  | 
 | ||||||
| ) -> AsyncGenerator[trio.Nursery, Any]: | ) -> AsyncGenerator[trio.Nursery, Any]: | ||||||
|     ''' |     ''' | ||||||
|     Create a new nursery if None provided. |     Create a new nursery if None provided. | ||||||
|  | @ -67,7 +69,7 @@ async def maybe_open_nursery( | ||||||
|     if nursery is not None: |     if nursery is not None: | ||||||
|         yield nursery |         yield nursery | ||||||
|     else: |     else: | ||||||
|         async with lib.open_nursery() as nursery: |         async with lib.open_nursery(**kwargs) as nursery: | ||||||
|             nursery.cancel_scope.shield = shield |             nursery.cancel_scope.shield = shield | ||||||
|             yield nursery |             yield nursery | ||||||
| 
 | 
 | ||||||
|  | @ -143,9 +145,14 @@ async def gather_contexts( | ||||||
|             'Use a non-lazy iterator or sequence type intead!' |             'Use a non-lazy iterator or sequence type intead!' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     async with trio.open_nursery() as n: |     async with trio.open_nursery( | ||||||
|  |         strict_exception_groups=False, | ||||||
|  |         # ^XXX^ TODO? soo roll our own then ?? | ||||||
|  |         # -> since we kinda want the "if only one `.exception` then | ||||||
|  |         # just raise that" interface? | ||||||
|  |     ) as tn: | ||||||
|         for mngr in mngrs: |         for mngr in mngrs: | ||||||
|             n.start_soon( |             tn.start_soon( | ||||||
|                 _enter_and_wait, |                 _enter_and_wait, | ||||||
|                 mngr, |                 mngr, | ||||||
|                 unwrapped, |                 unwrapped, | ||||||
|  |  | ||||||
							
								
								
									
										90
									
								
								uv.lock
								
								
								
								
							
							
						
						
									
										90
									
								
								uv.lock
								
								
								
								
							|  | @ -126,7 +126,31 @@ wheels = [ | ||||||
| [[package]] | [[package]] | ||||||
| name = "msgspec" | name = "msgspec" | ||||||
| version = "0.19.0" | version = "0.19.0" | ||||||
| source = { git = "https://github.com/jcrist/msgspec.git#dd965dce22e5278d4935bea923441ecde31b5325" } | source = { registry = "https://pypi.org/simple" } | ||||||
|  | sdist = { url = "https://files.pythonhosted.org/packages/cf/9b/95d8ce458462b8b71b8a70fa94563b2498b89933689f3a7b8911edfae3d7/msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e", size = 216934 } | ||||||
|  | wheels = [ | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/24/d4/2ec2567ac30dab072cce3e91fb17803c52f0a37aab6b0c24375d2b20a581/msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e", size = 187939 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/2b/c0/18226e4328897f4f19875cb62bb9259fe47e901eade9d9376ab5f251a929/msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551", size = 182202 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/81/25/3a4b24d468203d8af90d1d351b77ea3cffb96b29492855cf83078f16bfe4/msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7", size = 209029 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/85/2e/db7e189b57901955239f7689b5dcd6ae9458637a9c66747326726c650523/msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011", size = 210682 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/03/97/7c8895c9074a97052d7e4a1cc1230b7b6e2ca2486714eb12c3f08bb9d284/msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063", size = 214003 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/61/61/e892997bcaa289559b4d5869f066a8021b79f4bf8e955f831b095f47a4cd/msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716", size = 216833 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/ce/3d/71b2dffd3a1c743ffe13296ff701ee503feaebc3f04d0e75613b6563c374/msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c", size = 186184 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/b2/5f/a70c24f075e3e7af2fae5414c7048b0e11389685b7f717bb55ba282a34a7/msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f", size = 190485 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/89/b0/1b9763938cfae12acf14b682fcf05c92855974d921a5a985ecc197d1c672/msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2", size = 183910 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/87/81/0c8c93f0b92c97e326b279795f9c5b956c5a97af28ca0fbb9fd86c83737a/msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12", size = 210633 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/d0/ef/c5422ce8af73928d194a6606f8ae36e93a52fd5e8df5abd366903a5ca8da/msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc", size = 213594 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/19/2b/4137bc2ed45660444842d042be2cf5b18aa06efd2cda107cff18253b9653/msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c", size = 214053 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/9d/e6/8ad51bdc806aac1dc501e8fe43f759f9ed7284043d722b53323ea421c360/msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537", size = 219081 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/b1/ef/27dd35a7049c9a4f4211c6cd6a8c9db0a50647546f003a5867827ec45391/msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0", size = 187467 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/3c/cb/2842c312bbe618d8fefc8b9cedce37f773cdc8fa453306546dba2c21fd98/msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86", size = 190498 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/58/95/c40b01b93465e1a5f3b6c7d91b10fb574818163740cc3acbe722d1e0e7e4/msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314", size = 183950 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/e8/f0/5b764e066ce9aba4b70d1db8b087ea66098c7c27d59b9dd8a3532774d48f/msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e", size = 210647 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/9d/87/bc14f49bc95c4cb0dd0a8c56028a67c014ee7e6818ccdce74a4862af259b/msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5", size = 213563 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/53/2f/2b1c2b056894fbaa975f68f81e3014bb447516a8b010f1bed3fb0e016ed7/msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9", size = 213996 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/aa/5a/4cd408d90d1417e8d2ce6a22b98a6853c1b4d7cb7669153e4424d60087f6/msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327", size = 219087 }, | ||||||
|  |     { url = "https://files.pythonhosted.org/packages/23/d8/f15b40611c2d5753d1abb0ca0da0c75348daf1252220e5dda2867bd81062/msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f", size = 187432 }, | ||||||
|  | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "outcome" | name = "outcome" | ||||||
|  | @ -240,7 +264,7 @@ wheels = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "pytest" | name = "pytest" | ||||||
| version = "8.3.4" | version = "8.3.5" | ||||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||||
| dependencies = [ | dependencies = [ | ||||||
|     { name = "colorama", marker = "sys_platform == 'win32'" }, |     { name = "colorama", marker = "sys_platform == 'win32'" }, | ||||||
|  | @ -248,9 +272,9 @@ dependencies = [ | ||||||
|     { name = "packaging" }, |     { name = "packaging" }, | ||||||
|     { name = "pluggy" }, |     { name = "pluggy" }, | ||||||
| ] | ] | ||||||
| sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } | sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } | ||||||
| wheels = [ | wheels = [ | ||||||
|     { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, |     { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -300,6 +324,7 @@ dependencies = [ | ||||||
|     { name = "colorlog" }, |     { name = "colorlog" }, | ||||||
|     { name = "msgspec" }, |     { name = "msgspec" }, | ||||||
|     { name = "pdbp" }, |     { name = "pdbp" }, | ||||||
|  |     { name = "tabcompleter" }, | ||||||
|     { name = "tricycle" }, |     { name = "tricycle" }, | ||||||
|     { name = "trio" }, |     { name = "trio" }, | ||||||
|     { name = "wrapt" }, |     { name = "wrapt" }, | ||||||
|  | @ -314,17 +339,16 @@ dev = [ | ||||||
|     { name = "pytest" }, |     { name = "pytest" }, | ||||||
|     { name = "stackscope" }, |     { name = "stackscope" }, | ||||||
|     { name = "xonsh" }, |     { name = "xonsh" }, | ||||||
|     { name = "xonsh-vox-tabcomplete" }, |  | ||||||
|     { name = "xontrib-vox" }, |  | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.metadata] | [package.metadata] | ||||||
| requires-dist = [ | requires-dist = [ | ||||||
|     { name = "colorlog", specifier = ">=6.8.2,<7" }, |     { name = "colorlog", specifier = ">=6.8.2,<7" }, | ||||||
|     { name = "msgspec", git = "https://github.com/jcrist/msgspec.git" }, |     { name = "msgspec", specifier = ">=0.19.0" }, | ||||||
|     { name = "pdbp", specifier = ">=1.5.0,<2" }, |     { name = "pdbp", specifier = ">=1.6,<2" }, | ||||||
|  |     { name = "tabcompleter", specifier = ">=1.4.0" }, | ||||||
|     { name = "tricycle", specifier = ">=0.4.1,<0.5" }, |     { name = "tricycle", specifier = ">=0.4.1,<0.5" }, | ||||||
|     { name = "trio", specifier = ">=0.24,<0.25" }, |     { name = "trio", specifier = ">0.27" }, | ||||||
|     { name = "wrapt", specifier = ">=1.16.0,<2" }, |     { name = "wrapt", specifier = ">=1.16.0,<2" }, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | @ -332,13 +356,11 @@ requires-dist = [ | ||||||
| dev = [ | dev = [ | ||||||
|     { name = "greenback", specifier = ">=1.2.1,<2" }, |     { name = "greenback", specifier = ">=1.2.1,<2" }, | ||||||
|     { name = "pexpect", specifier = ">=4.9.0,<5" }, |     { name = "pexpect", specifier = ">=4.9.0,<5" }, | ||||||
|     { name = "prompt-toolkit", specifier = ">=3.0.43,<4" }, |     { name = "prompt-toolkit", specifier = ">=3.0.50" }, | ||||||
|     { name = "pyperclip", specifier = ">=1.9.0" }, |     { name = "pyperclip", specifier = ">=1.9.0" }, | ||||||
|     { name = "pytest", specifier = ">=8.2.0,<9" }, |     { name = "pytest", specifier = ">=8.3.5" }, | ||||||
|     { name = "stackscope", specifier = ">=0.2.2,<0.3" }, |     { name = "stackscope", specifier = ">=0.2.2,<0.3" }, | ||||||
|     { name = "xonsh", specifier = ">=0.19.1" }, |     { name = "xonsh", specifier = ">=0.19.2" }, | ||||||
|     { name = "xonsh-vox-tabcomplete", specifier = ">=0.5,<0.6" }, |  | ||||||
|     { name = "xontrib-vox", specifier = ">=0.0.1,<0.0.2" }, |  | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -355,7 +377,7 @@ wheels = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "trio" | name = "trio" | ||||||
| version = "0.24.0" | version = "0.29.0" | ||||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||||
| dependencies = [ | dependencies = [ | ||||||
|     { name = "attrs" }, |     { name = "attrs" }, | ||||||
|  | @ -365,9 +387,9 @@ dependencies = [ | ||||||
|     { name = "sniffio" }, |     { name = "sniffio" }, | ||||||
|     { name = "sortedcontainers" }, |     { name = "sortedcontainers" }, | ||||||
| ] | ] | ||||||
| sdist = { url = "https://files.pythonhosted.org/packages/8a/f3/07c152213222c615fe2391b8e1fea0f5af83599219050a549c20fcbd9ba2/trio-0.24.0.tar.gz", hash = "sha256:ffa09a74a6bf81b84f8613909fb0beaee84757450183a7a2e0b47b455c0cac5d", size = 545131 } | sdist = { url = "https://files.pythonhosted.org/packages/a1/47/f62e62a1a6f37909aed0bf8f5d5411e06fa03846cfcb64540cd1180ccc9f/trio-0.29.0.tar.gz", hash = "sha256:ea0d3967159fc130acb6939a0be0e558e364fee26b5deeecc893a6b08c361bdf", size = 588952 } | ||||||
| wheels = [ | wheels = [ | ||||||
|     { url = "https://files.pythonhosted.org/packages/14/fb/9299cf74953f473a15accfdbe2c15218e766bae8c796f2567c83bae03e98/trio-0.24.0-py3-none-any.whl", hash = "sha256:c3bd3a4e3e3025cd9a2241eae75637c43fe0b9e88b4c97b9161a55b9e54cd72c", size = 460205 }, |     { url = "https://files.pythonhosted.org/packages/c9/55/c4d9bea8b3d7937901958f65124123512419ab0eb73695e5f382521abbfb/trio-0.29.0-py3-none-any.whl", hash = "sha256:d8c463f1a9cc776ff63e331aba44c125f423a5a13c684307e828d930e625ba66", size = 492920 }, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -434,33 +456,13 @@ wheels = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "xonsh" | name = "xonsh" | ||||||
| version = "0.19.1" | version = "0.19.2" | ||||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||||
| sdist = { url = "https://files.pythonhosted.org/packages/98/6e/b54a0b2685535995ee50f655103c463f9d339455c9b08c4bce3e03e7bb17/xonsh-0.19.1.tar.gz", hash = "sha256:5d3de649c909f6d14bc69232219bcbdb8152c830e91ddf17ad169c672397fb97", size = 796468 } | sdist = { url = "https://files.pythonhosted.org/packages/68/4e/56e95a5e607eb3b0da37396f87cde70588efc8ef819ab16f02d5b8378dc4/xonsh-0.19.2.tar.gz", hash = "sha256:cfdd0680d954a2c3aefd6caddcc7143a3d06aa417ed18365a08219bb71b960b0", size = 799960 } | ||||||
| wheels = [ | wheels = [ | ||||||
|     { url = "https://files.pythonhosted.org/packages/8c/e6/db44068c5725af9678e37980ae9503165393d51b80dc8517fa4ec74af1cf/xonsh-0.19.1-py310-none-any.whl", hash = "sha256:83eb6610ed3535f8542abd80af9554fb7e2805b0b3f96e445f98d4b5cf1f7046", size = 640686 }, |     { url = "https://files.pythonhosted.org/packages/6c/13/281094759df87b23b3c02dc4a16603ab08ea54d7f6acfeb69f3341137c7a/xonsh-0.19.2-py310-none-any.whl", hash = "sha256:ec7f163fd3a4943782aa34069d4e72793328c916a5975949dbec8536cbfc089b", size = 642301 }, | ||||||
|     { url = "https://files.pythonhosted.org/packages/77/4e/e487e82349866b245c559433c9ba626026a2e66bd17d7f9ac1045082f146/xonsh-0.19.1-py311-none-any.whl", hash = "sha256:c176e515b0260ab803963d1f0924f1e32f1064aa6fd5d791aa0cf6cda3a924ae", size = 640680 }, |     { url = "https://files.pythonhosted.org/packages/29/41/a51e4c3918fe9a293b150cb949b1b8c6d45eb17dfed480dcb76ea43df4e7/xonsh-0.19.2-py311-none-any.whl", hash = "sha256:53c45f7a767901f2f518f9b8dd60fc653e0498e56e89825e1710bb0859985049", size = 642286 }, | ||||||
|     { url = "https://files.pythonhosted.org/packages/5d/88/09060815548219b8f6953a06c247cb5c92d03cbdf7a02a980bda1b5754db/xonsh-0.19.1-py312-none-any.whl", hash = "sha256:fe1266c86b117aced3bdc4d5972420bda715864435d0bd3722d63451e8001036", size = 640604 }, |     { url = "https://files.pythonhosted.org/packages/0a/93/9a77b731f492fac27c577dea2afb5a2bcc2a6a1c79be0c86c95498060270/xonsh-0.19.2-py312-none-any.whl", hash = "sha256:b24c619aa52b59eae4d35c4195dba9b19a2c548fb5c42c6f85f2b8ccb96807b5", size = 642386 }, | ||||||
|     { url = "https://files.pythonhosted.org/packages/83/ff/7873cb8184cffeafddbf861712831c2baa2e9dbecdbfd33b1228f0db0019/xonsh-0.19.1-py313-none-any.whl", hash = "sha256:3f158b6fc0bba954e0b989004d4261bafc4bd94c68c2abd75b825da23e5a869c", size = 641166 }, |     { url = "https://files.pythonhosted.org/packages/be/75/070324769c1ff88d971ce040f4f486339be98e0a365c8dd9991eb654265b/xonsh-0.19.2-py313-none-any.whl", hash = "sha256:c53ef6c19f781fbc399ed1b382b5c2aac2125010679a3b61d643978273c27df0", size = 642873 }, | ||||||
|     { url = "https://files.pythonhosted.org/packages/cc/03/b9f8dd338df0a330011d104e63d4d0acd8bbbc1e990ff049487b6bdf585d/xonsh-0.19.1-py39-none-any.whl", hash = "sha256:a900a6eb87d881a7ef90b1ac8522ba3699582f0bcb1e9abd863d32f6d63faf04", size = 632912 }, |     { url = "https://files.pythonhosted.org/packages/fa/cb/2c7ccec54f5b0e73fdf7650e8336582ff0347d9001c5ef8271dc00c034fe/xonsh-0.19.2-py39-none-any.whl", hash = "sha256:bcc0225dc3847f1ed2f175dac6122fbcc54cea67d9c2dc2753d9615e2a5ff284", size = 634602 }, | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| [[package]] |  | ||||||
| name = "xonsh-vox-tabcomplete" |  | ||||||
| version = "0.5" |  | ||||||
| source = { registry = "https://pypi.org/simple" } |  | ||||||
| wheels = [ |  | ||||||
|     { url = "https://files.pythonhosted.org/packages/ab/fd/af0c2ee6c067c2a4dc64ec03598c94de1f6ec5984b3116af917f3add4a16/xonsh_vox_tabcomplete-0.5-py3-none-any.whl", hash = "sha256:9701b198180f167071234e77eab87b7befa97c1873b088d0b3fbbe6d6d8dcaad", size = 14381 }, |  | ||||||
| ] |  | ||||||
| 
 |  | ||||||
| [[package]] |  | ||||||
| name = "xontrib-vox" |  | ||||||
| version = "0.0.1" |  | ||||||
| source = { registry = "https://pypi.org/simple" } |  | ||||||
| dependencies = [ |  | ||||||
|     { name = "xonsh" }, |  | ||||||
| ] |  | ||||||
| sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/a5db68a1f2e4036f7ff4c8546b1cbe29edee2ff40e0ff931836745988b79/xontrib-vox-0.0.1.tar.gz", hash = "sha256:c1f0b155992b4b0ebe6dcfd651084a8707ade7372f7e456c484d2a85339d9907", size = 16504 } |  | ||||||
| wheels = [ |  | ||||||
|     { url = "https://files.pythonhosted.org/packages/23/58/dcdf11849c8340033da00669527ce75d8292a4e8d82605c082ed236a081a/xontrib_vox-0.0.1-py3-none-any.whl", hash = "sha256:df2bbb815832db5b04d46684f540eac967ee40ef265add2662a95d6947d04c70", size = 13467 }, |  | ||||||
| ] | ] | ||||||
|  |  | ||||||
		Loading…
	
		Reference in New Issue