forked from goodboy/tractor

Use context for remote debugger locking

A context is the natural fit (vs. a receive stream) for locking the root
proc's tty usage via its `.started()` sync point. Simplify the
`_breakpoint()` routine to be a simple async func instead of all this
"returning a coroutine" stuff from before we decided that
`tractor.breakpoint()` must be async. Use the `runtime` log level for lock
logging, making it easier to trace.

branch: CI_increment_for_windows_bidirstreaming
parent: 929b6dcc83
commit: 970d8b371c
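For orientation, here is a minimal sketch (not part of this patch) of the
handshake pattern the commit adopts. It reuses only the tractor APIs that
appear in the diff below (`@tractor.context`, `Context.started()`,
`Portal.open_context()`, `Context.open_stream()`); the names `tty_locker`
and `request_tty_lock` are illustrative stand-ins, not code from the patch:

    import tractor

    @tractor.context
    async def tty_locker(ctx) -> None:
        # root-side callee: the real patch acquires the tty lock here
        await ctx.started('Locked')            # sync point: caller resumes
        async with ctx.open_stream() as stream:
            # block until the caller reports the debugger is done
            assert await stream.receive() == 'Unlock'

    async def request_tty_lock(portal) -> None:
        # child side: blocks on the `.started()` value until the root
        # confirms it holds the lock
        async with portal.open_context(tty_locker) as (ctx, first):
            assert first == 'Locked'
            async with ctx.open_stream() as stream:
                # ... run pdb here ...
                await stream.send('Unlock')    # let the root release the lock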
@@ -7,7 +7,6 @@ from functools import partial
 from contextlib import asynccontextmanager
 from typing import Awaitable, Tuple, Optional, Callable, AsyncIterator
 
-from async_generator import aclosing
 import tractor
 import trio
 
@@ -38,7 +37,9 @@ _pdb_release_hook: Optional[Callable] = None
 _in_debug = False
 
 # lock in root actor preventing multi-access to local tty
-_debug_lock = trio.StrictFIFOLock()
+_debug_lock: trio.StrictFIFOLock = trio.StrictFIFOLock()
+_debug_lock._uid = None
+_pdb_complete: trio.Event = None
 
 # XXX: set by the current task waiting on the root tty lock
 # and must be cancelled if this actor is cancelled via message
@@ -119,18 +120,21 @@ async def _acquire_debug_lock(uid: Tuple[str, str]) -> AsyncIterator[None]:
     """Acquire a actor local FIFO lock meant to mutex entry to a local
     debugger entry point to avoid tty clobbering by multiple processes.
     """
-    task_name = trio.lowlevel.current_task().name
-    try:
-        log.debug(
-            f"Attempting to acquire TTY lock, remote task: {task_name}:{uid}")
-        await _debug_lock.acquire()
-
-        log.debug(f"TTY lock acquired, remote task: {task_name}:{uid}")
-        yield
-
-    finally:
-        _debug_lock.release()
-        log.debug(f"TTY lock released, remote task: {task_name}:{uid}")
+    global _debug_lock
+
+    task_name = trio.lowlevel.current_task().name
+
+    log.runtime(
+        f"Attempting to acquire TTY lock, remote task: {task_name}:{uid}")
+
+    async with _debug_lock:
+
+        _debug_lock._uid = uid
+        log.runtime(f"TTY lock acquired, remote task: {task_name}:{uid}")
+        yield
+
+    _debug_lock._uid = None
+    log.runtime(f"TTY lock released, remote task: {task_name}:{uid}")
 
 
 # @contextmanager
@@ -144,73 +148,96 @@ async def _acquire_debug_lock(uid: Tuple[str, str]) -> AsyncIterator[None]:
 #         signal.signal(signal.SIGINT, prior_handler)
 
 
+@tractor.context
 async def _hijack_stdin_relay_to_child(
-    subactor_uid: Tuple[str, str]
-) -> AsyncIterator[str]:
-    # TODO: when we get to true remote debugging
-    # this will deliver stdin data
-    log.warning(f"Actor {subactor_uid} is WAITING on stdin hijack lock")
-    async with _acquire_debug_lock(subactor_uid):
-        log.warning(f"Actor {subactor_uid} ACQUIRED stdin hijack lock")
-
-        # with _disable_sigint():
-
-        yield 'Locked'
-
-        # wait for cancellation of stream by child
-        # indicating debugger is dis-engaged
-        await trio.sleep_forever()
+
+    ctx: tractor.context,
+    subactor_uid: Tuple[str, str]
+
+) -> AsyncIterator[str]:
+
+    global _pdb_complete
+
+    task_name = trio.lowlevel.current_task().name
+
+    # TODO: when we get to true remote debugging
+    # this will deliver stdin data?
+
+    log.debug(
+        "Attempting to acquire TTY lock, "
+        f"remote task: {task_name}:{subactor_uid}"
+    )
+
+    log.runtime(f"Actor {subactor_uid} is WAITING on stdin hijack lock")
+
+    async with _acquire_debug_lock(subactor_uid):
+
+        with trio.CancelScope(shield=True):
+
+            # indicate to child that we've locked stdio
+            await ctx.started('Locked')
+            log.runtime(f"Actor {subactor_uid} ACQUIRED stdin hijack lock")
+
+        # wait for unlock pdb by child
+        async with ctx.open_stream() as stream:
+            assert await stream.receive() == 'Unlock'
+
+    log.runtime(
+        f"TTY lock released, remote task: {task_name}:{subactor_uid}")
 
     log.debug(f"Actor {subactor_uid} RELEASED stdin hijack lock")
 
 
-# XXX: We only make this sync in case someone wants to
-# overload the ``breakpoint()`` built-in.
-def _breakpoint(debug_func) -> Awaitable[None]:
+async def _breakpoint(debug_func) -> Awaitable[None]:
     """``tractor`` breakpoint entry for engaging pdb machinery
     in subactors.
     """
     actor = tractor.current_actor()
-    do_unlock = trio.Event()
+    task_name = trio.lowlevel.current_task().name
 
+    global _pdb_complete
+    global _pdb_release_hook
+    global _in_debug
+
     async def wait_for_parent_stdin_hijack(
         task_status=trio.TASK_STATUS_IGNORED
     ):
         global _debugger_request_cs
 
         with trio.CancelScope() as cs:
             _debugger_request_cs = cs
 
             try:
                 async with get_root() as portal:
-                        async with portal.open_stream_from(
+
+                    # this syncs to child's ``Context.started()`` call.
+                    async with portal.open_context(
+
                         tractor._debug._hijack_stdin_relay_to_child,
                         subactor_uid=actor.uid,
-                        ) as stream:
-
-                                # block until first yield above
-                                async for val in stream:
+
+                    ) as (ctx, val):
 
                         assert val == 'Locked'
 
+                        async with ctx.open_stream() as stream:
+
                             # unblock local caller
                             task_status.started()
 
-                                    # with trio.CancelScope(shield=True):
-                                    await do_unlock.wait()
+                            await _pdb_complete.wait()
+                            await stream.send('Unlock')
 
-                                    # trigger cancellation of remote stream
-                                    break
             finally:
                 log.debug(f"Exiting debugger for actor {actor}")
                 global _in_debug
                 _in_debug = False
                 log.debug(f"Child {actor} released parent stdio lock")
 
-    async def _bp():
-        """Async breakpoint which schedules a parent stdio lock, and once complete
-        enters the ``pdbpp`` debugging console.
-        """
-        task_name = trio.lowlevel.current_task().name
-
-        global _in_debug
+    if not _pdb_complete or _pdb_complete.is_set():
+        _pdb_complete = trio.Event()
 
     # TODO: need a more robust check for the "root" actor
     if actor._parent_chan and not is_root_process():
@@ -225,38 +252,56 @@ def _breakpoint(debug_func) -> Awaitable[None]:
             # support for recursive entries to `tractor.breakpoint()`
             log.warning(
                 f"Actor {actor.uid} already has a debug lock, waiting...")
-                await do_unlock.wait()
+            await _pdb_complete.wait()
             await trio.sleep(0.1)
 
-            # assign unlock callback for debugger teardown hooks
-            global _pdb_release_hook
-            _pdb_release_hook = do_unlock.set
-
         # mark local actor as "in debug mode" to avoid recurrent
         # entries/requests to the root process
         _in_debug = task_name
 
+        # assign unlock callback for debugger teardown hooks
+        _pdb_release_hook = _pdb_complete.set
+
         # this **must** be awaited by the caller and is done using the
         # root nursery so that the debugger can continue to run without
         # being restricted by the scope of a new task nursery.
         await actor._service_n.start(wait_for_parent_stdin_hijack)
 
     elif is_root_process():
 
         # we also wait in the root-parent for any child that
         # may have the tty locked prior
-            if _debug_lock.locked():  # root process already has it; ignore
+        global _debug_lock
+
+        # TODO: wait, what about multiple root tasks acquiring
+        # it though.. shrug?
+        # root process (us) already has it; ignore
+        if _debug_lock._uid == actor.uid:
             return
 
         # XXX: since we need to enter pdb synchronously below,
         # we have to release the lock manually from pdb completion
        # callbacks. Can't think of a nicer way then this atm.
         await _debug_lock.acquire()
-            _pdb_release_hook = _debug_lock.release
+
+        _debug_lock._uid = actor.uid
+
+        # the lock must be released on pdb completion
+        def teardown():
+            global _pdb_complete
+            global _debug_lock
+
+            _debug_lock.release()
+            _debug_lock._uid = None
+            _pdb_complete.set()
+
+        _pdb_release_hook = teardown
 
     # block here one (at the appropriate frame *up* where
     # ``breakpoint()`` was awaited and begin handling stdio
     log.debug("Entering the synchronous world of pdb")
     debug_func(actor)
 
-    # user code **must** await this!
-    return _bp()
 
 
 def _mk_pdb():
     # XXX: setting these flags on the pdb instance are absolutely
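Since `_breakpoint()` is now a plain async function, `tractor.breakpoint()`
must be awaited from task code. A minimal usage sketch (assuming the
`debug_mode=True` nursery flag that drives this tty-locking machinery; the
`crasher`/`main` names are illustrative only, not part of this patch):

    import trio
    import tractor

    async def crasher() -> None:
        # remote task: drop into pdb inside the subactor
        await tractor.breakpoint()   # must be awaited now that it's async

    async def main() -> None:
        # debug mode enables the root-actor tty locking patched above
        async with tractor.open_nursery(debug_mode=True) as n:
            await n.run_in_actor(crasher)

    if __name__ == '__main__':
        trio.run(main)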