forked from goodboy/tractor

More-n-more scope annots in logging

aio_abandons
Tyler Goodlet 2024-07-04 15:06:15 -04:00
parent b56352b0e4
commit 5f8f8e98ba
4 changed files with 42 additions and 26 deletions

View File

@@ -2376,8 +2376,9 @@ async def open_context_from_portal(
     and ctx.cancel_acked
 ):
     log.cancel(
-        f'Context cancelled by {ctx.side!r}-side task\n'
-        f'|_{ctx._task}\n\n'
+        f'Context cancelled by local {ctx.side!r}-side task\n'
+        f'c)>\n'
+        f' |_{ctx._task}\n\n'
         f'{repr(scope_err)}\n'
     )
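
For reference: `c)>` is one of the scope-annotation glyphs this commit threads through the runtime's cancel logging. A minimal sketch of how the new message body renders, using hypothetical stand-ins for `ctx.side`, `ctx._task` and `scope_err` (not tractor's actual values):

# runnable mock-up with hypothetical stand-in values
side: str = 'caller'                                  # stand-in for ctx.side
task_repr: str = "<Task 'open_context_from_portal'>"  # stand-in for ctx._task
scope_err = RuntimeError('example error')             # stand-in for scope_err

print(
    f'Context cancelled by local {side!r}-side task\n'
    f'c)>\n'
    f' |_{task_repr}\n\n'
    f'{repr(scope_err)}\n'
)
# Context cancelled by local 'caller'-side task
# c)>
#  |_<Task 'open_context_from_portal'>
#
# RuntimeError('example error')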
@@ -2393,8 +2394,10 @@ async def open_context_from_portal(
         # type_only=True,
     )
     log.cancel(
-        f'Context terminated due to local {ctx.side!r}-side error:\n\n'
-        f'{ctx.chan.uid} => {outcome_str}\n'
+        f'Context terminated due to {ctx.side!r}-side\n\n'
+        # TODO: do an x)> on err and c)> only for ctxc?
+        f'c)> {outcome_str}\n'
+        f' |_{ctx.repr_rpc}\n'
     )
     # FINALLY, remove the context from runtime tracking and

View File

@@ -243,6 +243,7 @@ def _trio_main(
         nest_from_op(
             input_op=')>',  # like a "closed-to-play"-icon from super perspective
             tree_str=actor_info,
+            back_from_op=1,
         )
     )
     try:
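
`nest_from_op()` is the devx helper that draws these glyph-annotated trees; the hunk only adds a `back_from_op` offset to its call. A rough, hypothetical sketch of the nesting idea, under the assumption that `back_from_op` controls how far the tree edge sits back from the operator glyph (the real implementation lives in tractor's devx code and may differ):

def nest_from_op_sketch(
    input_op: str,          # operator glyph, e.g. ')>' for a "closed" actor
    tree_str: str,          # (possibly multi-line) repr to nest under it
    back_from_op: int = 0,  # assumed: extra setback of the tree edge
) -> str:
    # hang the tree off the glyph with a '|_' edge on its first line
    indent: str = ' ' * (1 + back_from_op)
    first, *rest = tree_str.splitlines()
    lines: list[str] = [f'{indent}|_{first}']
    lines += [f'{indent}  {ln}' for ln in rest]
    return input_op + '\n' + '\n'.join(lines)

print(nest_from_op_sketch(')>', "Actor('root')\n.uid: ('root', '1a2b')", back_from_op=1))
# )>
#   |_Actor('root')
#     .uid: ('root', '1a2b')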

View File

@@ -263,11 +263,11 @@ class Portal:
             return False
         reminfo: str = (
-            f'Portal.cancel_actor() => {self.channel.uid}\n'
-            f'|_{chan}\n'
+            f'c)=> {self.channel.uid}\n'
+            f' |_{chan}\n'
         )
         log.cancel(
-            f'Requesting runtime cancel for peer\n\n'
+            f'Requesting actor-runtime cancel for peer\n\n'
             f'{reminfo}'
         )

View File

@@ -439,10 +439,10 @@ class Actor:
             TransportClosed,
         ):
-            # XXX: This may propagate up from ``Channel._aiter_recv()``
-            # and ``MsgpackStream._inter_packets()`` on a read from the
+            # XXX: This may propagate up from `Channel._aiter_recv()`
+            # and `MsgpackStream._inter_packets()` on a read from the
             # stream particularly when the runtime is first starting up
-            # inside ``open_root_actor()`` where there is a check for
+            # inside `open_root_actor()` where there is a check for
             # a bound listener on the "arbiter" addr. the reset will be
             # because the handshake was never meant took place.
             log.runtime(
@@ -507,8 +507,9 @@ class Actor:
             )
         except trio.Cancelled:
             log.cancel(
-                'IPC transport msg loop was cancelled for \n'
-                f'|_{chan}\n'
+                'IPC transport msg loop was cancelled\n'
+                f'c)>\n'
+                f' |_{chan}\n'
             )
             raise
@@ -545,7 +546,7 @@ class Actor:
         ):
             log.cancel(
-                'Waiting on cancel request to peer\n'
+                'Waiting on cancel request to peer..\n'
+                f'c)=>\n'
+                f' |_{chan.uid}\n'
             )
@@ -646,10 +647,14 @@ class Actor:
         ):
             report: str = (
                 'Timed out waiting on local actor-nursery to exit?\n'
-                f'{local_nursery}\n'
+                f'c)>\n'
+                f' |_{local_nursery}\n'
             )
             if children := local_nursery._children:
-                report += f' |_{pformat(children)}\n'
+                # indent from above local-nurse repr
+                report += (
+                    f' |_{pformat(children)}\n'
+                )
             log.warning(report)
@@ -1236,8 +1241,9 @@ class Actor:
         # TODO: just use the new `Context.repr_rpc: str` (and
         # other) repr fields instead of doing this all manual..
         msg: str = (
-            f'Runtime cancel request from {requester_type}:\n\n'
-            f'<= .cancel(): {requesting_uid}\n\n'
+            f'Actor-runtime cancel request from {requester_type}\n\n'
+            f'<=c) {requesting_uid}\n'
+            f' |_{self}\n'
         )
         # TODO: what happens here when we self-cancel tho?
@@ -1471,11 +1477,11 @@ class Actor:
         )
         log.cancel(
             f'Cancelling {descr} RPC tasks\n\n'
-            f'<= canceller: {req_uid}\n'
+            f'<=c) {req_uid} [canceller]\n'
             f'{rent_chan_repr}'
-            f'=> cancellee: {self.uid}\n'
-            f' |_{self}.cancel_rpc_tasks()\n'
-            f' |_tasks: {len(tasks)}\n'
+            f'c)=> {self.uid} [cancellee]\n'
+            f' |_{self} [with {len(tasks)} tasks]\n'
+            # f' |_tasks: {len(tasks)}\n'
             # f'{tasks_str}'
         )
         for (
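
Rendered, the reworked block pairs the inbound-request glyph `<=c)` with the outbound-effect glyph `c)=>` so the canceller/cancellee direction reads at a glance; a mock-up with hypothetical uids and task count:

# hypothetical stand-ins, just to show the rendered shape of the new block
descr: str = 'all'
req_uid = ('parent', 'a1b2')   # stand-in canceller uid
self_uid = ('child', 'c3d4')   # stand-in cancellee uid
rent_chan_repr: str = ''       # empty when no parent-chan repr applies
n_tasks: int = 3

print(
    f'Cancelling {descr} RPC tasks\n\n'
    f'<=c) {req_uid} [canceller]\n'
    f'{rent_chan_repr}'
    f'c)=> {self_uid} [cancellee]\n'
    f' |_Actor({self_uid}) [with {n_tasks} tasks]\n'
)
# Cancelling all RPC tasks
#
# <=c) ('parent', 'a1b2') [canceller]
# c)=> ('child', 'c3d4') [cancellee]
#  |_Actor(('child', 'c3d4')) [with 3 tasks]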
@@ -1932,9 +1938,15 @@ async def async_main(
         with CancelScope(shield=True):
             await actor._no_more_peers.wait()
-        teardown_report += ('-> All peer channels are complete\n')
+        teardown_report += (
+            '-> All peer channels are complete\n'
+        )
-    teardown_report += ('Actor runtime exited')
+    teardown_report += (
+        'Actor runtime exiting\n'
+        f'>)\n'
+        f'|_{actor}\n'
+    )
     log.info(teardown_report)