Compare commits

12 Commits
	| Author | SHA1 | Date | 
|---|---|---|
|  | e232d9dd06 | |
|  | 6b3cc72e5c | |
|  | 81c33bf550 | |
|  | fee1ee315c | |
|  | 22e62ed88e | |
|  | fdba9e42d3 | |
|  | 3ec72e6af8 | |
|  | c538cb3004 | |
|  | 8842b758d7 | |
|  | 54ee624632 | |
|  | e8f2dfc088 | |
|  | d2282f4275 | |

|  | @ -47,6 +47,7 @@ dependencies = [ | |||
|   "msgspec>=0.19.0", | ||||
|   "cffi>=1.17.1", | ||||
|   "bidict>=0.23.1", | ||||
|   "platformdirs>=4.4.0", | ||||
| ] | ||||
| 
 | ||||
| # ------ project ------ | ||||
|  | @ -63,7 +64,6 @@ dev = [ | |||
|   "stackscope>=0.2.2,<0.3", | ||||
|   # ^ requires this? | ||||
|   "typing-extensions>=4.14.1", | ||||
| 
 | ||||
|   "pyperclip>=1.9.0", | ||||
|   "prompt-toolkit>=3.0.50", | ||||
|   "xonsh>=0.19.2", | ||||
|  |  | |||
|  | @ -732,15 +732,21 @@ def test_aio_errors_and_channel_propagates_and_closes( | |||
| 
 | ||||
| 
 | ||||
| async def aio_echo_server( | ||||
|     to_trio: trio.MemorySendChannel, | ||||
|     from_trio: asyncio.Queue, | ||||
|     chan: to_asyncio.LinkedTaskChannel, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     An IPC-msg "echo server" with msgs received and relayed by | ||||
|     a parent `trio.Task` into a child `asyncio.Task` | ||||
|     and then repeated back to that local parent (`trio.Task`) | ||||
|     and sent again back to the original calling remote actor. | ||||
| 
 | ||||
|     ''' | ||||
|     to_trio.send_nowait('start') | ||||
|     # same semantics as `trio.TaskStatus.started()` | ||||
|     chan.started_nowait('start') | ||||
| 
 | ||||
|     while True: | ||||
|         try: | ||||
|             msg = await from_trio.get() | ||||
|             msg = await chan.get() | ||||
|         except to_asyncio.TrioTaskExited: | ||||
|             print( | ||||
|                 'breaking aio echo loop due to `trio` exit!' | ||||
|  | @ -748,7 +754,7 @@ async def aio_echo_server( | |||
|             break | ||||
| 
 | ||||
|         # echo the msg back | ||||
|         to_trio.send_nowait(msg) | ||||
|         chan.send_nowait(msg) | ||||
| 
 | ||||
|         # if we get the terminate sentinel | ||||
|         # break the echo loop | ||||
|  | @ -765,7 +771,10 @@ async def trio_to_aio_echo_server( | |||
| ): | ||||
|     async with to_asyncio.open_channel_from( | ||||
|         aio_echo_server, | ||||
|     ) as (first, chan): | ||||
|     ) as ( | ||||
|         first,  # value from `chan.started_nowait()` above | ||||
|         chan, | ||||
|     ): | ||||
|         assert first == 'start' | ||||
| 
 | ||||
|         await ctx.started(first) | ||||
|  | @ -776,7 +785,8 @@ async def trio_to_aio_echo_server( | |||
|                 await chan.send(msg) | ||||
| 
 | ||||
|                 out = await chan.receive() | ||||
|                 # echo back to parent actor-task | ||||
| 
 | ||||
|                 # echo back to parent-actor's remote parent-ctx-task! | ||||
|                 await stream.send(out) | ||||
| 
 | ||||
|                 if out is None: | ||||
|  | @ -1090,14 +1100,12 @@ def test_sigint_closes_lifetime_stack( | |||
| 
 | ||||
| 
 | ||||
| # ?TODO asyncio.Task fn-deco? | ||||
| # -[ ] do sig checkingat import time like @context? | ||||
| # -[ ] maybe name it @aio_task ?? | ||||
| # -[ ] chan: to_asyncio.InterloopChannel ?? | ||||
| # -[ ] do fn-sig checking at import time like @context? | ||||
| #  |_[ ] maybe name it @a(sync)io_task ?? | ||||
| # @asyncio_task  <- not bad ?? | ||||
| async def raise_before_started( | ||||
|     # from_trio: asyncio.Queue, | ||||
|     # to_trio: trio.abc.SendChannel, | ||||
|     chan: to_asyncio.LinkedTaskChannel, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     `asyncio.Task` entry point which RTEs before calling | ||||
|  |  | |||
|  | @ -0,0 +1,150 @@ | |||
| ''' | ||||
| `tractor.log`-wrapping unit tests. | ||||
| 
 | ||||
| ''' | ||||
| from pathlib import Path | ||||
| import shutil | ||||
| 
 | ||||
| import pytest | ||||
| import tractor | ||||
| from tractor import _code_load | ||||
| 
 | ||||
| 
 | ||||
| def test_root_pkg_not_duplicated_in_logger_name(): | ||||
|     ''' | ||||
|     When both `pkg_name` and `name` are passed and they have | ||||
|     a common `<root_name>.< >` prefix, ensure that it is not | ||||
|     duplicated in the child's `StackLevelAdapter.name: str`. | ||||
| 
 | ||||
|     ''' | ||||
|     project_name: str = 'pylib' | ||||
|     pkg_path: str = 'pylib.subpkg.mod' | ||||
| 
 | ||||
|     proj_log = tractor.log.get_logger( | ||||
|         pkg_name=project_name, | ||||
|         mk_sublog=False, | ||||
|     ) | ||||
| 
 | ||||
|     sublog = tractor.log.get_logger( | ||||
|         pkg_name=project_name, | ||||
|         name=pkg_path, | ||||
|     ) | ||||
| 
 | ||||
|     assert proj_log is not sublog | ||||
|     assert sublog.name.count(proj_log.name) == 1 | ||||
|     assert 'mod' not in sublog.name | ||||
| 
 | ||||
| 
 | ||||
| def test_implicit_mod_name_applied_for_child( | ||||
|     testdir: pytest.Pytester, | ||||
|     loglevel: str, | ||||
| ): | ||||
|     ''' | ||||
|     Verify that when `.log.get_logger(pkg_name='pylib')` is called | ||||
|     from a given sub-mod from within the `pylib` pkg-path, we | ||||
|     implicitly set the equiv of `name=__name__` from the caller's | ||||
|     module. | ||||
| 
 | ||||
|     ''' | ||||
|     # tractor.log.get_console_log(level=loglevel) | ||||
|     proj_name: str = 'snakelib' | ||||
|     mod_code: str = ( | ||||
|         f'import tractor\n' | ||||
|         f'\n' | ||||
|         f'log = tractor.log.get_logger(pkg_name="{proj_name}")\n' | ||||
|     ) | ||||
| 
 | ||||
|     # create a sub-module for each pkg layer | ||||
|     _lib = testdir.mkpydir(proj_name) | ||||
|     pkg: Path = Path(_lib) | ||||
|     subpkg: Path = pkg / 'subpkg' | ||||
|     subpkg.mkdir() | ||||
| 
 | ||||
|     pkgmod: Path = subpkg / "__init__.py" | ||||
|     pkgmod.touch() | ||||
| 
 | ||||
|     _submod: Path = testdir.makepyfile( | ||||
|         _mod=mod_code, | ||||
|     ) | ||||
| 
 | ||||
|     pkg_mod = pkg / 'mod.py' | ||||
|     pkg_subpkg_submod = subpkg / 'submod.py' | ||||
|     shutil.copyfile( | ||||
|         _submod, | ||||
|         pkg_mod, | ||||
|     ) | ||||
|     shutil.copyfile( | ||||
|         _submod, | ||||
|         pkg_subpkg_submod, | ||||
|     ) | ||||
|     testdir.chdir() | ||||
| 
 | ||||
|     # XXX NOTE, once the "top level" pkg mod has been | ||||
|     # imported, we can then use `import` syntax to | ||||
|     # import its sub-pkgs and modules. | ||||
|     pkgmod = _code_load.load_module_from_path( | ||||
|         Path(pkg / '__init__.py'), | ||||
|         module_name=proj_name, | ||||
|     ) | ||||
|     pkg_root_log = tractor.log.get_logger( | ||||
|         pkg_name=proj_name, | ||||
|         mk_sublog=False, | ||||
|     ) | ||||
|     assert pkg_root_log.name == proj_name | ||||
|     assert not pkg_root_log.logger.getChildren() | ||||
| 
 | ||||
|     from snakelib import mod | ||||
|     assert mod.log.name == proj_name | ||||
| 
 | ||||
|     from snakelib.subpkg import submod | ||||
|     assert ( | ||||
|         submod.log.name | ||||
|         == | ||||
|         submod.__package__  # ?TODO, use this in `.get_logger()` instead? | ||||
|         == | ||||
|         f'{proj_name}.subpkg' | ||||
|     ) | ||||
| 
 | ||||
|     sub_logs = pkg_root_log.logger.getChildren() | ||||
|     assert len(sub_logs) == 1  # only one nested sub-pkg module | ||||
|     assert submod.log.logger in sub_logs | ||||
| 
 | ||||
|     # breakpoint() | ||||
| 
 | ||||
| 
 | ||||
| # TODO, moar tests against existing feats: | ||||
| # ------ - ------ | ||||
| # - [ ] color settings? | ||||
| # - [ ] header contents like, | ||||
| #   - actor + thread + task names from various conc-primitives, | ||||
| # - [ ] `StackLevelAdapter` extensions, | ||||
| #   - our custom levels/methods: `transport|runtime|cancel|pdb|devx` | ||||
| # - [ ] custom-headers support? | ||||
| # | ||||
| 
 | ||||
| # TODO, test driven dev of new-ideas/long-wanted feats, | ||||
| # ------ - ------ | ||||
| # - [ ] https://github.com/goodboy/tractor/issues/244 | ||||
| #  - [ ] @catern mentioned using a sync / deterministic sys | ||||
| #       and in particular `svlogd`? | ||||
| #       |_ https://smarden.org/runit/svlogd.8 | ||||
| 
 | ||||
| # - [ ] using adapter vs. filters? | ||||
| #    - https://stackoverflow.com/questions/60691759/add-information-to-every-log-message-in-python-logging/61830838#61830838 | ||||
| 
 | ||||
| # - [ ] `.at_least_level()` optimization which short circuits wtv | ||||
| #      `logging` is doing behind the scenes when the level filters | ||||
| #      the emission..? | ||||
| 
 | ||||
| # - [ ] use of `.log.get_console_log()` in subactors and the | ||||
| #    subtleties of ensuring it actually emits from a subproc. | ||||
| 
 | ||||
| # - [ ] this idea of activating per-subsys emissions with some | ||||
| #    kind of `.name` filter passed to the runtime or maybe configured | ||||
| #    via the root `StackLevelAdapter`? | ||||
| 
 | ||||
| # - [ ] use of `logging.dict.dictConfig()` to simplify the impl | ||||
| #      of any of ^^ ?? | ||||
| #    - https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig | ||||
| #    - https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema | ||||
| #    - https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig | ||||
|  | @ -0,0 +1,48 @@ | |||
| # tractor: structured concurrent "actors". | ||||
| # Copyright 2018-eternity Tyler Goodlet. | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| (Hot) code (re-)load utils for python. | ||||
| 
 | ||||
| ''' | ||||
| import importlib.util | ||||
| from pathlib import Path | ||||
| import sys | ||||
| from types import ModuleType | ||||
| 
 | ||||
| # ?TODO, move this into internal libs? | ||||
| # -[ ] we already use it in `modden.config._pymod` as well | ||||
| def load_module_from_path( | ||||
|     path: Path, | ||||
|     module_name: str|None = None, | ||||
| ) -> ModuleType: | ||||
|     ''' | ||||
|     Taken from SO, | ||||
|     https://stackoverflow.com/a/67208147 | ||||
| 
 | ||||
|     which is based on stdlib docs, | ||||
|     https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly | ||||
| 
 | ||||
|     ''' | ||||
|     module_name = module_name or path.stem | ||||
|     spec = importlib.util.spec_from_file_location( | ||||
|         module_name, | ||||
|         str(path), | ||||
|     ) | ||||
|     module = importlib.util.module_from_spec(spec) | ||||
|     sys.modules[module_name] = module | ||||
|     spec.loader.exec_module(module) | ||||
|     return module | ||||
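
A minimal usage sketch of the new `load_module_from_path()` helper (the on-disk path and module name below are hypothetical, purely for illustration): since the loaded module is registered in `sys.modules`, plain `import` syntax resolves it and its sub-modules afterwards, which is exactly what the logging test above relies on.

```python
from pathlib import Path
from tractor import _code_load

# load a package's `__init__.py` under an explicit name
# (hypothetical on-disk path)
pkg = _code_load.load_module_from_path(
    Path('mylib/__init__.py'),
    module_name='mylib',
)

# once registered in `sys.modules`, normal import syntax works,
# including for sub-modules of the loaded package
import mylib
assert mylib is pkg
```
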
|  | @ -284,6 +284,10 @@ async def _errors_relayed_via_ipc( | |||
|     try: | ||||
|         yield  # run RPC invoke body | ||||
| 
 | ||||
|     except TransportClosed: | ||||
|         log.exception('Tpt disconnect during remote-exc relay?') | ||||
|         raise | ||||
| 
 | ||||
|     # box and ship RPC errors for wire-transit via | ||||
|     # the task's requesting parent IPC-channel. | ||||
|     except ( | ||||
|  | @ -319,6 +323,9 @@ async def _errors_relayed_via_ipc( | |||
|                         and debug_kbis | ||||
|                     ) | ||||
|                 ) | ||||
|                 # TODO? better than `debug_filter` below? | ||||
|                 and | ||||
|                 not isinstance(err, TransportClosed) | ||||
|             ): | ||||
|                 # XXX QUESTION XXX: is there any case where we'll | ||||
|                 # want to debug IPC disconnects as a default? | ||||
|  | @ -327,13 +334,25 @@ async def _errors_relayed_via_ipc( | |||
|                 # recovery logic - the only case is some kind of | ||||
|                 # strange bug in our transport layer itself? Going | ||||
|                 # to keep this open ended for now. | ||||
|                 log.debug( | ||||
|                     'RPC task crashed, attempting to enter debugger\n' | ||||
|                     f'|_{ctx}' | ||||
|                 ) | ||||
| 
 | ||||
|                 if _state.debug_mode(): | ||||
|                     log.exception( | ||||
|                         f'RPC task crashed!\n' | ||||
|                         f'Attempting to enter debugger\n' | ||||
|                         f'\n' | ||||
|                         f'{ctx}' | ||||
|                     ) | ||||
| 
 | ||||
|                 entered_debug = await debug._maybe_enter_pm( | ||||
|                     err, | ||||
|                     api_frame=inspect.currentframe(), | ||||
| 
 | ||||
|                     # don't REPL any pseudo-expected tpt-disconnect | ||||
|                     # debug_filter=lambda exc: ( | ||||
|                     #     type (exc) not in { | ||||
|                     #         TransportClosed, | ||||
|                     #     } | ||||
|                     # ), | ||||
|                 ) | ||||
|                 if not entered_debug: | ||||
|                     # if we prolly should have entered the REPL but | ||||
|  | @ -675,6 +694,22 @@ async def _invoke( | |||
|                     f'{pretty_struct.pformat(return_msg)}\n' | ||||
|                 ) | ||||
|                 await chan.send(return_msg) | ||||
|                 # ?TODO, remove the below since .send() already | ||||
|                 # doesn't raise on tpt-closed? | ||||
|                 # try: | ||||
|                 #     await chan.send(return_msg) | ||||
|                 # except TransportClosed: | ||||
|                 #     log.exception( | ||||
|                 #         f"Failed send final result to 'parent'-side of IPC-ctx!\n" | ||||
|                 #         f'\n' | ||||
|                 #         f'{chan}\n' | ||||
|                 #         f'Channel already disconnected ??\n' | ||||
|                 #         f'\n' | ||||
|                 #         f'{pretty_struct.pformat(return_msg)}' | ||||
|                 #     ) | ||||
|                 #     # ?TODO? will this ever be true though? | ||||
|                 #     if chan.connected(): | ||||
|                 #         raise | ||||
| 
 | ||||
|             # NOTE: this happens IFF `ctx._scope.cancel()` is | ||||
|             # called by any of, | ||||
|  |  | |||
|  | @ -22,7 +22,6 @@ from __future__ import annotations | |||
| from contextvars import ( | ||||
|     ContextVar, | ||||
| ) | ||||
| import os | ||||
| from pathlib import Path | ||||
| from typing import ( | ||||
|     Any, | ||||
|  | @ -30,6 +29,7 @@ from typing import ( | |||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import platformdirs | ||||
| from trio.lowlevel import current_task | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|  | @ -172,23 +172,32 @@ def current_ipc_ctx( | |||
|     return ctx | ||||
| 
 | ||||
| 
 | ||||
| # std ODE (mutable) app state location | ||||
| _rtdir: Path = Path(os.environ['XDG_RUNTIME_DIR']) | ||||
| 
 | ||||
| 
 | ||||
| def get_rt_dir( | ||||
|     subdir: str = 'tractor' | ||||
|     subdir: str|Path|None = None, | ||||
| ) -> Path: | ||||
|     ''' | ||||
|     Return the user "runtime dir" where most userspace apps stick | ||||
|     their IPC and cache related system util-files; we take hold | ||||
|     of a `'XDG_RUNTIME_DIR'/tractor/` subdir by default. | ||||
|     Return the user "runtime dir", the file-sys location where most | ||||
|     userspace apps stick their IPC and cache related system | ||||
|     util-files. | ||||
| 
 | ||||
|     On Linux we use an `${XDG_RUNTIME_DIR}/tractor/` subdir by | ||||
|     default but equivalents are mapped for each platform using | ||||
|     the lovely `platformdirs`. | ||||
| 
 | ||||
|     ''' | ||||
|     rtdir: Path = _rtdir / subdir | ||||
|     if not rtdir.is_dir(): | ||||
|         rtdir.mkdir() | ||||
|     return rtdir | ||||
|     rt_dir: Path = Path( | ||||
|         platformdirs.user_runtime_dir( | ||||
|             appname='tractor', | ||||
|         ), | ||||
|     ) | ||||
|     if subdir: | ||||
|         rt_dir: Path = rt_dir / subdir | ||||
| 
 | ||||
|     if not rt_dir.is_dir(): | ||||
|         rt_dir.mkdir() | ||||
| 
 | ||||
|     return rt_dir | ||||
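
For a sense of what the `platformdirs` swap changes at runtime, a rough sketch follows; the exact paths vary by platform, user and session, so treat them as illustrative rather than guaranteed.

```python
import platformdirs

# e.g. on Linux (XDG): /run/user/<uid>/tractor
# other platforms get an equivalent per-user, runtime-scoped
# location via platformdirs' mapping.
print(platformdirs.user_runtime_dir(appname='tractor'))

# the optional `subdir` is appended (and created on first use),
# mirroring the old `${XDG_RUNTIME_DIR}/tractor/<subdir>` behaviour:
# rt_dir = get_rt_dir(subdir='ipc')
```
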
| 
 | ||||
| 
 | ||||
| def current_ipc_protos() -> list[str]: | ||||
|  |  | |||
|  | @ -561,6 +561,9 @@ async def _pause( | |||
|             return | ||||
| 
 | ||||
|         elif isinstance(pause_err, trio.Cancelled): | ||||
|             __tracebackhide__: bool = False | ||||
|             # XXX, unmask to REPL it. | ||||
|             # mk_pdb().set_trace(frame=inspect.currentframe()) | ||||
|             _repl_fail_report += ( | ||||
|                 'You called `tractor.pause()` from an already cancelled scope!\n\n' | ||||
|                 'Consider `await tractor.pause(shield=True)` to make it work B)\n' | ||||
|  |  | |||
tractor/log.py

|  | @ -14,11 +14,22 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Log like a forester! | ||||
| ''' | ||||
| An enhanced logging subsys. | ||||
| 
 | ||||
| """ | ||||
| An extended logging layer using (for now) the stdlib's `logging` | ||||
| + `colorlog` which embeds concurrency-primitive/runtime info into | ||||
| records (headers) to help you better grok your distributed systems | ||||
| built on `tractor`. | ||||
| 
 | ||||
| 
 | ||||
| ''' | ||||
| from collections.abc import Mapping | ||||
| from inspect import ( | ||||
|     FrameInfo, | ||||
|     getmodule, | ||||
|     stack, | ||||
| ) | ||||
| import sys | ||||
| import logging | ||||
| from logging import ( | ||||
|  | @ -26,8 +37,10 @@ from logging import ( | |||
|     Logger, | ||||
|     StreamHandler, | ||||
| ) | ||||
| import colorlog  # type: ignore | ||||
| from types import ModuleType | ||||
| import warnings | ||||
| 
 | ||||
| import colorlog  # type: ignore | ||||
| import trio | ||||
| 
 | ||||
| from ._state import current_actor | ||||
|  | @ -39,7 +52,7 @@ _default_loglevel: str = 'ERROR' | |||
| # Super sexy formatting thanks to ``colorlog``. | ||||
| # (NOTE: we use the '{' format style) | ||||
| # Here, `thin_white` is just the layperson's gray. | ||||
| LOG_FORMAT = ( | ||||
| LOG_FORMAT: str = ( | ||||
|     # "{bold_white}{log_color}{asctime}{reset}" | ||||
|     "{log_color}{asctime}{reset}" | ||||
|     " {bold_white}{thin_white}({reset}" | ||||
|  | @ -51,7 +64,7 @@ LOG_FORMAT = ( | |||
|     " {reset}{bold_white}{thin_white}{message}" | ||||
| ) | ||||
| 
 | ||||
| DATE_FORMAT = '%b %d %H:%M:%S' | ||||
| DATE_FORMAT: str = '%b %d %H:%M:%S' | ||||
| 
 | ||||
| # FYI, ERROR is 40 | ||||
| # TODO: use a `bidict` to avoid the :155 check? | ||||
|  | @ -75,7 +88,10 @@ STD_PALETTE = { | |||
|     'TRANSPORT': 'cyan', | ||||
| } | ||||
| 
 | ||||
| BOLD_PALETTE = { | ||||
| BOLD_PALETTE: dict[ | ||||
|     str, | ||||
|     dict[int, str], | ||||
| ] = { | ||||
|     'bold': { | ||||
|         level: f"bold_{color}" for level, color in STD_PALETTE.items()} | ||||
| } | ||||
|  | @ -97,10 +113,17 @@ def at_least_level( | |||
|     return False | ||||
| 
 | ||||
| 
 | ||||
| # TODO: this isn't showing the correct '{filename}' | ||||
| # as it did before.. | ||||
| # TODO, compare with using a "filter" instead? | ||||
| # - https://stackoverflow.com/questions/60691759/add-information-to-every-log-message-in-python-logging/61830838#61830838 | ||||
| #  |_corresponding dict-config, | ||||
| #    https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig/7507842#7507842 | ||||
| #  - [ ] what's the benefit/tradeoffs? | ||||
| # | ||||
| class StackLevelAdapter(LoggerAdapter): | ||||
|     ''' | ||||
|     A (software) stack oriented logger "adapter". | ||||
| 
 | ||||
|     ''' | ||||
|     def at_least_level( | ||||
|         self, | ||||
|         level: str, | ||||
|  | @ -284,7 +307,9 @@ class ActorContextInfo(Mapping): | |||
| 
 | ||||
| def get_logger( | ||||
|     name: str|None = None, | ||||
|     _root_name: str = _proj_name, | ||||
|     pkg_name: str = _proj_name, | ||||
|     # XXX, deprecated, use ^ | ||||
|     _root_name: str|None = None, | ||||
| 
 | ||||
|     logger: Logger|None = None, | ||||
| 
 | ||||
|  | @ -293,22 +318,89 @@ def get_logger( | |||
|     #  |_https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig | ||||
|     #  |_https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema | ||||
|     subsys_spec: str|None = None, | ||||
|     mk_sublog: bool = True, | ||||
| 
 | ||||
| ) -> StackLevelAdapter: | ||||
|     ''' | ||||
|     Return the `tractor`-library root logger or a sub-logger for | ||||
|     `name` if provided. | ||||
| 
 | ||||
|     When `name` is left null we try to auto-detect the caller's | ||||
|     `mod.__name__` and use that as the sub-logger key. | ||||
|     This allows for example creating a module level instance like, | ||||
| 
 | ||||
|     .. code:: python | ||||
| 
 | ||||
|         log = tractor.log.get_logger(pkg_name='mylib') | ||||
| 
 | ||||
|     and by default all console record headers will show the correct | ||||
|     sub-pkg + py-module-file of the caller of any | ||||
|     `log.<level>()`-method. | ||||
| 
 | ||||
|     ''' | ||||
|     if _root_name: | ||||
|         msg: str = ( | ||||
|             'The `_root_name: str` param of `get_logger()` is now deprecated.\n' | ||||
|             'Use the new `pkg_name: str` instead, it is the same usage.\n' | ||||
|         ) | ||||
|         warnings.warn( | ||||
|             msg, | ||||
|             DeprecationWarning, | ||||
|             stacklevel=2, | ||||
|         ) | ||||
|     pkg_name: str = _root_name or pkg_name | ||||
|     log: Logger | ||||
|     log = rlog = logger or logging.getLogger(_root_name) | ||||
|     log = rlog = logger or logging.getLogger(pkg_name) | ||||
| 
 | ||||
|     # Implicitly introspect the caller's module-name whenever `name` | ||||
|     # is left as the null default. | ||||
|     # | ||||
|     # When the `pkg_name` is found in the `mod.__name__` we presume | ||||
|     # this instance can be created as a sub-`StackLevelAdapter` and | ||||
|     # that the intention is to get free module-path tracing and | ||||
|     # filtering (well, once we implement that) oriented around the | ||||
|     # py-module code hierarchy of the consuming project. | ||||
|     if ( | ||||
|         pkg_name != _proj_name | ||||
|         and | ||||
|         name is None | ||||
|         and | ||||
|         mk_sublog | ||||
|     ): | ||||
|         callstack: list[FrameInfo] = stack() | ||||
|         caller_fi: FrameInfo = callstack[1] | ||||
|         caller_mod: ModuleType = getmodule(caller_fi.frame) | ||||
|         if caller_mod: | ||||
|             # ?how is this `mod.__name__` defined? | ||||
|             # -> well by how the mod is imported.. | ||||
|             # |_https://stackoverflow.com/a/15883682 | ||||
|             mod_name: str = caller_mod.__name__ | ||||
|             mod_pkg: str = caller_mod.__package__ | ||||
|             log.info( | ||||
|                 f'Generating sub-logger name,\n' | ||||
|                 f'{mod_pkg}.{mod_name}\n' | ||||
|             ) | ||||
|             # if pkg_name in caller_mod.__package__: | ||||
|             #     from tractor.devx.debug import mk_pdb | ||||
|             #     mk_pdb().set_trace() | ||||
| 
 | ||||
|             if ( | ||||
|                 pkg_name | ||||
|                 # and | ||||
|                 # pkg_name in mod_name | ||||
|             ): | ||||
|                 name = mod_name | ||||
| 
 | ||||
|     # XXX, lowlevel debuggin.. | ||||
|     # if pkg_name != _proj_name: | ||||
|         # from tractor.devx.debug import mk_pdb | ||||
|         # mk_pdb().set_trace() | ||||
| 
 | ||||
|     if ( | ||||
|         name | ||||
|         and | ||||
|         name != _proj_name | ||||
|     ): | ||||
| 
 | ||||
|         # NOTE: for handling for modules that use `get_logger(__name__)` | ||||
|         # we make the following stylistic choice: | ||||
|         # - always avoid duplicate project-package token | ||||
|  | @ -318,24 +410,63 @@ def get_logger( | |||
|         #   since in python the {filename} is always this same | ||||
|         #   module-file. | ||||
| 
 | ||||
|         sub_name: None|str = None | ||||
|         rname, _, sub_name = name.partition('.') | ||||
|         pkgpath, _, modfilename = sub_name.rpartition('.') | ||||
|         rname: str = pkg_name | ||||
|         pkg_path: str = name | ||||
| 
 | ||||
|         # NOTE: for tractor itself never include the last level | ||||
|         # module key in the name such that something like: eg. | ||||
|         # 'tractor.trionics._broadcast` only includes the first | ||||
|         # 2 tokens in the (coloured) name part. | ||||
|         if rname == 'tractor': | ||||
|             sub_name = pkgpath | ||||
|         # ex. modden.runtime.progman | ||||
|         # -> rname='modden', _, pkg_path='runtime.progman' | ||||
|         if pkg_name in name: | ||||
|             rname, _, pkg_path = name.partition('.') | ||||
| 
 | ||||
|         if _root_name in sub_name: | ||||
|             duplicate, _, sub_name = sub_name.partition('.') | ||||
|         # ex. modden.runtime.progman | ||||
|         # -> pkgpath='runtime', _, leaf_mod='progman' | ||||
|         subpkg_path, _, leaf_mod = pkg_path.rpartition('.') | ||||
| 
 | ||||
|         if not sub_name: | ||||
|         # NOTE: special usage for passing `name=__name__`, | ||||
|         # | ||||
|         # - remove duplication of any root-pkg-name in the | ||||
|         #   (sub/child-)logger name; i.e. never include the | ||||
|         #   `pkg_name` *twice* in the top-most-pkg-name/level | ||||
|         # | ||||
|         # -> this happens normally since it is added to `.getChild()` | ||||
|         #   and as the name of its root-logger. | ||||
|         # | ||||
|         # => So for ex. (module key in the name) something like | ||||
|         #   `name='tractor.trionics._broadcast` is passed, | ||||
|         #   only includes the first 2 sub-pkg name-tokens in the | ||||
|         #   child-logger's name; the colored "pkg-namespace" header | ||||
|         #   will then correctly show the same value as `name`. | ||||
|         if rname == pkg_name: | ||||
|             pkg_path = subpkg_path | ||||
| 
 | ||||
|         # XXX, do some double-checks for duplication of, | ||||
|         # - root-pkg-name, already in root logger | ||||
|         # - leaf-module-name already in `{filename}` header-field | ||||
|         if pkg_name in pkg_path: | ||||
|             _duplicate, _, pkg_path = pkg_path.partition('.') | ||||
|             if _duplicate: | ||||
|                 # assert _duplicate == rname | ||||
|                 _root_log.warning( | ||||
|                     f'Duplicate pkg-name in sub-logger key?\n' | ||||
|                     f'pkg_name = {pkg_name!r}\n' | ||||
|                     f'pkg_path = {pkg_path!r}\n' | ||||
|                 ) | ||||
| 
 | ||||
|         if ( | ||||
|             leaf_mod | ||||
|             and | ||||
|             leaf_mod in pkg_path | ||||
|         ): | ||||
|             _root_log.warning( | ||||
|                 f'Duplicate leaf-module-name in sub-logger key?\n' | ||||
|                 f'leaf_mod = {leaf_mod!r}\n' | ||||
|                 f'pkg_path = {pkg_path!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         if not pkg_path: | ||||
|             log = rlog | ||||
|         else: | ||||
|             log = rlog.getChild(sub_name) | ||||
|         elif mk_sublog: | ||||
|             log = rlog.getChild(pkg_path) | ||||
| 
 | ||||
|         log.level = rlog.level | ||||
| 
 | ||||
|  | @ -350,8 +481,13 @@ def get_logger( | |||
|     for name, val in CUSTOM_LEVELS.items(): | ||||
|         logging.addLevelName(val, name) | ||||
| 
 | ||||
|         # ensure customs levels exist as methods | ||||
|         assert getattr(logger, name.lower()), f'Logger does not define {name}' | ||||
|         # ensure our custom adapter levels exist as methods | ||||
|         assert getattr( | ||||
|             logger, | ||||
|             name.lower() | ||||
|         ), ( | ||||
|             f'Logger does not define {name}' | ||||
|         ) | ||||
| 
 | ||||
|     return logger | ||||
| 
 | ||||
|  | @ -425,4 +561,4 @@ def get_loglevel() -> str: | |||
| 
 | ||||
| 
 | ||||
| # global module logger for tractor itself | ||||
| log: StackLevelAdapter = get_logger('tractor') | ||||
| _root_log: StackLevelAdapter = get_logger('tractor') | ||||
|  |  | |||
|  | @ -94,10 +94,14 @@ else: | |||
|     QueueShutDown = False | ||||
| 
 | ||||
| 
 | ||||
| # TODO, generally speaking we can generalize this abstraction, a "SC linked | ||||
| # parent->child task pair", as the same "supervision scope primitive" | ||||
| # **that is** our `._context.Context` with the only difference being | ||||
| # in how the tasks conduct msg-passing comms. | ||||
| # TODO, generally speaking we can generalize this abstraction as, | ||||
| # | ||||
| # > A "SC linked, inter-event-loop" channel for comms between | ||||
| # > a `parent: trio.Task` -> `child: asyncio.Task` pair. | ||||
| # | ||||
| # It is **very similar** in terms of its operation as a "supervision | ||||
| # scope primitive" to that of our `._context.Context` with the only | ||||
| # difference being in how the tasks conduct msg-passing comms. | ||||
| # | ||||
| # For `LinkedTaskChannel` we are passing the equivalent of (once you | ||||
| # include all the recently added  `._trio/aio_to_raise` | ||||
|  | @ -122,6 +126,7 @@ class LinkedTaskChannel( | |||
|     task scheduled in the host loop. | ||||
| 
 | ||||
|     ''' | ||||
|     # ?TODO, rename as `._aio_q` since it's 2-way? | ||||
|     _to_aio: asyncio.Queue | ||||
|     _from_aio: trio.MemoryReceiveChannel | ||||
| 
 | ||||
|  | @ -235,9 +240,11 @@ class LinkedTaskChannel( | |||
|     # | ||||
|     async def receive(self) -> Any: | ||||
|         ''' | ||||
|         Receive a value from the paired `asyncio.Task` with | ||||
|         Receive a value `trio.Task` <- `asyncio.Task`. | ||||
| 
 | ||||
|         Note the tasks in each loop are "SC linked" as a pair with | ||||
|         exception/cancel handling to teardown both sides on any | ||||
|         unexpected error. | ||||
|         unexpected error or cancellation. | ||||
| 
 | ||||
|         ''' | ||||
|         try: | ||||
|  | @ -261,15 +268,42 @@ class LinkedTaskChannel( | |||
|             ): | ||||
|                 raise err | ||||
| 
 | ||||
|     async def get(self) -> Any: | ||||
|         ''' | ||||
|         Receive a value `asyncio.Task` <- `trio.Task`. | ||||
| 
 | ||||
|         This is equiv to `await self._to_aio.get()`. | ||||
| 
 | ||||
|         ''' | ||||
|         return await self._to_aio.get() | ||||
| 
 | ||||
|     async def send(self, item: Any) -> None: | ||||
|         ''' | ||||
|         Send a value through to the asyncio task presuming | ||||
|         it defines a ``from_trio`` argument, if it does not | ||||
|         Send a value `trio.Task` -> `asyncio.Task`, presuming | ||||
|         the aio-side task defines a `from_trio` argument or makes | ||||
|         calls to `chan.get()`; if it does not | ||||
|         this method will raise an error. | ||||
| 
 | ||||
|         ''' | ||||
|         self._to_aio.put_nowait(item) | ||||
| 
 | ||||
|     # TODO? could we only compile-in this method on an instance | ||||
|     # handed to the `asyncio`-side, i.e. the fn invoked with | ||||
|     # `.open_channel_from()`. | ||||
|     def send_nowait( | ||||
|         self, | ||||
|         item: Any, | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Send a value FROM the `asyncio.Task` to | ||||
|         the `trio.Task` NON-BLOCKING. | ||||
| 
 | ||||
|         This is equiv to `self._to_trio.send_nowait()`. | ||||
| 
 | ||||
|         ''' | ||||
|         self._to_trio.send_nowait(item) | ||||
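
Putting `.started_nowait()`, `.get()` and `.send_nowait()` together with the `trio`-side `.send()`/`.receive()`, a rough sketch of the intended call pattern (mirroring the echo-server test updated earlier in this diff; actor/runtime setup is elided and this is assumed to run under an `infect_asyncio=True` actor):

```python
from tractor import to_asyncio


async def aio_echo(chan: to_asyncio.LinkedTaskChannel) -> None:
    # asyncio side: signal readiness, then echo whatever trio sends
    chan.started_nowait('start')
    while (msg := await chan.get()) is not None:
        chan.send_nowait(msg)


async def trio_side() -> None:
    # trio side: drive the paired asyncio task over the linked channel
    async with to_asyncio.open_channel_from(aio_echo) as (first, chan):
        assert first == 'start'
        await chan.send('ping')              # trio -> asyncio
        assert await chan.receive() == 'ping'
        await chan.send(None)                # terminate sentinel
```
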
| 
 | ||||
|     # TODO? needed? | ||||
|     # async def wait_aio_complete(self) -> None: | ||||
|     #     await self._aio_task_complete.wait() | ||||
|  | @ -337,9 +371,12 @@ def _run_asyncio_task( | |||
| 
 | ||||
|     ''' | ||||
|     __tracebackhide__: bool = hide_tb | ||||
|     if not tractor.current_actor().is_infected_aio(): | ||||
|     if not (actor := tractor.current_actor()).is_infected_aio(): | ||||
|         raise RuntimeError( | ||||
|             "`infect_asyncio` mode is not enabled!?" | ||||
|             f'`infect_asyncio: bool` mode is not enabled ??\n' | ||||
|             f'Ensure you pass `ActorNursery.start_actor(infect_asyncio=True)`\n' | ||||
|             f'\n' | ||||
|             f'{actor}\n' | ||||
|         ) | ||||
| 
 | ||||
|     # ITC (inter task comms), these channel/queue names are mostly from | ||||
|  |  | |||
uv.lock

|  | @ -1,5 +1,5 @@ | |||
| version = 1 | ||||
| revision = 2 | ||||
| revision = 3 | ||||
| requires-python = ">=3.11" | ||||
| 
 | ||||
| [[package]] | ||||
|  | @ -236,6 +236,15 @@ wheels = [ | |||
|     { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "platformdirs" | ||||
| version = "4.4.0" | ||||
| source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } | ||||
| wheels = [ | ||||
|     { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "pluggy" | ||||
| version = "1.5.0" | ||||
|  | @ -378,6 +387,7 @@ dependencies = [ | |||
|     { name = "colorlog" }, | ||||
|     { name = "msgspec" }, | ||||
|     { name = "pdbp" }, | ||||
|     { name = "platformdirs" }, | ||||
|     { name = "tricycle" }, | ||||
|     { name = "trio" }, | ||||
|     { name = "wrapt" }, | ||||
|  | @ -403,6 +413,7 @@ requires-dist = [ | |||
|     { name = "colorlog", specifier = ">=6.8.2,<7" }, | ||||
|     { name = "msgspec", specifier = ">=0.19.0" }, | ||||
|     { name = "pdbp", specifier = ">=1.6,<2" }, | ||||
|     { name = "platformdirs", specifier = ">=4.4.0" }, | ||||
|     { name = "tricycle", specifier = ">=0.4.1,<0.5" }, | ||||
|     { name = "trio", specifier = ">0.27" }, | ||||
|     { name = "wrapt", specifier = ">=1.16.0,<2" }, | ||||
|  |  | |||