Compare commits

13 commits: 0223074d24...e4ed956eed
| Author | SHA1 | Date |
|---|---|---|
|  | e4ed956eed |  |
|  | 1bbb612857 |  |
|  | b1d041035c |  |
|  | fae35a0e5b |  |
|  | fda5c694c5 |  |
|  | 11a6b7d82b |  |
|  | b17756cc24 |  |
|  | df8fe99d89 |  |
|  | cebf2cc3ad |  |
|  | 3fa158053d |  |
|  | fbc226bf7d |  |
|  | 7d5ac3561e |  |
|  | 740f081d7e |  |
```diff
@@ -0,0 +1,130 @@
+with (import <nixpkgs> {});
+let
+  glibStorePath = lib.getLib glib;
+  zstdStorePath = lib.getLib zstd;
+  dbusStorePath = lib.getLib dbus;
+  libGLStorePath = lib.getLib libGL;
+  freetypeStorePath = lib.getLib freetype;
+  qt6baseStorePath = lib.getLib qt6.qtbase;
+  fontconfigStorePath = lib.getLib fontconfig;
+  libxkbcommonStorePath = lib.getLib libxkbcommon;
+  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;
+
+  qtpyStorePath = lib.getLib python312Packages.qtpy;
+  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
+  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
+  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
+  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;
+
+  xorgLibX11StorePath = lib.getLib xorg.libX11;
+  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
+  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
+  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
+  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
+  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
+  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
+in
+stdenv.mkDerivation {
+  name = "piker-qt6-uv";
+  buildInputs = [
+    # System requirements.
+    glib
+    dbus
+    zstd
+    libGL
+    freetype
+    qt6.qtbase
+    libgcc.lib
+    fontconfig
+    libxkbcommon
+
+    # Xorg requirements
+    xcb-util-cursor
+    xorg.libxcb
+    xorg.libX11
+    xorg.xcbutilwm
+    xorg.xcbutilimage
+    xorg.xcbutilerrors
+    xorg.xcbutilkeysyms
+    xorg.xcbutilrenderutil
+
+    # Python requirements.
+    python312Full
+    python312Packages.uv
+    python312Packages.qdarkstyle
+    python312Packages.rapidfuzz
+    python312Packages.pyqt6
+    python312Packages.qtpy
+  ];
+  src = null;
+  shellHook = ''
+    set -e
+
+    # Set the Qt plugin path
+    # export QT_DEBUG_PLUGINS=1
+
+    QTBASE_PATH="${qt6baseStorePath}/lib"
+    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
+    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"
+
+    LIB_GCC_PATH="${libgcc.lib}/lib"
+    GLIB_PATH="${glibStorePath}/lib"
+    ZSTD_PATH="${zstdStorePath}/lib"
+    DBUS_PATH="${dbusStorePath}/lib"
+    LIBGL_PATH="${libGLStorePath}/lib"
+    FREETYPE_PATH="${freetypeStorePath}/lib"
+    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
+    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"
+
+    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
+    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
+    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
+    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
+    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
+    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
+    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
+    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"
+
+    export LD_LIBRARY_PATH
+
+    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
+    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
+    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
+    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
+    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"
+
+    PATCH="$PATCH:$RPDFUZZ_PATH"
+    PATCH="$PATCH:$QDRKSTYLE_PATH"
+    PATCH="$PATCH:$QTPY_PATH"
+    PATCH="$PATCH:$PYQT6_PATH"
+    PATCH="$PATCH:$PYQT6_SIP_PATH"
+
+    export PATCH
+
+    # Install deps
+    uv lock
+
+  '';
+}
```
```diff
@@ -30,7 +30,8 @@ from types import ModuleType
 from typing import (
     Any,
     Iterator,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )
 
 import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
-from piker.data._symcache import SymbologyCache
-from ..log import get_logger
+from piker.log import get_logger
+
+if TYPE_CHECKING:
+    from piker.data._symcache import SymbologyCache
 
 log = get_logger(__name__)
 
@@ -493,6 +496,17 @@ class Account(Struct):
 
         _mktmap_table: dict[str, MktPair] | None = None,
 
+        only_require: list[str]|True = True,
+        # ^list of fqmes that are "required" to be processed from
+        # this ledger pass; we often don't care about others and
+        # definitely shouldn't always error in such cases.
+        # (eg. broker backend loaded that doesn't yet supsport the
+        # symcache but also, inside the paper engine we don't ad-hoc
+        # request `get_mkt_info()` for every symbol in the ledger,
+        # only the one for which we're simulating against).
+        # TODO, not sure if there's a better soln for this, ideally
+        # all backends get symcache support afap i guess..
+
     ) -> dict[str, Position]:
         '''
         Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@ class Account(Struct):
                 if _mktmap_table is None:
                     raise
 
+                required: bool = (
+                    only_require is True
+                    or (
+                        only_require is not True
+                        and
+                        fqme in only_require
+                    )
+                )
                 # XXX: caller is allowed to provide a fallback
                 # mktmap table for the case where a new position is
                 # being added and the preloaded symcache didn't
                 # have this entry prior (eg. with frickin IB..)
-                mkt = _mktmap_table[fqme]
+                if (
+                    not (mkt := _mktmap_table.get(fqme))
+                    and
+                    required
+                ):
+                    raise
+
+                elif not required:
+                    continue
+
+                else:
+                    # should be an entry retreived somewhere
+                    assert mkt
+
+
             if not (pos := pps.get(bs_mktid)):
 
```
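As a side note, the gating condition introduced above can be read in isolation as follows; this is a standalone sketch with made-up `fqme` strings, not code from the changeset:

```python
# Standalone sketch of the `only_require` gate added above: a missing
# market entry should only raise when the fqme was actually requested
# for this ledger pass.
def is_required(
    fqme: str,
    only_require: list[str] | bool = True,
) -> bool:
    # `True` means "every fqme in the ledger is required";
    # otherwise only members of the provided list are.
    return (
        only_require is True
        or (
            only_require is not True
            and fqme in only_require
        )
    )

# example fqmes below are hypothetical
assert is_required('btcusdt.spot.kucoin', True)
assert not is_required('ethusdt.spot.kucoin', ['btcusdt.spot.kucoin'])
```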
```diff
@@ -656,7 +691,7 @@ class Account(Struct):
     def write_config(self) -> None:
         '''
         Write the current account state to the user's account TOML file, normally
-        something like ``pps.toml``.
+        something like `pps.toml`.
 
         '''
         # TODO: show diff output?
```

```diff
@@ -23,6 +23,7 @@ from __future__ import annotations
 from contextlib import (
     asynccontextmanager as acm,
 )
+from functools import partial
 from types import ModuleType
 from typing import (
     TYPE_CHECKING,
@@ -190,14 +191,17 @@ def broker_init(
 
 
 async def spawn_brokerd(
-
     brokername: str,
     loglevel: str | None = None,
 
     **tractor_kwargs,
 
 ) -> bool:
+    '''
+    Spawn a `brokerd.<backendname>` subactor service daemon
+    using `pikerd`'s service mngr.
+
+    '''
     from piker.service._util import log  # use service mngr log
     log.info(f'Spawning {brokername} broker daemon')
 
@@ -217,27 +221,35 @@ async def spawn_brokerd(
 
     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
-    from piker.service import Services
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
+    )
     dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
-    portal = await Services.actor_n.start_actor(
-        dname,
-        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
-        debug_mode=Services.debug_mode,
+    mngr: ServiceMngr = get_service_mngr()
+    ctx: tractor.Context = await mngr.start_service(
+        daemon_name=dname,
+        ctx_ep=partial(
+            # signature of target root-task endpoint
+            daemon_fixture_ep,
+
+            # passed to daemon_fixture_ep(**kwargs)
+            brokername=brokername,
+            loglevel=loglevel,
+        ),
+        debug_mode=mngr.debug_mode,
+        loglevel=loglevel,
+        enable_modules=(
+            _data_mods
+            +
+            tractor_kwargs.pop('enable_modules')
+        ),
         **tractor_kwargs
     )
-
-    # NOTE: the service mngr expects an already spawned actor + its
-    # portal ref in order to do non-blocking setup of brokerd
-    # service nursery.
-    await Services.start_service_task(
-        dname,
-        portal,
-
-        # signature of target root-task endpoint
-        daemon_fixture_ep,
-        brokername=brokername,
-        loglevel=loglevel,
+    assert (
+        not ctx.cancel_called
+        and ctx.portal  # parent side
+        and dname in ctx.chan.uid  # subactor is named as desired
     )
     return True
 
@@ -262,8 +274,7 @@ async def maybe_spawn_brokerd(
     from piker.service import maybe_spawn_daemon
 
     async with maybe_spawn_daemon(
-        f'brokerd.{brokername}',
+        service_name=f'brokerd.{brokername}',
         service_task_target=spawn_brokerd,
         spawn_args={
             'brokername': brokername,
```
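For orientation, here is a caller-side sketch of how the reworked `spawn_brokerd` is typically reached through `maybe_spawn_brokerd`; the import path and backend name are assumptions for illustration, not part of the diff:

```python
# Hedged usage sketch: request (or reuse) a broker daemon through the
# helper modified above. The module path and backend name are assumed.
import trio
from piker.brokers._daemon import maybe_spawn_brokerd  # path assumed

async def main() -> None:
    # `maybe_spawn_brokerd` asks `pikerd`'s service mngr to spawn
    # `brokerd.<backend>` if it isn't already running and yields a
    # portal to it either way.
    async with maybe_spawn_brokerd(
        'kucoin',          # example backend name
        loglevel='info',
    ) as portal:
        print(f'brokerd portal up: {portal}')

if __name__ == '__main__':
    trio.run(main)
```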
```diff
@@ -567,6 +567,7 @@ class Client:
     ) -> str:
         return {
             'USDTM': 'usdtm_futes',
+            'SPOT': 'spot',
             # 'COINM': 'coin_futes',
             # ^-TODO-^ bc someone might want it..?
         }[pair.venue]
```

```diff
@@ -181,7 +181,6 @@ class FutesPair(Pair):
     quoteAsset: str  # 'USDT',
     quotePrecision: int  # 8,
     requiredMarginPercent: float  # '5.0000',
-    settlePlan: int  # 0,
     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
     triggerProtect: float  # '0.0500',
     underlyingSubType: list[str]  # ['PoW'],
```

```diff
@@ -111,6 +111,10 @@ class KucoinMktPair(Struct, frozen=True):
     quoteMaxSize: float
     quoteMinSize: float
     symbol: str  # our bs_mktid, kucoin's internal id
+    feeCategory: int
+    makerFeeCoefficient: float
+    takerFeeCoefficient: float
+    st: bool
 
 
 class AccountTrade(Struct, frozen=True):
@@ -593,7 +597,7 @@ async def get_client() -> AsyncGenerator[Client, None]:
     '''
     async with (
         httpx.AsyncClient(
-            base_url=f'https://api.kucoin.com/api',
+            base_url='https://api.kucoin.com/api',
         ) as trio_client,
     ):
         client = Client(httpx_client=trio_client)
@@ -637,7 +641,7 @@ async def open_ping_task(
                 await trio.sleep((ping_interval - 1000) / 1000)
                 await ws.send_msg({'id': connect_id, 'type': 'ping'})
 
-        log.info('Starting ping task for kucoin ws connection')
+        log.warning('Starting ping task for kucoin ws connection')
         n.start_soon(ping_server)
 
         yield
@@ -649,9 +653,14 @@ async def open_ping_task(
 async def get_mkt_info(
     fqme: str,
 
-) -> tuple[MktPair, KucoinMktPair]:
+) -> tuple[
+    MktPair,
+    KucoinMktPair,
+]:
     '''
-    Query for and return a `MktPair` and `KucoinMktPair`.
+    Query for and return both a `piker.accounting.MktPair` and
+    `KucoinMktPair` from provided `fqme: str`
+    (fully-qualified-market-endpoint).
 
     '''
     async with open_cached_client('kucoin') as client:
@@ -726,6 +735,8 @@ async def stream_quotes(
 
         log.info(f'Starting up quote stream(s) for {symbols}')
         for sym_str in symbols:
+            mkt: MktPair
+            pair: KucoinMktPair
             mkt, pair = await get_mkt_info(sym_str)
             init_msgs.append(
                 FeedInit(mkt_info=mkt)
@@ -733,7 +744,11 @@ async def stream_quotes(
 
         ws: NoBsWs
         token, ping_interval = await client._get_ws_token()
-        connect_id = str(uuid4())
+        log.info('API reported ping_interval: {ping_interval}\n')
+
+        connect_id: str = str(uuid4())
+        typ: str
+        quote: dict
         async with (
             open_autorecon_ws(
                 (
@@ -747,20 +762,37 @@ async def stream_quotes(
                 ),
             ) as ws,
             open_ping_task(ws, ping_interval, connect_id),
-            aclosing(stream_messages(ws, sym_str)) as msg_gen,
+            aclosing(
+                iter_normed_quotes(
+                    ws, sym_str
+                )
+            ) as iter_quotes,
         ):
-            typ, quote = await anext(msg_gen)
+            typ, quote = await anext(iter_quotes)
 
-            while typ != 'trade':
-                # take care to not unblock here until we get a real
-                # trade quote
-                typ, quote = await anext(msg_gen)
+            # take care to not unblock here until we get a real
+            # trade quote?
+            # ^TODO, remove this right?
+            # -[ ] what often blocks chart boot/new-feed switching
+            #   since we'ere waiting for a live quote instead of just
+            #   loading history afap..
+            #  |_ XXX, not sure if we require a bit of rework to core
+            #    feed init logic or if backends justg gotta be
+            #    changed up.. feel like there was some causality
+            #    dilema prolly only seen with IB too..
+            # while typ != 'trade':
+            #     typ, quote = await anext(iter_quotes)
 
             task_status.started((init_msgs, quote))
             feed_is_live.set()
 
-            async for typ, msg in msg_gen:
-                await send_chan.send({sym_str: msg})
+            # XXX NOTE, DO NOT include the `.<backend>` suffix!
+            # OW the sampling loop will not broadcast correctly..
+            # since `bus._subscribers.setdefault(bs_fqme, set())`
+            # is used inside `.data.open_feed_bus()` !!!
+            topic: str = mkt.bs_fqme
+            async for typ, quote in iter_quotes:
+                await send_chan.send({topic: quote})
 
 
 @acm
```
```diff
@@ -815,7 +847,7 @@ async def subscribe(
             )
 
 
-async def stream_messages(
+async def iter_normed_quotes(
     ws: NoBsWs,
     sym: str,
 
@@ -846,6 +878,9 @@ async def stream_messages(
 
                 yield 'trade', {
                     'symbol': sym,
+                    # TODO, is 'last' even used elsewhere/a-good
+                    # semantic? can't we just read the ticks with our
+                    # .data.ticktools.frame_ticks()`/
                     'last': trade_data.price,
                     'brokerd_ts': last_trade_ts,
                     'ticks': [
@@ -938,7 +973,7 @@ async def open_history_client(
             if end_dt is None:
                 inow = round(time.time())
 
-                print(
+                log.debug(
                     f'difference in time between load and processing'
                     f'{inow - times[-1]}'
                 )
```

```diff
@@ -653,7 +653,11 @@ class Router(Struct):
             flume = feed.flumes[fqme]
             first_quote: dict = flume.first_quote
             book: DarkBook = self.get_dark_book(broker)
-            book.lasts[fqme]: float = float(first_quote['last'])
+
+            if not (last := first_quote.get('last')):
+                last: float = flume.rt_shm.array[-1]['close']
+
+            book.lasts[fqme]: float = float(last)
 
             async with self.maybe_open_brokerd_dialog(
                 brokermod=brokermod,
@@ -716,7 +720,7 @@ class Router(Struct):
             subs = self.subscribers[sub_key]
 
         sent_some: bool = False
-        for client_stream in subs:
+        for client_stream in subs.copy():
             try:
                 await client_stream.send(msg)
                 sent_some = True
@@ -1010,10 +1014,14 @@ async def translate_and_relay_brokerd_events(
                 status_msg.brokerd_msg = msg
                 status_msg.src = msg.broker_details['name']
 
-                await router.client_broadcast(
-                    status_msg.req.symbol,
-                    status_msg,
-                )
+                if not status_msg.req:
+                    # likely some order change state?
+                    await tractor.pause()
+                else:
+                    await router.client_broadcast(
+                        status_msg.req.symbol,
+                        status_msg,
+                    )
 
                 if status == 'closed':
                     log.info(f'Execution for {oid} is complete!')
```

```diff
@@ -653,6 +653,7 @@ async def open_trade_dialog(
                 # in) use manually constructed table from calling
                 # the `.get_mkt_info()` provider EP above.
                 _mktmap_table=mkt_by_fqme,
+                only_require=list(mkt_by_fqme),
             )
 
             pp_msgs: list[BrokerdPosition] = []
```

```diff
@@ -335,7 +335,7 @@ def services(config, tl, ports):
                 name='service_query',
                 loglevel=config['loglevel'] if tl else None,
             ),
-            tractor.get_arbiter(
+            tractor.get_registry(
                 host=host,
                 port=ports[0]
             ) as portal
```
```diff
@@ -25,6 +25,7 @@ from collections import (
     defaultdict,
 )
 from contextlib import asynccontextmanager as acm
+from functools import partial
 import time
 from typing import (
     Any,
@@ -42,7 +43,7 @@ from tractor.trionics import (
     maybe_open_nursery,
 )
 import trio
-from trio_typing import TaskStatus
+from trio import TaskStatus
 
 from .ticktools import (
     frame_ticks,
@@ -70,6 +71,7 @@ if TYPE_CHECKING:
 _default_delay_s: float = 1.0
 
 
+# TODO: use new `tractor.singleton_acm` API for this!
 class Sampler:
     '''
     Global sampling engine registry.
@@ -79,9 +81,9 @@ class Sampler:
 
     This non-instantiated type is meant to be a singleton within
     a `samplerd` actor-service spawned once by the user wishing to
-    time-step-sample (real-time) quote feeds, see
-    ``.service.maybe_open_samplerd()`` and the below
-    ``register_with_sampler()``.
+    time-step-sample a (real-time) quote feeds, see
+    `.service.maybe_open_samplerd()` and the below
+    `register_with_sampler()`.
 
     '''
     service_nursery: None | trio.Nursery = None
@@ -375,7 +377,10 @@ async def register_with_sampler(
                 assert Sampler.ohlcv_shms
 
             # unblock caller
-            await ctx.started(set(Sampler.ohlcv_shms.keys()))
+            await ctx.started(
+                # XXX bc msgpack only allows one array type!
+                list(Sampler.ohlcv_shms.keys())
+            )
 
             if open_index_stream:
                 try:
@@ -419,7 +424,6 @@ async def register_with_sampler(
 
 
 async def spawn_samplerd(
-
     loglevel: str | None = None,
     **extra_tractor_kwargs
 
@@ -429,7 +433,10 @@ async def spawn_samplerd(
     update and increment count write and stream broadcasting.
 
     '''
-    from piker.service import Services
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
+    )
 
     dname = 'samplerd'
     log.info(f'Spawning `{dname}`')
@@ -437,26 +444,33 @@ async def spawn_samplerd(
     # singleton lock creation of ``samplerd`` since we only ever want
     # one daemon per ``pikerd`` proc tree.
     # TODO: make this built-into the service api?
-    async with Services.locks[dname + '_singleton']:
-
-        if dname not in Services.service_tasks:
-
-            portal = await Services.actor_n.start_actor(
-                dname,
-                enable_modules=[
-                    'piker.data._sampling',
-                ],
-                loglevel=loglevel,
-                debug_mode=Services.debug_mode,  # set by pikerd flag
-                **extra_tractor_kwargs
-            )
-
-            await Services.start_service_task(
-                dname,
-                portal,
+    mngr: ServiceMngr = get_service_mngr()
+    already_started: bool = dname in mngr.service_tasks
+
+    async with mngr._locks[dname + '_singleton']:
+        ctx: Context = await mngr.start_service(
+            daemon_name=dname,
+            ctx_ep=partial(
                 register_with_sampler,
                 period_s=1,
                 sub_for_broadcasts=False,
+            ),
+            debug_mode=mngr.debug_mode,  # set by pikerd flag
+
+            # proxy-through to tractor
+            enable_modules=[
+                'piker.data._sampling',
+            ],
+            loglevel=loglevel,
+            **extra_tractor_kwargs
+        )
+        if not already_started:
+            assert (
+                ctx
+                and
+                ctx.portal
+                and
+                not ctx.cancel_called
             )
             return True
 
@@ -889,6 +903,7 @@ async def uniform_rate_send(
             # to consumers which crash or lose network connection.
             # I.e. we **DO NOT** want to crash and propagate up to
             # ``pikerd`` these kinds of errors!
+            trio.EndOfChannel,
             trio.ClosedResourceError,
             trio.BrokenResourceError,
             ConnectionResetError,
```
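The `set` to `list` conversion in the `ctx.started()` call above is driven by the wire format; a tiny standalone illustration follows, assuming `msgspec` is importable as the msgpack codec in this stack:

```python
# Why the payload is sent as a list: msgpack has a single array type,
# so a Python `set` cannot round-trip as a set over IPC.
import msgspec

periods: set[float] = {1.0, 60.0}
packed: bytes = msgspec.msgpack.encode(sorted(periods))  # send as a list
assert msgspec.msgpack.decode(packed) == [1.0, 60.0]     # arrives as a list
```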
```diff
@@ -273,7 +273,7 @@ async def _reconnect_forever(
                 nobsws._connected.set()
                 await trio.sleep_forever()
         except HandshakeError:
-            log.exception(f'Retrying connection')
+            log.exception('Retrying connection')
 
         # ws & nursery block ends
 
@@ -359,8 +359,8 @@ async def open_autorecon_ws(
 
 
 '''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
+JSONRPC response-request style machinery for transparent multiplexing
+of msgs over a NoBsWs.
 
 '''
 
@@ -377,16 +377,20 @@ async def open_jsonrpc_session(
     url: str,
     start_id: int = 0,
     response_type: type = JSONRPCResult,
-    request_type: Optional[type] = None,
-    request_hook: Optional[Callable] = None,
-    error_hook: Optional[Callable] = None,
+    # request_type: Optional[type] = None,
+    # request_hook: Optional[Callable] = None,
+    # error_hook: Optional[Callable] = None,
 ) -> Callable[[str, dict], dict]:
+
+    # NOTE, store all request msgs so we can raise errors on the
+    # caller side!
+    req_msgs: dict[int, dict] = {}
+
     async with (
         trio.open_nursery() as n,
         open_autorecon_ws(url) as ws
     ):
-        rpc_id: Iterable = count(start_id)
+        rpc_id: Iterable[int] = count(start_id)
         rpc_results: dict[int, dict] = {}
 
         async def json_rpc(method: str, params: dict) -> dict:
@@ -394,26 +398,40 @@ async def open_jsonrpc_session(
             perform a json rpc call and wait for the result, raise exception in
             case of error field present on response
             '''
+            nonlocal req_msgs
+
+            req_id: int = next(rpc_id)
             msg = {
                 'jsonrpc': '2.0',
-                'id': next(rpc_id),
+                'id': req_id,
                 'method': method,
                 'params': params
             }
             _id = msg['id']
 
-            rpc_results[_id] = {
+            result = rpc_results[_id] = {
                 'result': None,
-                'event': trio.Event()
+                'error': None,
+                'event': trio.Event(),  # signal caller resp arrived
             }
+            req_msgs[_id] = msg
+
             await ws.send_msg(msg)
 
+            # wait for reponse before unblocking requester code
             await rpc_results[_id]['event'].wait()
 
-            ret = rpc_results[_id]['result']
-
-            del rpc_results[_id]
+            if (maybe_result := result['result']):
+                ret = maybe_result
+                del rpc_results[_id]
+
+            else:
+                err = result['error']
+                raise Exception(
+                    f'JSONRPC request failed\n'
+                    f'req: {msg}\n'
+                    f'resp: {err}\n'
+                )
 
             if ret.error is not None:
                 raise Exception(json.dumps(ret.error, indent=4))
```
```diff
@@ -428,6 +446,7 @@ async def open_jsonrpc_session(
             the server side.
 
             '''
+            nonlocal req_msgs
             async for msg in ws:
                 match msg:
                     case {
@@ -451,15 +470,29 @@ async def open_jsonrpc_session(
                         'params': _,
                     }:
                         log.debug(f'Recieved\n{msg}')
-                        if request_hook:
-                            await request_hook(request_type(**msg))
+                        # if request_hook:
+                        #     await request_hook(request_type(**msg))
 
                     case {
                         'error': error
                     }:
-                        log.warning(f'Recieved\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
+                        # if error_hook:
+                        #     await error_hook(response_type(**msg))
+
+                        # retreive orig request msg, set error
+                        # response in original "result" msg,
+                        # THEN FINALLY set the event to signal caller
+                        # to raise the error in the parent task.
+                        req_id: int = error['id']
+                        req_msg: dict = req_msgs[req_id]
+                        result: dict = rpc_results[req_id]
+                        result['error'] = error
+                        result['event'].set()
+                        log.error(
+                            f'JSONRPC request failed\n'
+                            f'req: {req_msg}\n'
+                            f'resp: {error}\n'
+                        )
 
                     case _:
                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
```
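A caller-side sketch of the new error behaviour in `open_jsonrpc_session`: a server error response is now recorded against the original request and re-raised in the requesting task instead of only being logged. The endpoint URL and method name below are placeholders and the import path is assumed:

```python
# Hedged usage sketch of the reworked JSONRPC helper; nothing here is
# taken from the diff besides the `open_jsonrpc_session()` entrypoint.
import trio
from piker.data._web_bs import open_jsonrpc_session  # module path assumed

async def query() -> None:
    async with open_jsonrpc_session(
        'wss://example.com/ws/api/v2',   # placeholder endpoint
    ) as json_rpc:
        try:
            resp = await json_rpc('public/get_time', params={})
            print(resp.result)
        except Exception as err:
            # with this change a JSONRPC error response is re-raised
            # here, together with the originating request msg, rather
            # than only being logged inside the receiver task.
            print(f'rpc failed: {err}')

trio.run(query)
```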
@@ -30,7 +30,11 @@ Actor runtime primtives and (distributed) service APIs for,
   => TODO: maybe to (re)move elsewhere?

 '''
-from ._mngr import Services as Services
+from ._mngr import (
+    get_service_mngr as get_service_mngr,
+    open_service_mngr as open_service_mngr,
+    ServiceMngr as ServiceMngr,
+)
 from ._registry import (
     _tractor_kwargs as _tractor_kwargs,
     _default_reg_addr as _default_reg_addr,

@@ -21,7 +21,6 @@
 from __future__ import annotations
 import os
 from typing import (
-    Optional,
     Any,
     ClassVar,
 )

@@ -30,13 +29,13 @@ from contextlib import (
 )

 import tractor
-import trio

 from ._util import (
     get_console_log,
 )
 from ._mngr import (
-    Services,
+    open_service_mngr,
+    ServiceMngr,
 )
 from ._registry import (  # noqa
     _tractor_kwargs,

@@ -59,7 +58,7 @@ async def open_piker_runtime(
     registry_addrs: list[tuple[str, int]] = [],

     enable_modules: list[str] = [],
-    loglevel: Optional[str] = None,
+    loglevel: str|None = None,

     # XXX NOTE XXX: you should pretty much never want debug mode
     # for data daemons when running in production.

@@ -69,7 +68,7 @@ async def open_piker_runtime(
     # and spawn the service tree distributed per that.
     start_method: str = 'trio',

-    tractor_runtime_overrides: dict | None = None,
+    tractor_runtime_overrides: dict|None = None,
     **tractor_kwargs,

 ) -> tuple[

@@ -119,6 +118,10 @@ async def open_piker_runtime(
                 # spawn other specialized daemons I think?
                 enable_modules=enable_modules,

+                # TODO: how to configure this?
+                # keep it on by default if debug mode is set?
+                # maybe_enable_greenback=debug_mode,
+
                 **tractor_kwargs,
             ) as actor,

@@ -167,12 +170,13 @@ async def open_pikerd(

     **kwargs,

-) -> Services:
+) -> ServiceMngr:
     '''
-    Start a root piker daemon with an indefinite lifetime.
+    Start a root piker daemon actor (aka `pikerd`) with an indefinite
+    lifetime.

-    A root actor nursery is created which can be used to create and keep
-    alive underling services (see below).
+    A root actor-nursery is created which can be used to spawn and
+    supervise underling service sub-actors (see below).

     '''
     # NOTE: for the root daemon we always enable the root

@@ -199,8 +203,6 @@ async def open_pikerd(
             root_actor,
             reg_addrs,
         ),
-        tractor.open_nursery() as actor_nursery,
-        trio.open_nursery() as service_nursery,
     ):
         for addr in reg_addrs:
             if addr not in root_actor.accept_addrs:

@@ -209,25 +211,17 @@ async def open_pikerd(
                     'Maybe you have another daemon already running?'
                 )

-        # assign globally for future daemon/task creation
-        Services.actor_n = actor_nursery
-        Services.service_n = service_nursery
-        Services.debug_mode = debug_mode
-
-        try:
-            yield Services
-
-        finally:
-            # TODO: is this more clever/efficient?
-            # if 'samplerd' in Services.service_tasks:
-            #     await Services.cancel_service('samplerd')
-            service_nursery.cancel_scope.cancel()
+        mngr: ServiceMngr
+        async with open_service_mngr(
+            debug_mode=debug_mode,
+        ) as mngr:
+            yield mngr


 # TODO: do we even need this?
 # @acm
 # async def maybe_open_runtime(
-#     loglevel: Optional[str] = None,
+#     loglevel: str|None = None,
 #     **kwargs,

 # ) -> None:

@@ -256,7 +250,7 @@ async def maybe_open_pikerd(
     loglevel: str | None = None,
     **kwargs,

-) -> tractor._portal.Portal | ClassVar[Services]:
+) -> tractor._portal.Portal | ClassVar[ServiceMngr]:
     '''
     If no ``pikerd`` daemon-root-actor can be found start it and
     yield up (we should probably figure out returning a portal to self

@@ -49,7 +49,7 @@ from requests.exceptions import (
     ReadTimeout,
 )

-from ._mngr import Services
+from ._mngr import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,

@@ -453,7 +453,7 @@ async def open_ahabd(

 @acm
 async def start_ahab_service(
-    services: Services,
+    services: ServiceMngr,
     service_name: str,

     # endpoint config passed as **kwargs

@@ -549,7 +549,8 @@ async def start_ahab_service(
         log.warning('Failed to cancel root permsed container')

     except (
-        trio.MultiError,
+        # trio.MultiError,
+        ExceptionGroup,
     ) as err:
         for subexc in err.exceptions:
             if isinstance(subexc, PermissionError):
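The `trio.MultiError` to `ExceptionGroup` swap above relies on the Python 3.11+ builtin; iterating `.exceptions` works the same way as before. A small, self-contained sketch of that handling (unrelated to the docker teardown path itself):

# Sketch: catching a builtin ExceptionGroup (py3.11+) and inspecting
# its sub-exceptions, as the modified `except` block above does.
def run_many() -> None:
    raise ExceptionGroup(
        'container teardown failed',
        [PermissionError('need root'), TimeoutError('no reply')],
    )


try:
    run_many()
except ExceptionGroup as err:
    for subexc in err.exceptions:
        if isinstance(subexc, PermissionError):
            print(f'perms issue: {subexc}')
        else:
            print(f'other failure: {subexc!r}')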
@@ -26,14 +26,17 @@ from typing import (
 from contextlib import (
     asynccontextmanager as acm,
 )
+from collections import defaultdict

 import tractor
+import trio

 from ._util import (
     log,  # sub-sys logger
 )
 from ._mngr import (
-    Services,
+    get_service_mngr,
+    ServiceMngr,
 )
 from ._actor_runtime import maybe_open_pikerd
 from ._registry import find_service

@@ -41,15 +44,14 @@ from ._registry import find_service

 @acm
 async def maybe_spawn_daemon(
-
     service_name: str,
     service_task_target: Callable,
-
     spawn_args: dict[str, Any],

     loglevel: str | None = None,
     singleton: bool = False,

+    _locks = defaultdict(trio.Lock),
     **pikerd_kwargs,

 ) -> tractor.Portal:

@@ -67,7 +69,7 @@ async def maybe_spawn_daemon(
     '''
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
-    lock = Services.locks[service_name]
+    lock = _locks[service_name]
     await lock.acquire()

     async with find_service(
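The `Services.locks` class attribute becomes a `_locks` mapping defaulted in the signature; either way the idea is one `trio.Lock` per service name so concurrent callers cannot race to spawn the same daemon. A stripped-down sketch of that serialization (the service name and the pretend "spawn" body are made up for illustration):

# Sketch: serialize "spawn if missing" per service-name with a
# defaultdict of trio.Lock, mirroring `_locks[service_name]` above.
from collections import defaultdict
import trio

_locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)
_running: set[str] = set()


async def maybe_spawn(service_name: str) -> None:
    async with _locks[service_name]:
        if service_name not in _running:
            await trio.sleep(0.1)  # pretend to spawn the daemon
            _running.add(service_name)
            print(f'{service_name}: spawned')
        else:
            print(f'{service_name}: already running')


async def main() -> None:
    async with trio.open_nursery() as n:
        for _ in range(3):
            n.start_soon(maybe_spawn, 'emsd')


trio.run(main)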
@@ -132,7 +134,65 @@ async def maybe_spawn_daemon(
         async with tractor.wait_for_actor(service_name) as portal:
             lock.release()
             yield portal
-            await portal.cancel_actor()
+            # --- ---- ---
+            # XXX NOTE XXX
+            # --- ---- ---
+            # DO NOT PUT A `portal.cancel_actor()` here (as was prior)!
+            #
+            # Doing so will cause an "out-of-band" ctxc
+            # (`tractor.ContextCancelled`) to be raised inside the
+            # `ServiceMngr.open_context_in_task()`'s call to
+            # `ctx.wait_for_result()` AND the internal self-ctxc
+            # "graceful capture" WILL NOT CATCH IT!
+            #
+            # This can cause certain types of operations to raise
+            # that ctxc BEFORE THEY `return`, resulting in
+            # a "false-negative" ctxc being raised when really
+            # nothing actually failed, other then our semantic
+            # "failure" to suppress an expected, graceful,
+            # self-cancel scenario..
+            #
+            # bUt wHy duZ It WorK lIKe dis..
+            # ------------------------------
+            # from the perspective of the `tractor.Context` this
+            # cancel request was conducted "out of band" since
+            # `Context.cancel()` was never called and thus the
+            # `._cancel_called: bool` was never set. Despite the
+            # remote `.canceller` being set to `pikerd` (i.e. the
+            # same `Actor.uid` of the raising service-mngr task) the
+            # service-task's ctx itself was never marked as having
+            # requested cancellation and thus still raises the ctxc
+            # bc it was unaware of any such request.
+            #
+            # How to make grokin these cases easier tho?
+            # ------------------------------------------
+            # Because `Portal.cancel_actor()` was called it requests
+            # "full-`Actor`-runtime-cancellation" of it's peer
+            # process which IS NOT THE SAME as a single inter-actor
+            # RPC task cancelling its local context with a remote
+            # peer `Task` in that same peer process.
+            #
+            # ?TODO? It might be better if we do one (or all) of the
+            # following:
+            #
+            # -[ ] at least set a special message for the
+            #    `ContextCancelled` when raised locally by the
+            #    unaware ctx task such that we check for the
+            #    `.canceller` being *our `Actor`* and in the case
+            #    where `Context._cancel_called == False` we specially
+            #    note that this is likely an "out-of-band"
+            #    runtime-cancel request triggered by some call to
+            #    `Portal.cancel_actor()`, possibly even reporting the
+            #    exact LOC of that caller by tracking it inside our
+            #    portal-type?
+            # -[ ] possibly add another field `ContextCancelled` like
+            #    maybe a,
+            #    `.request_type: Literal['os', 'proc', 'actor',
+            #    'ctx']` type thing which would allow immediately
+            #    being able to tell what kind of cancellation caused
+            #    the unexpected ctxc?
+            # -[ ] REMOVE THIS COMMENT, once we've settled on how to
+            #     better augment `tractor` to be more explicit on this!


 async def spawn_emsd(

@@ -147,21 +207,22 @@ async def spawn_emsd(
     """
     log.info('Spawning emsd')

-    portal = await Services.actor_n.start_actor(
+    smngr: ServiceMngr = get_service_mngr()
+    portal = await smngr.actor_n.start_actor(
         'emsd',
         enable_modules=[
             'piker.clearing._ems',
             'piker.clearing._client',
         ],
         loglevel=loglevel,
-        debug_mode=Services.debug_mode,  # set by pikerd flag
+        debug_mode=smngr.debug_mode,  # set by pikerd flag
         **extra_tractor_kwargs
     )

     # non-blocking setup of clearing service
     from ..clearing._ems import _setup_persistent_emsd

-    await Services.start_service_task(
+    await smngr.start_service_task(
         'emsd',
         portal,

@@ -18,16 +18,29 @@
 daemon-service management API.

 """
+from __future__ import annotations
+from contextlib import (
+    asynccontextmanager as acm,
+    # contextmanager as cm,
+)
 from collections import defaultdict
+from dataclasses import (
+    dataclass,
+    field,
+)
+import functools
+import inspect
 from typing import (
     Callable,
     Any,
 )

-import trio
-from trio_typing import TaskStatus
+import msgspec
 import tractor
+import trio
+from trio import TaskStatus
 from tractor import (
+    ActorNursery,
     current_actor,
     ContextCancelled,
     Context,

@@ -39,6 +52,130 @@ from ._util import (
 )


+# TODO: implement a singleton deco-API for wrapping the below
+# factory's impl for general actor-singleton use?
+#
+# @singleton
+# async def open_service_mngr(
+#     **init_kwargs,
+# ) -> ServiceMngr:
+#     '''
+#     Note this function body is invoke IFF no existing singleton instance already
+#     exists in this proc's memory.
+#
+#     '''
+#     # setup
+#     yield ServiceMngr(**init_kwargs)
+#     # teardown
+
+
+
+# TODO: singleton factory API instead of a class API
+@acm
+async def open_service_mngr(
+    *,
+    debug_mode: bool = False,
+
+    # impl deat which ensures a single global instance
+    _singleton: list[ServiceMngr|None] = [None],
+    **init_kwargs,
+
+) -> ServiceMngr:
+    '''
+    Open a multi-subactor-as-service-daemon tree supervisor.
+
+    The delivered `ServiceMngr` is a singleton instance for each
+    actor-process and is allocated on first open and never
+    de-allocated unless explicitly deleted by al call to
+    `del_service_mngr()`.
+
+    '''
+    # TODO: factor this an allocation into
+    # a `._mngr.open_service_mngr()` and put in the
+    # once-n-only-once setup/`.__aenter__()` part!
+    # -[ ] how to make this only happen on the `mngr == None` case?
+    #  |_ use `.trionics.maybe_open_context()` (for generic
+    #     async-with-style-only-once of the factory impl, though
+    #     what do we do for the allocation case?
+    #    / `.maybe_open_nursery()` (since for this specific case
+    #    it's simpler?) to activate
+    async with (
+        tractor.open_nursery() as an,
+        trio.open_nursery() as tn,
+    ):
+        # impl specific obvi..
+        init_kwargs.update({
+            'actor_n': an,
+            'service_n': tn,
+        })
+
+        mngr: ServiceMngr|None
+        if (mngr := _singleton[0]) is None:
+
+            log.info('Allocating a new service mngr!')
+            mngr = _singleton[0] = ServiceMngr(**init_kwargs)
+
+            # TODO: put into `.__aenter__()` section of
+            # eventual `@singleton_acm` API wrapper.
+            #
+            # assign globally for future daemon/task creation
+            mngr.actor_n = an
+            mngr.service_n = tn
+
+        else:
+            assert (
+                mngr.actor_n
+                and
+                mngr.service_tn
+            )
+            log.info(
+                'Using extant service mngr!\n\n'
+                f'{mngr!r}\n'  # it has a nice `.__repr__()` of services state
+            )
+
+        try:
+            # NOTE: this is a singleton factory impl specific detail
+            # which should be supported in the condensed
+            # `@singleton_acm` API?
+            mngr.debug_mode = debug_mode
+
+            yield mngr
+        finally:
+            # TODO: is this more clever/efficient?
+            # if 'samplerd' in mngr.service_tasks:
+            #     await mngr.cancel_service('samplerd')
+            tn.cancel_scope.cancel()
+
+
+
+def get_service_mngr() -> ServiceMngr:
+    '''
+    Try to get the singleton service-mngr for this actor presuming it
+    has already been allocated using,
+
+    .. code:: python
+
+        async with open_<@singleton_acm(func)>() as mngr`
+            ... this block kept open ...
+
+    If not yet allocated raise a `ServiceError`.
+
+    '''
+    # https://stackoverflow.com/a/12627202
+    # https://docs.python.org/3/library/inspect.html#inspect.Signature
+    maybe_mngr: ServiceMngr|None = inspect.signature(
+        open_service_mngr
+    ).parameters['_singleton'].default[0]
+
+    if maybe_mngr is None:
+        raise RuntimeError(
+            'Someone must allocate a `ServiceMngr` using\n\n'
+            '`async with open_service_mngr()` beforehand!!\n'
+        )
+
+    return maybe_mngr
+
+
 # TODO: we need remote wrapping and a general soln:
 # - factor this into a ``tractor.highlevel`` extension # pack for the
 #   library.
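The new `open_service_mngr()` factory keeps the per-process singleton inside the `_singleton: list[ServiceMngr|None]` default argument, and `get_service_mngr()` reads it back out through `inspect.signature()`. That trick works for any function with a mutable default; below is a tiny generic demonstration of it. The `Mngr`/`open_mngr` names are illustrative only and not part of the piker API:

# Sketch: stash a per-process singleton inside a mutable default arg
# and recover it later via the function's signature, like
# `open_service_mngr()` / `get_service_mngr()` above.
import inspect
from contextlib import contextmanager


class Mngr:
    def __init__(self, **kwargs) -> None:
        self.cfg = kwargs


@contextmanager
def open_mngr(
    _singleton: list[Mngr | None] = [None],  # mutable default on purpose!
    **init_kwargs,
):
    # allocate only once per process, then always yield the same obj
    if (mngr := _singleton[0]) is None:
        mngr = _singleton[0] = Mngr(**init_kwargs)
    yield mngr


def get_mngr() -> Mngr:
    # `inspect.signature()` exposes the same default list object, so we
    # can peek at the stored singleton from anywhere in the process.
    maybe = inspect.signature(open_mngr).parameters['_singleton'].default[0]
    if maybe is None:
        raise RuntimeError('open_mngr() was never entered!')
    return maybe


with open_mngr(debug=True) as m1:
    assert get_mngr() is m1
    with open_mngr() as m2:
        assert m2 is m1  # same instance on re-entry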
@@ -46,31 +183,46 @@ from ._util import (
 #   to the pikerd actor for starting services remotely!
 # - prolly rename this to ActorServicesNursery since it spawns
 #   new actors and supervises them to completion?
-class Services:
-
-    actor_n: tractor._supervise.ActorNursery
+@dataclass
+class ServiceMngr:
+# class ServiceMngr(msgspec.Struct):
+    '''
+    A multi-subactor-as-service manager.
+
+    Spawn, supervise and monitor service/daemon subactors in a SC
+    process tree.
+
+    '''
+    actor_n: ActorNursery
     service_n: trio.Nursery
-    debug_mode: bool  # tractor sub-actor debug mode flag
+    debug_mode: bool = False # tractor sub-actor debug mode flag

     service_tasks: dict[
         str,
         tuple[
             trio.CancelScope,
+            Context,
             Portal,
             trio.Event,
         ]
-    ] = {}
-    locks = defaultdict(trio.Lock)
+    ] = field(default_factory=dict)
+
+    # internal per-service task mutexs
+    _locks = defaultdict(trio.Lock)

-    @classmethod
     async def start_service_task(
         self,
         name: str,
         portal: Portal,
+
+        # TODO: typevar for the return type of the target and then
+        # use it below for `ctx_res`?
         target: Callable,
+
         allow_overruns: bool = False,
         **ctx_kwargs,

-    ) -> (trio.CancelScope, Context):
+    ) -> (trio.CancelScope, Context, Any):
         '''
         Open a context in a service sub-actor, add to a stack
         that gets unwound at ``pikerd`` teardown.

@@ -83,6 +235,7 @@ class Services:
             task_status: TaskStatus[
                 tuple[
                     trio.CancelScope,
+                    Context,
                     trio.Event,
                     Any,
                 ]

@@ -90,64 +243,87 @@ class Services:

         ) -> Any:

+            # TODO: use the ctx._scope directly here instead?
+            # -[ ] actually what semantics do we expect for this
+            #   usage!?
             with trio.CancelScope() as cs:
+                try:
+                    async with portal.open_context(
+                        target,
+                        allow_overruns=allow_overruns,
+                        **ctx_kwargs,
+
+                    ) as (ctx, started):

-                async with portal.open_context(
-                    target,
-                    allow_overruns=allow_overruns,
-                    **ctx_kwargs,
-
-                ) as (ctx, first):
-
-                    # unblock once the remote context has started
-                    complete = trio.Event()
-                    task_status.started((cs, complete, first))
-                    log.info(
-                        f'`pikerd` service {name} started with value {first}'
-                    )
-                    try:
+                        # unblock once the remote context has started
+                        complete = trio.Event()
+                        task_status.started((
+                            cs,
+                            ctx,
+                            complete,
+                            started,
+                        ))
+                        log.info(
+                            f'`pikerd` service {name} started with value {started}'
+                        )
                         # wait on any context's return value
                         # and any final portal result from the
                         # sub-actor.
-                        ctx_res: Any = await ctx.result()
+                        ctx_res: Any = await ctx.wait_for_result()

                         # NOTE: blocks indefinitely until cancelled
                         # either by error from the target context
                         # function or by being cancelled here by the
                         # surrounding cancel scope.
-                        return (await portal.result(), ctx_res)
-                    except ContextCancelled as ctxe:
-                        canceller: tuple[str, str] = ctxe.canceller
-                        our_uid: tuple[str, str] = current_actor().uid
-                        if (
-                            canceller != portal.channel.uid
-                            and
-                            canceller != our_uid
-                        ):
-                            log.cancel(
-                                f'Actor-service {name} was remotely cancelled?\n'
-                                f'remote canceller: {canceller}\n'
-                                f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n'
-                            )
-                        else:
-                            raise
-
-                    finally:
-                        await portal.cancel_actor()
-                        complete.set()
-                        self.service_tasks.pop(name)
-
-        cs, complete, first = await self.service_n.start(open_context_in_task)
+                        return (
+                            await portal.wait_for_result(),
+                            ctx_res,
+                        )
+
+                except ContextCancelled as ctxe:
+                    canceller: tuple[str, str] = ctxe.canceller
+                    our_uid: tuple[str, str] = current_actor().uid
+                    if (
+                        canceller != portal.chan.uid
+                        and
+                        canceller != our_uid
+                    ):
+                        log.cancel(
+                            f'Actor-service `{name}` was remotely cancelled by a peer?\n'
+
+                            # TODO: this would be a good spot to use
+                            # a respawn feature Bo
+                            f'-> Keeping `pikerd` service manager alive despite this inter-peer cancel\n\n'
+
+                            f'cancellee: {portal.chan.uid}\n'
+                            f'canceller: {canceller}\n'
+                        )
+                    else:
+                        raise
+
+                finally:
+                    # NOTE: the ctx MUST be cancelled first if we
+                    # don't want the above `ctx.wait_for_result()` to
+                    # raise a self-ctxc. WHY, well since from the ctx's
+                    # perspective the cancel request will have
+                    # arrived out-out-of-band at the `Actor.cancel()`
+                    # level, thus `Context.cancel_called == False`,
+                    # meaning `ctx._is_self_cancelled() == False`.
+                    # with trio.CancelScope(shield=True):
+                    # await ctx.cancel()
+                    await portal.cancel_actor()
+                    complete.set()
+                    self.service_tasks.pop(name)
+
+        cs, sub_ctx, complete, started = await self.service_n.start(
+            open_context_in_task
+        )

         # store the cancel scope and portal for later cancellation or
         # retstart if needed.
-        self.service_tasks[name] = (cs, portal, complete)
-
-        return cs, first
+        self.service_tasks[name] = (cs, sub_ctx, portal, complete)
+        return cs, sub_ctx, started

-    @classmethod
     async def cancel_service(
         self,
         name: str,
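The reworked `start_service_task()` now hands `(cs, sub_ctx, complete, started)` back through `task_status.started(...)`, which is just trio's `Nursery.start()` hand-off carrying a tuple. A bare-bones sketch of that hand-off pattern follows; the payload fields are dummies, not the real ctx/portal objects:

# Sketch: returning multiple values from a service task at startup via
# trio's `nursery.start()` + `task_status.started(...)`, like the
# `await self.service_n.start(open_context_in_task)` call above.
import trio


async def open_service(
    name: str,
    task_status: trio.TaskStatus = trio.TASK_STATUS_IGNORED,
) -> None:
    with trio.CancelScope() as cs:
        complete = trio.Event()
        started_value = {'name': name, 'pid': 1234}  # dummy "first msg"
        task_status.started((cs, complete, started_value))
        try:
            await trio.sleep_forever()  # the long-running service body
        finally:
            complete.set()


async def main() -> None:
    async with trio.open_nursery() as n:
        cs, complete, started = await n.start(open_service, 'emsd')
        print('service started with:', started)
        cs.cancel()            # request shutdown
        await complete.wait()  # wait for its teardown to finish


trio.run(main)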
@@ -158,8 +334,80 @@ class Services:

         '''
         log.info(f'Cancelling `pikerd` service {name}')
-        cs, portal, complete = self.service_tasks[name]
-        cs.cancel()
+        cs, sub_ctx, portal, complete = self.service_tasks[name]
+
+        # cs.cancel()
+        await sub_ctx.cancel()
         await complete.wait()
-        assert name not in self.service_tasks, \
-            f'Serice task for {name} not terminated?'
+
+        if name in self.service_tasks:
+            # TODO: custom err?
+            # raise ServiceError(
+            raise RuntimeError(
+                f'Serice task for {name} not terminated?'
+            )
+
+        # assert name not in self.service_tasks, \
+        #     f'Serice task for {name} not terminated?'
+
+    async def start_service(
+        self,
+        daemon_name: str,
+        ctx_ep: Callable,  # kwargs must `partial`-ed in!
+
+        debug_mode: bool = False,
+        **tractor_actor_kwargs,
+
+    ) -> Context:
+        '''
+        Start a "service" task in a new sub-actor (daemon) and manage it's lifetime
+        indefinitely.
+
+        Services can be cancelled/shutdown using `.cancel_service()`.
+
+        '''
+        entry: tuple|None = self.service_tasks.get(daemon_name)
+        if entry:
+            (cs, sub_ctx, portal, complete) = entry
+            return sub_ctx
+
+        if daemon_name not in self.service_tasks:
+            portal = await self.actor_n.start_actor(
+                daemon_name,
+                debug_mode=(  # maybe set globally during allocate
+                    debug_mode
+                    or
+                    self.debug_mode
+                ),
+                **tractor_actor_kwargs,
+            )
+            ctx_kwargs: dict[str, Any] = {}
+            if isinstance(ctx_ep, functools.partial):
+                ctx_kwargs: dict[str, Any] = ctx_ep.keywords
+                ctx_ep: Callable = ctx_ep.func
+
+            (cs, sub_ctx, started) = await self.start_service_task(
+                daemon_name,
+                portal,
+                ctx_ep,
+                **ctx_kwargs,
+            )
+
+            return sub_ctx
+
+
+# TODO:
+# -[ ] factor all the common shit from `.data._sampling`
+#   and `.brokers._daemon` into here / `ServiceMngr`
+#   in terms of allocating the `Portal` as part of the
+#   "service-in-subactor" starting!
+# -[ ] move to `tractor.hilevel._service`, import and use here!
+# NOTE: purposely leaks the ref to the mod-scope Bo
+# import tractor
+# from tractor.hilevel import (
+#     open_service_mngr,
+#     ServiceMngr,
+# )
+# mngr: ServiceMngr|None = None
+# with tractor.hilevel.open_service_mngr() as mngr:
+#     Services = proxy(mngr)
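The new `start_service()` accepts the context endpoint with its kwargs pre-bound via `functools.partial` and then unpacks them (`.keywords` / `.func`) before forwarding. A tiny standalone illustration of that unpacking; the endpoint here is a hypothetical example, not a real piker service ep:

# Sketch: how a `partial`-wrapped endpoint gets split back into the
# bare callable plus its keyword args, matching the
# `isinstance(ctx_ep, functools.partial)` branch above.
import functools
from typing import Any, Callable


async def sample_ep(*, symbol: str, loglevel: str = 'info') -> str:
    return f'serving {symbol} at {loglevel}'


def unpack_ep(ctx_ep: Callable) -> tuple[Callable, dict[str, Any]]:
    ctx_kwargs: dict[str, Any] = {}
    if isinstance(ctx_ep, functools.partial):
        ctx_kwargs = ctx_ep.keywords
        ctx_ep = ctx_ep.func
    return ctx_ep, ctx_kwargs


ep = functools.partial(sample_ep, symbol='btcusdt', loglevel='debug')
func, kwargs = unpack_ep(ep)
assert func is sample_ep
assert kwargs == {'symbol': 'btcusdt', 'loglevel': 'debug'}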
@@ -21,11 +21,13 @@ from typing import (
     TYPE_CHECKING,
 )

+# TODO: oof, needs to be changed to `httpx`!
 import asks

 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer
+    from . import ServiceMngr

 from ._util import log  # sub-sys logger
 from ._util import (

@@ -127,7 +129,7 @@ def start_elasticsearch(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -53,7 +53,7 @@ import pendulum
 # import purerpc

 from ..data.feed import maybe_open_feed
-from . import Services
+from . import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,

@@ -233,7 +233,7 @@ def start_marketstore(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -458,13 +458,15 @@ async def start_backfill(
                     'bf_until <- last_start_dt:\n'
                     f'{backfill_until_dt} <- {last_start_dt}\n'
                 )
-                # ugh, what's a better way?
-                # TODO: fwiw, we probably want a way to signal a throttle
-                # condition (eg. with ib) so that we can halt the
-                # request loop until the condition is resolved?
-                if timeframe > 1:
-                    await tractor.pause()
+                # UGH: what's a better way?
+                # TODO: backends are responsible for being correct on
+                # this right!?
+                # -[ ] in the `ib` case we could maybe offer some way
+                #     to halt the request loop until the condition is
+                #     resolved or should the backend be entirely in
+                #     charge of solving such faults? yes, right?
+                # if timeframe > 1:
+                #     await tractor.pause()
                 return

             assert (

@@ -578,6 +580,7 @@ async def start_backfill(
                     'crypto',
                     'crypto_currency',
                     'fiat',  # a "forex pair"
+                    'perpetual_future',  # stupid "perps" from cex land
                 }:
                     # for now, our table key schema is not including
                     # the dst[/src] source asset token.
pyproject.toml (227 lines changed)
@@ -15,8 +15,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"

 # ------ - ------
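With the switch to hatchling, the build backend is declared under the standard PEP 517 `[build-system]` table. As a quick, hedged sanity check one can read the migrated metadata back with the stdlib `tomllib` (3.11+), assuming the snippet is run from the repo root; this helper is not part of the repo itself:

# Sketch: read back the migrated build-system table with stdlib tomllib.
import tomllib
from pathlib import Path

cfg = tomllib.loads(Path('pyproject.toml').read_text())
assert cfg['build-system']['build-backend'] == 'hatchling.build'
assert 'hatchling' in cfg['build-system']['requires']
print(cfg['project']['name'], cfg['project']['version'])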
					@ -34,121 +34,114 @@ ignore = []
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# ------ - ------
 | 
					# ------ - ------
 | 
				
			||||||
 | 
					
 | 
				
			||||||
[tool.poetry]
 | 
					 | 
				
			||||||
name = "piker"
 | 
					 | 
				
			||||||
version = "0.1.0.alpha0.dev0"
 | 
					 | 
				
			||||||
description = "trading gear for hackers"
 | 
					 | 
				
			||||||
authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"]
 | 
					 | 
				
			||||||
license = "AGPLv3"
 | 
					 | 
				
			||||||
readme = "README.rst"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# ------ - ------
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
-[tool.poetry.dependencies]
-async-generator = "^1.10"
-attrs = "^23.1.0"
-bidict = "^0.22.1"
-colorama = "^0.4.6"
-colorlog = "^6.7.0"
-cython = "^3.0.0"
-greenback = "^1.1.1"
-ib-insync = "^0.9.86"
-msgspec = "^0.18.0"
-numba = "^0.59.0"
-numpy = "^1.25"
-polars = "^0.18.13"
-pygments = "^2.16.1"
-python = ">=3.11, <3.13"
-rich = "^13.5.2"
-# setuptools = "^68.0.0"
-tomli = "^2.0.1"
-tomli-w = "^1.0.0"
-trio-util = "^0.7.0"
-trio-websocket = "^0.10.3"
-typer = "^0.9.0"
-rapidfuzz = "^3.5.2"
-pdbp = "^1.5.0"
-trio = "^0.24"
-pendulum = "^3.0.0"
-httpx = "^0.27.0"
-
-[tool.poetry.dependencies.tractor]
-develop = true
-git = 'https://github.com/goodboy/tractor.git'
-branch = 'asyncio_debugger_support'
-# path = "../tractor"
-
-[tool.poetry.dependencies.asyncvnc]
-git = 'https://github.com/pikers/asyncvnc.git'
-branch = 'main'
-
-[tool.poetry.dependencies.tomlkit]
-develop = true
-git = 'https://github.com/pikers/tomlkit.git'
-branch = 'piker_pin'
-# path = "../tomlkit/"
-
-[tool.poetry.group.uis]
-optional = true
-[tool.poetry.group.uis.dependencies]
-# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
-# TODO: make sure the levenshtein shit compiles on nix..
-# rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
-rapidfuzz = "^3.2.0"
-qdarkstyle = ">=3.0.2"
-pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' }
-
-# ------ - ------
-pyqt6 = "^6.7.0"
-
-[tool.poetry.group.dev]
-optional = true
-[tool.poetry.group.dev.dependencies]
-# testing / CI
-pytest = "^6.0.0"
-elasticsearch = "^8.9.0"
-xonsh = "^0.14.2"
-prompt-toolkit = "3.0.40"
-
-# console ehancements and eventually remote debugging
-# extras/helpers.
-# TODO: add a toolset that makes debugging a `pikerd` service
-# (tree) easy to hack on directly using more or less the local env:
-# - xonsh + xxh
-# - rsyscall + pdbp
-# - actor runtime control console like BEAM/OTP
-
-# ------ - ------
-
-# TODO: add an `--only daemon` group for running non-ui / pikerd
-# service tree in distributed mode B)
-# https://python-poetry.org/docs/managing-dependencies/#installing-group-dependencies
-# [tool.poetry.group.daemon.dependencies]
-
-[tool.poetry.scripts]
-piker = 'piker.cli:cli'
-pikerd = 'piker.cli:pikerd'
-ledger = 'piker.accounting.cli:ledger'
-
-
 [project]
-keywords=[
-  "async",
-  "trading",
-  "finance",
-  "quant",
-  "charting",
-]
-classifiers=[
-  'Development Status :: 3 - Alpha',
-  "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
-  'Operating System :: POSIX :: Linux',
-  "Programming Language :: Python :: Implementation :: CPython",
-  "Programming Language :: Python :: 3 :: Only",
-  "Programming Language :: Python :: 3.11",
-  "Programming Language :: Python :: 3.12",
-  'Intended Audience :: Financial and Insurance Industry',
-  'Intended Audience :: Science/Research',
-  'Intended Audience :: Developers',
-  'Intended Audience :: Education',
-]
+name = "piker"
+version = "0.1.0a0dev0"
+description = "trading gear for hackers"
+authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
+requires-python = ">=3.12, <3.13"
+license = "AGPL-3.0-or-later"
+readme = "README.rst"
+keywords = [
+    "async",
+    "trading",
+    "finance",
+    "quant",
+    "charting",
+]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: Implementation :: CPython",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Intended Audience :: Financial and Insurance Industry",
+    "Intended Audience :: Science/Research",
+    "Intended Audience :: Developers",
+    "Intended Audience :: Education",
+]
+dependencies = [
+    "async-generator >=1.10, <2.0.0",
+    "attrs >=23.1.0, <24.0.0",
+    "bidict >=0.22.1, <0.23.0",
+    "colorama >=0.4.6, <0.5.0",
+    "colorlog >=6.7.0, <7.0.0",
+    "ib-insync >=0.9.86, <0.10.0",
+    "numba >=0.59.0, <0.60.0",
+    "numpy >=1.25, <2.0",
+    "polars >=0.18.13, <0.19.0",
+    "pygments >=2.16.1, <3.0.0",
+    "rich >=13.5.2, <14.0.0",
+    "tomli >=2.0.1, <3.0.0",
+    "tomli-w >=1.0.0, <2.0.0",
+    "trio-util >=0.7.0, <0.8.0",
+    "trio-websocket >=0.10.3, <0.11.0",
+    "typer >=0.9.0, <1.0.0",
+    "rapidfuzz >=3.5.2, <4.0.0",
+    "pdbp >=1.5.0, <2.0.0",
+    "trio >=0.24, <0.25",
+    "pendulum >=3.0.0, <4.0.0",
+    "httpx >=0.27.0, <0.28.0",
+    "cryptofeed >=2.4.0, <3.0.0",
+    "pyarrow >=17.0.0, <18.0.0",
+    "websockets ==12.0",
+    "msgspec",
+    "tractor",
+    "asyncvnc",
+    "tomlkit",
+]
+
+[project.optional-dependencies]
+uis = [
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+    # TODO: make sure the levenshtein shit compiles on nix..
+    # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
+    "rapidfuzz >=3.2.0, <4.0.0",
+    "qdarkstyle >=3.0.2, <4.0.0",
+    "pyqt6 >=6.7.0, <7.0.0",
+    "pyqtgraph",
+
+    # ------ - ------
+
+    # TODO: add an `--only daemon` group for running non-ui / pikerd
+    # service tree in distributed mode B)
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+    # [project.optional-dependencies]
+]
+
+[dependency-groups]
+dev = [
+    "pytest >=6.0.0, <7.0.0",
+    "elasticsearch >=8.9.0, <9.0.0",
+    "xonsh >=0.14.2, <0.15.0",
+    "prompt-toolkit ==3.0.40",
+    "cython >=3.0.0, <4.0.0",
+    "greenback >=1.1.1, <2.0.0",
+    # console ehancements and eventually remote debugging
+    # extras/helpers.
+    # TODO: add a toolset that makes debugging a `pikerd` service
+    # (tree) easy to hack on directly using more or less the local env:
+    # - xonsh + xxh
+    # - rsyscall + pdbp
+    # - actor runtime control console like BEAM/OTP
+]
+
+[project.scripts]
+piker = "piker.cli:cli"
+pikerd = "piker.cli:pikerd"
+ledger = "piker.accounting.cli:ledger"
+
+[tool.hatch.build.targets.sdist]
+include = ["piker"]
+
+[tool.hatch.build.targets.wheel]
+include = ["piker"]
+
+[tool.uv.sources]
+pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
+asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
+tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
+msgspec = { git = "https://github.com/jcrist/msgspec.git" }
+tractor = { path = "../tractor" }
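The Poetry-specific tables above are folded into standard PEP 621 metadata, with the git/path pins moved to `[tool.uv.sources]` so a uv-based workflow (`uv lock` / `uv sync`) can resolve them. As a small, hypothetical sanity check (not part of this changeset), the migrated file can be read with the stdlib TOML parser to confirm the runtime deps and source overrides landed where expected; this sketch assumes it is run from the repo root next to the new pyproject.toml:

import tomllib  # stdlib TOML parser, Python >= 3.11
from pathlib import Path


def summarize_pyproject(path: str = "pyproject.toml") -> None:
    # Parse the migrated config and print the parts this changeset touches:
    # PEP 621 metadata, runtime deps, and the uv source overrides.
    cfg = tomllib.loads(Path(path).read_text())

    project = cfg["project"]
    print(project["name"], project["version"], project["requires-python"])

    # runtime deps now live under [project].dependencies instead of
    # [tool.poetry.dependencies]
    for dep in project["dependencies"]:
        print("dep:", dep)

    # git/path pins that used to be nested poetry tables are now
    # [tool.uv.sources] overrides
    for name, src in cfg.get("tool", {}).get("uv", {}).get("sources", {}).items():
        print("source override:", name, "->", src)


if __name__ == "__main__":
    summarize_pyproject()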
@@ -10,7 +10,7 @@ from piker import (
     config,
 )
 from piker.service import (
-    Services,
+    get_service_mngr,
 )
 from piker.log import get_console_log
 
@@ -129,7 +129,7 @@ async def _open_test_pikerd(
         ) as service_manager,
     ):
         # this proc/actor is the pikerd
-        assert service_manager is Services
+        assert service_manager is get_service_mngr()
 
         async with tractor.wait_for_actor(
             'pikerd',

@@ -26,7 +26,7 @@ import pytest
 import tractor
 from uuid import uuid4
 
-from piker.service import Services
+from piker.service import ServiceMngr
 from piker.log import get_logger
 from piker.clearing._messages import (
     Order,
@@ -158,7 +158,7 @@ def load_and_check_pos(
 
 
 def test_ems_err_on_bad_broker(
-    open_test_pikerd: Services,
+    open_test_pikerd: ServiceMngr,
     loglevel: str,
 ):
     async def load_bad_fqme():

@@ -15,7 +15,7 @@ import tractor
 
 from piker.service import (
     find_service,
-    Services,
+    ServiceMngr,
 )
 from piker.data import (
     open_feed,
@@ -44,7 +44,7 @@ def test_runtime_boot(
     async def main():
         port = 6666
         daemon_addr = ('127.0.0.1', port)
-        services: Services
+        services: ServiceMngr
 
         async with (
             open_test_pikerd(
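The test updates above track a rename on the `piker.service` side: the old module-level `Services` singleton gives way to a `ServiceMngr` type plus a `get_service_mngr()` accessor. A minimal sketch of the new spelling, assuming a pikerd actor tree is already running; only the two imported names are taken from the diff, the rest is illustrative:

from piker.service import (
    ServiceMngr,       # type now used in the test signatures/annotations
    get_service_mngr,  # accessor replacing the old `Services` singleton
)


def check_service_mngr() -> ServiceMngr:
    # Mirrors the `assert service_manager is get_service_mngr()` check in
    # `_open_test_pikerd()` above: the accessor is expected to hand back
    # the one live manager instance for this actor tree.
    mngr = get_service_mngr()
    assert isinstance(mngr, ServiceMngr)
    return mngr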