Compare commits

66 commits: cbffb6f227 ... 6b7af35cd0
| SHA1 |
|---|
| 6b7af35cd0 |
| 0d9d6e15ba |
| da55856dd2 |
| dc2b255548 |
| 2ef20761a0 |
| 5cefe8bcdb |
| d96e9d4f11 |
| a0dcf14aba |
| 1705afb607 |
| dafd5a3ca5 |
| b9dde98d1e |
| 1616cc0e82 |
| 0a2ed195a7 |
| 28e8628c61 |
| b734245183 |
| dc2c379d86 |
| be84d0dae1 |
| bdc3bc9219 |
| 9232d09440 |
| f96bd51442 |
| 6555ccfbba |
| 75d1d007fb |
| 2bdbe0f20e |
| a117177759 |
| 30060a83c9 |
| 156a35b606 |
| 89e241c132 |
| df8d1274ae |
| 0916b707e2 |
| 45788b0b53 |
| 38a1f0b9ee |
| f291654dbe |
| e9fa422916 |
| 5304a36b87 |
| 089c79e905 |
| d848050b52 |
| ddffe2bec6 |
| 19b4ca9d85 |
| f037f851d8 |
| a3ab8dd8fe |
| 6fa0d4bcf3 |
| a4f7fa9c1a |
| 266ecf6206 |
| ea6126d310 |
| 1f4a5b80c4 |
| ac6f52088a |
| 960298514c |
| 71f3a0a4cd |
| b25a7699ab |
| b39affc96e |
| be8629929b |
| 4776be6736 |
| 008e68174b |
| b4a9b86783 |
| d3ca571c0e |
| b3bbef30c0 |
| 499b2d0090 |
| 8b0f1e7045 |
| b2cfa3444f |
| 0be454c3d6 |
| de6189da4d |
| cc5b21a7e6 |
| 35a9d8ec9d |
| a831212c86 |
| e987d7d7c4 |
| 5ec756234a |
`@@ -0,0 +1,130 @@` (new file: the `piker-qt6-uv` Nix dev shell)

```nix
with (import <nixpkgs> {});
let
  glibStorePath = lib.getLib glib;
  zstdStorePath = lib.getLib zstd;
  dbusStorePath = lib.getLib dbus;
  libGLStorePath = lib.getLib libGL;
  freetypeStorePath = lib.getLib freetype;
  qt6baseStorePath = lib.getLib qt6.qtbase;
  fontconfigStorePath = lib.getLib fontconfig;
  libxkbcommonStorePath = lib.getLib libxkbcommon;
  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;

  qtpyStorePath = lib.getLib python312Packages.qtpy;
  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;

  xorgLibX11StorePath = lib.getLib xorg.libX11;
  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
in
stdenv.mkDerivation {
  name = "piker-qt6-uv";
  buildInputs = [
    # System requirements.
    glib
    dbus
    zstd
    libGL
    freetype
    qt6.qtbase
    libgcc.lib
    fontconfig
    libxkbcommon

    # Xorg requirements
    xcb-util-cursor
    xorg.libxcb
    xorg.libX11
    xorg.xcbutilwm
    xorg.xcbutilimage
    xorg.xcbutilerrors
    xorg.xcbutilkeysyms
    xorg.xcbutilrenderutil

    # Python requirements.
    python312Full
    python312Packages.uv
    python312Packages.qdarkstyle
    python312Packages.rapidfuzz
    python312Packages.pyqt6
    python312Packages.qtpy
  ];
  src = null;
  shellHook = ''
    set -e

    # Set the Qt plugin path
    # export QT_DEBUG_PLUGINS=1

    QTBASE_PATH="${qt6baseStorePath}/lib"
    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"

    LIB_GCC_PATH="${libgcc.lib}/lib"
    GLIB_PATH="${glibStorePath}/lib"
    ZSTD_PATH="${zstdStorePath}/lib"
    DBUS_PATH="${dbusStorePath}/lib"
    LIBGL_PATH="${libGLStorePath}/lib"
    FREETYPE_PATH="${freetypeStorePath}/lib"
    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"

    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"

    export LD_LIBRARY_PATH

    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"

    # NOTE: these are python site-packages dirs so they belong on
    # PYTHONPATH; the original draft appended them to a `PATCH` var
    # which nothing reads.
    PYTHONPATH="$PYTHONPATH:$RPDFUZZ_PATH"
    PYTHONPATH="$PYTHONPATH:$QDRKSTYLE_PATH"
    PYTHONPATH="$PYTHONPATH:$QTPY_PATH"
    PYTHONPATH="$PYTHONPATH:$PYQT6_PATH"
    PYTHONPATH="$PYTHONPATH:$PYQT6_SIP_PATH"

    export PYTHONPATH

    # Install deps
    uv lock

  '';
}
```
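As a quick sanity check that the `PYTHONPATH` wiring in the `shellHook` works, something like the following can be run from a python REPL inside the spawned shell (a hedged sketch; it only imports the packages pulled in above):

```python
# smoke-test sketch: confirm the Qt/python deps resolve from the
# nix-store site-packages dirs exported on PYTHONPATH above.
import PyQt6
import qtpy
import qdarkstyle
import rapidfuzz

for mod in (PyQt6, qtpy, qdarkstyle, rapidfuzz):
    print(f'{mod.__name__} -> {mod.__file__}')
```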
`@@ -0,0 +1,139 @@` (new file: `examples/max_pain.py`)

```python
#!/usr/bin/env python
from decimal import (
    Decimal,
)
import trio
import tractor
from datetime import datetime
from pprint import pformat
from cryptofeed.symbols import Symbol
from piker.brokers.deribit.api import (
    get_client,
    maybe_open_oi_feed,
)


def check_if_complete(
    oi: dict[str, dict[str, Decimal | None]],
) -> bool:
    # complete once every strike has both a call and a put OI entry
    return all(
        oi[strike]['C'] is not None
        and
        oi[strike]['P'] is not None for strike in oi
    )


async def max_pain_daemon(
) -> None:
    oi_by_strikes: dict[str, dict[str, Decimal | None]]
    expiry_dates: list[str]
    currency: str = 'btc'
    kind: str = 'option'

    async with get_client(
    ) as client:
        expiry_dates: list[str] = await client.get_expiration_dates(
            currency=currency,
            kind=kind
        )

    print(f'Available expiration dates for {currency}-{kind}:')
    print(f'{expiry_dates}')
    expiry_date: str = input('Please enter a valid expiration date: ').upper()
    print('Starting little daemon...')
    instruments: list[Symbol] = []
    oi_by_strikes: dict[str, dict[str, Decimal]]

    def update_oi_by_strikes(msg: tuple):
        nonlocal oi_by_strikes
        if 'oi' == msg[0]:
            strike_price = msg[1]['strike_price']
            option_type = msg[1]['option_type']
            open_interest = msg[1]['open_interest']
            oi_by_strikes.setdefault(
                strike_price, {}
            ).update(
                {option_type: open_interest}
            )

    def get_max_pain(
        oi_by_strikes: dict[str, dict[str, Decimal]]
    ) -> dict[str, str | Decimal]:
        '''
        Requires only the strike prices and the call/put OI per
        strike; the `closes` list is identical to the strike prices.
        For each candidate settlement strike we sum the cash value of
        all ITM calls and puts at that price: the strike with the
        lowest total intrinsic value is the max pain point.

        '''
        nonlocal timestamp
        # We need to find the lowest value, so we start at
        # infinity to ensure that, and the max_pain must be
        # an amount greater than zero.
        total_intrinsic_value: Decimal = Decimal('Infinity')
        max_pain: Decimal = Decimal(0)
        call_cash: Decimal = Decimal(0)
        put_cash: Decimal = Decimal(0)
        intrinsic_values: dict[str, dict[str, Decimal]] = {}
        closes: list = sorted(Decimal(close) for close in oi_by_strikes)

        for strike, oi in oi_by_strikes.items():
            s = Decimal(strike)
            call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes)
            put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes)

            intrinsic_values[strike] = {
                'C': call_cash,
                'P': put_cash,
                'total': call_cash + put_cash,
            }

            if intrinsic_values[strike]['total'] < total_intrinsic_value:
                total_intrinsic_value = intrinsic_values[strike]['total']
                max_pain = s

        return {
            'timestamp': timestamp,
            'expiry_date': expiry_date,
            'total_intrinsic_value': total_intrinsic_value,
            'max_pain': max_pain,
        }

    async with get_client(
    ) as client:
        instruments = await client.get_instruments(
            expiry_date=expiry_date,
        )
        oi_by_strikes = client.get_strikes_dict(instruments)

    async with maybe_open_oi_feed(
        instruments,
    ) as oi_feed:
        async for msg in oi_feed:

            update_oi_by_strikes(msg)
            if check_if_complete(oi_by_strikes):
                if 'oi' == msg[0]:
                    timestamp = msg[1]['timestamp']
                    max_pain = get_max_pain(oi_by_strikes)
                    print('-----------------------------------------------')
                    print(f'timestamp:             {datetime.fromtimestamp(max_pain["timestamp"])}')
                    print(f'expiry_date:           {max_pain["expiry_date"]}')
                    print(f'max_pain:              {max_pain["max_pain"]}')
                    print(f'total intrinsic value: {max_pain["total_intrinsic_value"]}')
                    print('-----------------------------------------------')


async def main():

    async with tractor.open_nursery() as n:

        p: tractor.Portal = await n.start_actor(
            'max_pain_daemon',
            enable_modules=[__name__],
            infect_asyncio=True,
        )
        await p.run(max_pain_daemon)

if __name__ == '__main__':
    trio.run(main)
```
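For reference, the daemon only ever reads a handful of fields off each feed msg; a hypothetical example of the `('oi', ...)` payload shape consumed by `update_oi_by_strikes()` above (field values invented for illustration):

```python
from decimal import Decimal

# hypothetical open-interest msg: only these keys are read by
# update_oi_by_strikes() and the feed loop above.
msg: tuple = (
    'oi',
    {
        'strike_price': '55000',           # keys oi_by_strikes
        'option_type': 'C',                # 'C' (call) or 'P' (put)
        'open_interest': Decimal('12.3'),
        'timestamp': 1725177600,           # read once the table is complete
    },
)
```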
`@@ -0,0 +1,19 @@` (new file: the feature README)

```markdown
## Max Pain Calculation for Deribit Options

This feature calculates the max pain point for options traded on the
Deribit exchange using the `cryptofeed` library.

- Functions in the api module for fetching options data from Deribit. [commit](https://pikers.dev/pikers/piker/commit/da55856dd2876291f55a06eb0561438a912d8241)

- Compute the max pain point based on open interest data using Deribit's API. [commit](https://pikers.dev/pikers/piker/commit/0d9d6e15ba0edeb662ec97f7599dd66af3046b94)

### How to test it?

**Before starting:** in order to get this working with `uv`, you **must** use my `tractor` [fork](https://pikers.dev/ntorres/tractor/src/branch/aio_abandons) on its `aio_abandons` branch. The reason is that I cherry-picked the `uv_migration` work guille made; for a reason I didn't dig into, on my system `tractor` needs to use `uv` too. Quite hacky, I guess.

1. `uv lock`

2. `uv run --no-dev python examples/max_pain.py`

3. A prompt should be displayed; enter one of the available expiration dates.

4. The script should then be up and running.
```
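For reference, the core computation the daemon performs is just an intrinsic-value minimization over strikes; a self-contained toy version of what `get_max_pain()` does (made-up OI numbers, not real Deribit data):

```python
from decimal import Decimal

# toy OI table: strike -> {'C': call OI, 'P': put OI} (invented numbers)
oi_by_strikes = {
    '40000': {'C': Decimal('100'), 'P': Decimal('10')},
    '50000': {'C': Decimal('50'), 'P': Decimal('50')},
    '60000': {'C': Decimal('10'), 'P': Decimal('100')},
}
closes = sorted(Decimal(k) for k in oi_by_strikes)

best: tuple | None = None
for strike in oi_by_strikes:
    s = Decimal(strike)
    # cash value of all ITM calls/puts if the mkt settled at `s`
    call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes)
    put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes)
    total = call_cash + put_cash
    if best is None or total < best[1]:
        best = (s, total)

print(f'max pain: {best[0]}, total intrinsic value: {best[1]}')
# -> max pain: 50000, total intrinsic value: 2000000
```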
```diff
@@ -51,6 +51,7 @@ __brokers__: list[str] = [
     'ib',
     'kraken',
     'kucoin',
+    'deribit',

     # broken but used to work
     # 'questrade',
@@ -61,7 +62,6 @@ __brokers__: list[str] = [
     # wstrade
     # iex

-    # deribit
     # bitso
 ]
```
```diff
@@ -23,6 +23,7 @@ from __future__ import annotations
 from contextlib import (
     asynccontextmanager as acm,
 )
+from functools import partial
 from types import ModuleType
 from typing import (
     TYPE_CHECKING,
@@ -190,14 +191,17 @@ def broker_init(


 async def spawn_brokerd(
-
     brokername: str,
     loglevel: str | None = None,

     **tractor_kwargs,

 ) -> bool:
+    '''
+    Spawn a `brokerd.<backendname>` subactor service daemon
+    using `pikerd`'s service mngr.

+    '''
     from piker.service._util import log  # use service mngr log
     log.info(f'Spawning {brokername} broker daemon')

@@ -217,27 +221,35 @@ async def spawn_brokerd(

     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
-    from piker.service import Services
-
-    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
-    portal = await Services.actor_n.start_actor(
-        dname,
-        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
-        debug_mode=Services.debug_mode,
-        **tractor_kwargs
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
     )
-
-    # NOTE: the service mngr expects an already spawned actor + its
-    # portal ref in order to do non-blocking setup of brokerd
-    # service nursery.
-    await Services.start_service_task(
-        dname,
-        portal,
-
+    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
+    mngr: ServiceMngr = get_service_mngr()
+    ctx: tractor.Context = await mngr.start_service(
+        daemon_name=dname,
+        ctx_ep=partial(
             # signature of target root-task endpoint
             daemon_fixture_ep,
+
+            # passed to daemon_fixture_ep(**kwargs)
             brokername=brokername,
             loglevel=loglevel,
+        ),
+        debug_mode=mngr.debug_mode,
+        loglevel=loglevel,
+        enable_modules=(
+            _data_mods
+            +
+            tractor_kwargs.pop('enable_modules')
+        ),
+        **tractor_kwargs
+    )
+    assert (
+        not ctx.cancel_called
+        and ctx.portal  # parent side
+        and dname in ctx.chan.uid  # subactor is named as desired
     )
     return True

@@ -262,8 +274,7 @@ async def maybe_spawn_brokerd(
     from piker.service import maybe_spawn_daemon

     async with maybe_spawn_daemon(
-
-        f'brokerd.{brokername}',
+        service_name=f'brokerd.{brokername}',
         service_task_target=spawn_brokerd,
         spawn_args={
             'brokername': brokername,
```
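The `ctx_ep=partial(...)` usage in the `spawn_brokerd()` rework above just pre-binds the endpoint's backend-specific kwargs so the service mngr can later start the task without knowing them; a minimal standalone sketch of that binding pattern (names mirror the diff but the endpoint body is a stand-in):

```python
from functools import partial

async def daemon_fixture_ep(
    brokername: str,
    loglevel: str | None = None,
) -> None:
    # stand-in for the real root-task endpoint in the diff above
    print(f'starting brokerd.{brokername} @ loglevel={loglevel}')

# bind the backend-specific args now; the service mngr can invoke
# the resulting callable later, as in `mngr.start_service()` above.
ctx_ep = partial(
    daemon_fixture_ep,
    brokername='deribit',
    loglevel='info',
)
# await ctx_ep()  # <- effectively what the spawned service task runs
```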
```diff
@@ -567,6 +567,7 @@ class Client:
     ) -> str:
         return {
             'USDTM': 'usdtm_futes',
+            'SPOT': 'spot',
             # 'COINM': 'coin_futes',
             # ^-TODO-^ bc someone might want it..?
         }[pair.venue]
```
```diff
@@ -181,7 +181,6 @@ class FutesPair(Pair):
     quoteAsset: str  # 'USDT',
     quotePrecision: int  # 8,
     requiredMarginPercent: float  # '5.0000',
-    settlePlan: int  # 0,
     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
     triggerProtect: float  # '0.0500',
     underlyingSubType: list[str]  # ['PoW'],
```
```diff
@@ -25,6 +25,7 @@ from .api import (
     get_client,
 )
 from .feed import (
+    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,
@@ -34,15 +35,20 @@ from .feed import (
     # open_trade_dialog,
     # norm_trade_records,
 # )
+from .venues import (
+    OptionPair,
+)

 log = get_logger(__name__)

 __all__ = [
     'get_client',
 #    'trades_dialogue',
+    'get_mkt_info',
     'open_history_client',
     'open_symbol_search',
     'stream_quotes',
+    'OptionPair',
 #    'norm_trade_records',
 ]
```
(One file diff suppressed because it is too large.)
```diff
@@ -18,38 +18,59 @@
 Deribit backend.

 '''
+from __future__ import annotations
 from contextlib import asynccontextmanager as acm
 from datetime import datetime
-from typing import Any, Optional, Callable
+from typing import (
+    # Any,
+    # Optional,
+    Callable,
+)
+# from pprint import pformat
 import time

+import cryptofeed
 import trio
 from trio_typing import TaskStatus
-import pendulum
-from rapidfuzz import process as fuzzy
+from pendulum import (
+    from_timestamp,
+)
 import numpy as np
 import tractor

-from piker.brokers import open_cached_client
-from piker.log import get_logger, get_console_log
-from piker.data import ShmArray
-from piker.brokers._util import (
-    BrokerError,
+from piker.accounting import (
+    Asset,
+    MktPair,
+    unpack_fqme,
+)
+from piker.brokers import (
+    open_cached_client,
+    NoData,
     DataUnavailable,
 )
-
-from cryptofeed import FeedHandler
-from cryptofeed.defines import (
-    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
+from piker._cacheables import (
+    async_lifo_cache,
 )
-from cryptofeed.symbols import Symbol
+from piker.log import (
+    get_logger,
+    mk_repr,
+)
+from piker.data.validate import FeedInit


 from .api import (
-    Client, Trade,
-    get_config,
-    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
+    Client,
+    # get_config,
+    piker_sym_to_cb_sym,
+    cb_sym_to_deribit_inst,
+    str_to_cb_sym,
     maybe_open_price_feed
 )
+from .venues import (
+    Pair,
+    OptionPair,
+    Trade,
+)

 _spawn_kwargs = {
     'infect_asyncio': True,
```
```diff
@@ -64,90 +85,215 @@ async def open_history_client(
     mkt: MktPair,
 ) -> tuple[Callable, int]:

-    fnstrument: str = mkt.bs_fqme
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('deribit') as client:

+        pair: OptionPair = client._pairs[mkt.dst.name]
+        # XXX NOTE, the cuckers use ms !!!
+        creation_time_s: int = pair.creation_timestamp/1000
+
         async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
+            timeframe: float,
+            end_dt: datetime | None = None,
+            start_dt: datetime | None = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
+            if timeframe != 60:
+                raise DataUnavailable('Only 1m bars are supported')

-            array = await client.bars(
-                instrument,
+            array: np.ndarray = await client.bars(
+                mkt,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
             if len(array) == 0:
-                raise DataUnavailable
+                if (
+                    end_dt is None
+                ):
+                    raise DataUnavailable(
+                        'No history seems to exist yet?\n\n'
+                        f'{mkt}'
+                    )
+                elif (
+                    end_dt
+                    and
+                    end_dt.timestamp() < creation_time_s
+                ):
+                    # the contract can't have history
+                    # before it was created.
+                    pair_type_str: str = type(pair).__name__
+                    create_dt: datetime = from_timestamp(creation_time_s)
+                    raise DataUnavailable(
+                        f'No history prior to\n'
+                        f'`{pair_type_str}.creation_timestamp: int = '
+                        f'{pair.creation_timestamp}\n\n'
+                        f'------ deribit sux ------\n'
+                        f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n'
+                        f'creation_time_s: {creation_time_s}\n'
+                        f'create_dt: {create_dt}\n'
+                    )
+                raise NoData(
+                    f'No frame for {start_dt} -> {end_dt}\n'
+                )

-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            start_dt = from_timestamp(array[0]['time'])
+            end_dt = from_timestamp(array[-1]['time'])
+
+            times = array['time']
+            if not times.any():
+                raise ValueError(
+                    'Bad frame with null-times?\n\n'
+                    f'{times}'
+                )
+
+            if end_dt is None:
+                inow: int = round(time.time())
+                if (inow - times[-1]) > 60:
+                    await tractor.pause()

             return array, start_dt, end_dt

-        yield get_ohlc, {'erlangs': 3, 'rate': 3}
+        yield (
+            get_ohlc,
+            {  # backfill config
+                'erlangs': 3,
+                'rate': 3,
+            }
+        )
+
+
+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair|OptionPair] | None:
+
+    # uppercase since kraken bs_mktid is always upper
+    if 'deribit' not in fqme.lower():
+        fqme += '.deribit'
+
+    mkt_mode: str = ''
+    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)
+
+    # NOTE: we always upper case all tokens to be consistent with
+    # binance's symbology style for pairs, like `BTCUSDT`, but in
+    # theory we could also just keep things lower case; as long as
+    # we're consistent and the symcache matches whatever this func
+    # returns, always!
+    expiry: str = expiry.upper()
+    venue: str = venue.upper()
+    # venue_lower: str = venue.lower()
+
+    mkt_mode: str = 'option'
+
+    async with open_cached_client(
+        'deribit',
+    ) as client:
+
+        assets: dict[str, Asset] = await client.get_assets()
+        pair_str: str = mkt_ep.lower()
+
+        pair: Pair = await client.exch_info(
+            sym=pair_str,
+        )
+        mkt_mode = pair.venue
+        client.mkt_mode = mkt_mode
+
+        dst: Asset | None = assets.get(pair.bs_dst_asset)
+        src: Asset | None = assets.get(pair.bs_src_asset)
+
+        mkt = MktPair(
+            dst=dst,
+            src=src,
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=pair.symbol,
+            venue=mkt_mode,
+            broker='deribit',
+            _atype=mkt_mode,
+            _fqme_without_src=True,
+
+            # expiry=pair.expiry,
+            # XXX TODO, currently we don't use it since it's
+            # already "described" in the `OptionPair.symbol: str`
+            # and if we slap in the ISO repr it's kinda hideous..
+            # -[ ] figure out the best either std
+        )
+        return mkt, pair


 async def stream_quotes(
-
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
-    loglevel: str = None,

     # startup sync
     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    sym = symbols[0]
+    '''
+    Open a live quote stream for the market set defined by `symbols`.
+
+    Internally this starts a `cryptofeed.FeedHandler` inside an
+    `asyncio`-side task and relays through L1 and `Trade` msgs here
+    to our `trio.Task`.
+
+    '''
+    sym = symbols[0].split('.')[0]
+    init_msgs: list[FeedInit] = []
+
+    # multiline nested `dict` formatter (since rn quote-msgs are
+    # just that).
+    pfmt: Callable[[str], str] = mk_repr(
+        # so we can see `deribit`'s delightfully mega-long bs fields..
+        maxstring=100,
+    )

     async with (
         open_cached_client('deribit') as client,
         send_chan as send_chan
     ):
+        mkt: MktPair
+        pair: Pair
+        mkt, pair = await get_mkt_info(sym)

-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            sym: {
-                'symbol_info': {
-                    'asset_type': 'option',
-                    'price_tick_size': 0.0005
-                },
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
-
-        nsym = piker_sym_to_cb_sym(sym)
-
-        async with maybe_open_price_feed(sym) as stream:
-
-            cache = await client.cache_symbols()
-
-            last_trades = (await client.last_trades(
-                cb_sym_to_deribit_inst(nsym), count=1)).trades
+        # build out init msgs according to latest spec
+        init_msgs.append(
+            FeedInit(
+                mkt_info=mkt,
+            )
+        )
+        # build `cryptofeed` feed-handle
+        cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym)
+
+        from_cf: tractor.to_asyncio.LinkedTaskChannel
+        async with maybe_open_price_feed(sym) as from_cf:
+
+            # load the "last trades" summary
+            last_trades_res: cryptofeed.LastTradesResult = await client.last_trades(
+                cb_sym_to_deribit_inst(cf_sym),
+                count=1,
+            )
+            last_trades: list[Trade] = last_trades_res.trades

-            if len(last_trades) == 0:
-                last_trade = None
-                async for typ, quote in stream:
-                    if typ == 'trade':
-                        last_trade = Trade(**(quote['data']))
-                        break
-
-            else:
-                last_trade = Trade(**(last_trades[0]))
-
-            first_quote = {
+            # TODO, do we even need this or will the above always
+            # work?
+            # if not last_trades:
+            #     await tractor.pause()
+            #     async for typ, quote in from_cf:
+            #         if typ == 'trade':
+            #             last_trade = Trade(**(quote['data']))
+            #             break
+
+            # else:
+            last_trade = Trade(
+                **(last_trades[0])
+            )
+
+            first_quote: dict = {
                 'symbol': sym,
                 'last': last_trade.price,
                 'brokerd_ts': last_trade.timestamp,
```
```diff
@@ -158,13 +304,84 @@ async def stream_quotes(
                     'broker_ts': last_trade.timestamp
                 }]
             }
-            task_status.started((init_msgs,  first_quote))
+            task_status.started((
+                init_msgs,
+                first_quote,
+            ))

             feed_is_live.set()

-            async for typ, quote in stream:
-                topic = quote['symbol']
-                await send_chan.send({topic: quote})
+            # NOTE XXX, static for now!
+            # => since this only handles ONE mkt feed at a time we
+            # don't need a lookup table to map interleaved quotes
+            # from multiple possible mkt-pairs
+            topic: str = mkt.bs_fqme
+
+            # deliver until cancelled
+            async for typ, ref in from_cf:
+                match typ:
+                    case 'trade':
+                        trade: cryptofeed.types.Trade = ref
+
+                        # TODO, re-impl this according to the ideal
+                        # fqme for opts that we choose!!
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(trade.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'last': trade.price,
+                            'broker_ts': time.time(),
+                            # ^TODO, name this `brokerd/datad_ts` and
+                            # use `time.time_ns()` ??
+                            'ticks': [{
+                                'type': 'trade',
+                                'price': float(trade.price),
+                                'size': float(trade.amount),
+                                'broker_ts': trade.timestamp,
+                            }],
+                        }
+                        log.info(
+                            f'deribit {typ!r} quote for {sym!r}\n\n'
+                            f'{trade}\n\n'
+                            f'{pfmt(piker_quote)}\n'
+                        )
+
+                    case 'l1':
+                        book: cryptofeed.types.L1Book = ref
+
+                        # TODO, so this is where we can possibly change things
+                        # and instead lever the `MktPair.bs_fqme: str` output?
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(book.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'ticks': [
+
+                                {'type': 'bid',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size)},
+
+                                {'type': 'bsize',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size),},
+
+                                {'type': 'ask',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),},
+
+                                {'type': 'asize',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),}
+                            ]
+                        }
+
+                await send_chan.send({
+                    topic: piker_quote,
+                })


 @tractor.context
```
```diff
@@ -174,12 +391,21 @@ async def open_symbol_search(
     async with open_cached_client('deribit') as client:

         # load all symbols locally for fast search
-        cache = await client.cache_symbols()
+        # cache = client._pairs
         await ctx.started()

         async with ctx.open_stream() as stream:
-
+            pattern: str
             async for pattern in stream:
-                # repack in dict form
-                await stream.send(
-                    await client.search_symbols(pattern))
+
+                # NOTE: pattern fuzzy-matching is done within
+                # the method impl.
+                pairs: dict[str, Pair] = await client.search_symbols(
+                    pattern,
+                )
+                # repack in fqme-keyed table
+                byfqme: dict[str, Pair] = {}
+                for pair in pairs.values():
+                    byfqme[pair.bs_fqme] = pair
+
+                await stream.send(byfqme)
```
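For reference, the normalized `'l1'` quote assembled above always carries the same 4-tick layout; a sketch with made-up book values (the `symbol` here is a hypothetical lower-cased instrument name):

```python
# hypothetical L1 book values -> the normalized piker quote shape
bid_price, bid_size = 0.0455, 10.0
ask_price, ask_size = 0.0465, 7.0

piker_quote: dict = {
    'symbol': 'btc-1sep24-55000-c',
    'ticks': [
        {'type': 'bid', 'price': bid_price, 'size': bid_size},
        {'type': 'bsize', 'price': bid_price, 'size': bid_size},
        {'type': 'ask', 'price': ask_price, 'size': ask_size},
        {'type': 'asize', 'price': ask_price, 'size': ask_size},
    ],
}
```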
`@@ -0,0 +1,196 @@` (new file: the deribit `venues.py` module, per the `from .venues import ...` lines above)

```python
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Per-market data-type definitions and schema types.

"""
from __future__ import annotations
import pendulum
from typing import (
    Literal,
    Optional,
)
from decimal import Decimal

from piker.types import Struct


# API endpoint paths by venue / sub-API
_domain: str = 'deribit.com'
_url = f'https://www.{_domain}'

# WEBsocketz
_ws_url: str = f'wss://www.{_domain}/ws/api/v2'

# test nets
_testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2'

MarketType = Literal[
    'option'
]


def get_api_eps(venue: MarketType) -> tuple[str, str]:
    '''
    Return API ep root paths per venue.

    '''
    return {
        'option': (
            _ws_url,
        ),
    }[venue]


class Pair(Struct, frozen=True, kw_only=True):

    symbol: str

    # src
    quote_currency: str  # 'BTC'

    # dst
    base_currency: str  # "BTC",

    tick_size: float  # 0.0001 # [{'above_price': 0.005, 'tick_size': 0.0005}]
    tick_size_steps: list[dict[str, float]]

    @property
    def price_tick(self) -> Decimal:
        return Decimal(str(self.tick_size_steps[0]['above_price']))

    @property
    def size_tick(self) -> Decimal:
        return Decimal(str(self.tick_size))

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}'

    @property
    def bs_mktid(self) -> str:
        return f'{self.symbol}.{self.venue}'


class OptionPair(Pair, frozen=True):

    taker_commission: float  # 0.0003
    strike: float  # 5000.0
    settlement_period: str  # 'day'
    settlement_currency: str  # "BTC",
    rfq: bool  # false
    price_index: str  # 'btc_usd'
    option_type: str  # 'call'
    min_trade_amount: float  # 0.1
    maker_commission: float  # 0.0003
    kind: str  # 'option'
    is_active: bool  # true
    instrument_type: str  # 'reversed'
    instrument_name: str  # 'BTC-1SEP24-55000-C'
    instrument_id: int  # 364671
    expiration_timestamp: int  # 1725177600000
    creation_timestamp: int  # 1724918461000
    counter_currency: str  # 'USD'
    contract_size: float  # '1.0'
    block_trade_tick_size: float  # '0.0001'
    block_trade_min_trade_amount: int  # '25'
    block_trade_commission: float  # '0.003'

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.deribit:OptionPair'

    # TODO, impl this without the MM:SS part of
    # the `'THH:MM:SS..'` etc..
    @property
    def expiry(self) -> str:
        iso_date = pendulum.from_timestamp(
            self.expiration_timestamp / 1000
        ).isoformat()
        return iso_date

    @property
    def venue(self) -> str:
        return f'{self.instrument_type}_option'

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quote_currency}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.symbol}'


PAIRTYPES: dict[MarketType, Pair] = {
    'option': OptionPair,
}


class JSONRPCResult(Struct):
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    error: Optional[dict] = None
    result: Optional[list[dict]] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    low: list[float]
    cost: list[float]
    high: list[float]
    open: list[float]
    close: list[float]
    ticks: list[int]
    status: str
    volume: list[float]


class Trade(Struct):
    iv: float
    price: float
    amount: float
    trade_id: str
    contracts: float
    direction: str
    trade_seq: int
    timestamp: int
    mark_price: float
    index_price: float
    tick_direction: int
    instrument_name: str
    # NOTE: the original draft had trailing commas on these defaults
    # which made them 1-tuples instead of scalars.
    combo_id: Optional[str] = ''
    combo_trade_id: Optional[int] = 0
    block_trade_id: Optional[str] = ''
    block_trade_leg_count: Optional[int] = 0


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool
```
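A quick check of the `OptionPair.expiry` conversion above: deribit's `expiration_timestamp` is in milliseconds, hence the `/ 1000` before handing it to `pendulum` (using the example values from the field comments):

```python
import pendulum

# expiration_timestamp for the 'BTC-1SEP24-55000-C' example above (ms)
ts_ms: int = 1725177600000
print(pendulum.from_timestamp(ts_ms / 1000).isoformat())
# -> '2024-09-01T08:00:00+00:00'
```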
```diff
@@ -111,6 +111,10 @@ class KucoinMktPair(Struct, frozen=True):
     quoteMaxSize: float
     quoteMinSize: float
     symbol: str  # our bs_mktid, kucoin's internal id
+    feeCategory: int
+    makerFeeCoefficient: float
+    takerFeeCoefficient: float
+    st: bool


 class AccountTrade(Struct, frozen=True):
@@ -593,7 +597,7 @@ async def get_client() -> AsyncGenerator[Client, None]:
     '''
     async with (
         httpx.AsyncClient(
-            base_url=f'https://api.kucoin.com/api',
+            base_url='https://api.kucoin.com/api',
         ) as trio_client,
     ):
         client = Client(httpx_client=trio_client)
@@ -637,7 +641,7 @@ async def open_ping_task(
                 await trio.sleep((ping_interval - 1000) / 1000)
                 await ws.send_msg({'id': connect_id, 'type': 'ping'})

-        log.info('Starting ping task for kucoin ws connection')
+        log.warning('Starting ping task for kucoin ws connection')
         n.start_soon(ping_server)

         yield
@@ -649,9 +653,14 @@ async def get_mkt_info(
 async def get_mkt_info(
     fqme: str,

-) -> tuple[MktPair, KucoinMktPair]:
+) -> tuple[
+    MktPair,
+    KucoinMktPair,
+]:
     '''
-    Query for and return a `MktPair` and `KucoinMktPair`.
+    Query for and return both a `piker.accounting.MktPair` and
+    `KucoinMktPair` from provided `fqme: str`
+    (fully-qualified-market-endpoint).

     '''
     async with open_cached_client('kucoin') as client:
@@ -726,6 +735,8 @@ async def stream_quotes(

         log.info(f'Starting up quote stream(s) for {symbols}')
         for sym_str in symbols:
+            mkt: MktPair
+            pair: KucoinMktPair
             mkt, pair = await get_mkt_info(sym_str)
             init_msgs.append(
                 FeedInit(mkt_info=mkt)
```
|  | @ -733,7 +744,11 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|         ws: NoBsWs |         ws: NoBsWs | ||||||
|         token, ping_interval = await client._get_ws_token() |         token, ping_interval = await client._get_ws_token() | ||||||
|         connect_id = str(uuid4()) |         log.info('API reported ping_interval: {ping_interval}\n') | ||||||
|  | 
 | ||||||
|  |         connect_id: str = str(uuid4()) | ||||||
|  |         typ: str | ||||||
|  |         quote: dict | ||||||
|         async with ( |         async with ( | ||||||
|             open_autorecon_ws( |             open_autorecon_ws( | ||||||
|                 ( |                 ( | ||||||
@@ -747,20 +762,37 @@ async def stream_quotes(
                 ),
             ) as ws,
             open_ping_task(ws, ping_interval, connect_id),
-            aclosing(stream_messages(ws, sym_str)) as msg_gen,
+            aclosing(
+                iter_normed_quotes(
+                    ws, sym_str
+                )
+            ) as iter_quotes,
         ):
-            typ, quote = await anext(msg_gen)
+            typ, quote = await anext(iter_quotes)

-            while typ != 'trade':
-                # take care to not unblock here until we get a real
-                # trade quote
-                typ, quote = await anext(msg_gen)
+            # take care to not unblock here until we get a real
+            # trade quote?
+            # ^TODO, remove this right?
+            # -[ ] what often blocks chart boot/new-feed switching
+            #   since we're waiting for a live quote instead of just
+            #   loading history afap..
+            #  |_ XXX, not sure if we require a bit of rework to core
+            #    feed init logic or if backends just gotta be
+            #    changed up.. feel like there was some causality
+            #    dilemma prolly only seen with IB too..
+            # while typ != 'trade':
+            #     typ, quote = await anext(iter_quotes)

             task_status.started((init_msgs, quote))
             feed_is_live.set()

-            async for typ, msg in msg_gen:
-                await send_chan.send({sym_str: msg})
+            # XXX NOTE, DO NOT include the `.<backend>` suffix!
+            # OW the sampling loop will not broadcast correctly..
+            # since `bus._subscribers.setdefault(bs_fqme, set())`
+            # is used inside `.data.open_feed_bus()` !!!
+            topic: str = mkt.bs_fqme
+            async for typ, quote in iter_quotes:
+                await send_chan.send({topic: quote})

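The NOTE above is load-bearing: the feed bus keys its subscriber sets by the *unsuffixed* `bs_fqme`, so publishing under a backend-suffixed topic silently reaches nobody. A toy illustration (registry and keys are hypothetical):

# minimal sketch of a dict-of-sets subscriber registry like
# `bus._subscribers`; all names here are illustrative.
subscribers: dict[str, set] = {}
subscribers.setdefault('btcusdt.spot', set())  # registered w/o suffix

# publishing under the matching key finds the sub set..
assert 'btcusdt.spot' in subscribers

# ..but a `.kucoin`-suffixed topic misses it entirely and the
# quote would never be broadcast.
assert 'btcusdt.spot.kucoin' not in subscribers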
 @acm
@@ -815,7 +847,7 @@ async def subscribe(
             )


-async def stream_messages(
+async def iter_normed_quotes(
     ws: NoBsWs,
     sym: str,

@@ -846,6 +878,9 @@ async def stream_messages(

                 yield 'trade', {
                     'symbol': sym,
+                    # TODO, is 'last' even used elsewhere/a-good
+                    # semantic? can't we just read the ticks with our
+                    # `.data.ticktools.frame_ticks()`?
                     'last': trade_data.price,
                     'brokerd_ts': last_trade_ts,
                     'ticks': [
@@ -938,7 +973,7 @@ async def open_history_client(
             if end_dt is None:
                 inow = round(time.time())

-                print(
+                log.debug(
                     f'difference in time between load and processing'
                     f'{inow - times[-1]}'
                 )
@@ -653,7 +653,11 @@ class Router(Struct):
             flume = feed.flumes[fqme]
             first_quote: dict = flume.first_quote
             book: DarkBook = self.get_dark_book(broker)
-            book.lasts[fqme]: float = float(first_quote['last'])
+
+            if not (last := first_quote.get('last')):
+                last: float = flume.rt_shm.array[-1]['close']
+
+            book.lasts[fqme]: float = float(last)

             async with self.maybe_open_brokerd_dialog(
                 brokermod=brokermod,
@@ -716,7 +720,7 @@ class Router(Struct):
             subs = self.subscribers[sub_key]

         sent_some: bool = False
-        for client_stream in subs:
+        for client_stream in subs.copy():
             try:
                 await client_stream.send(msg)
                 sent_some = True
@@ -1010,6 +1014,10 @@ async def translate_and_relay_brokerd_events(
                 status_msg.brokerd_msg = msg
                 status_msg.src = msg.broker_details['name']

+                if not status_msg.req:
+                    # likely some order change state?
+                    await tractor.pause()
+                else:
                     await router.client_broadcast(
                         status_msg.req.symbol,
                         status_msg,
@@ -335,7 +335,7 @@ def services(config, tl, ports):
                 name='service_query',
                 loglevel=config['loglevel'] if tl else None,
             ),
-            tractor.get_arbiter(
+            tractor.get_registry(
                 host=host,
                 port=ports[0]
             ) as portal
@@ -25,10 +25,12 @@ from collections import (
     defaultdict,
 )
 from contextlib import asynccontextmanager as acm
+from functools import partial
 import time
 from typing import (
     Any,
     AsyncIterator,
+    Callable,
     TYPE_CHECKING,
 )

@@ -42,7 +44,7 @@ from tractor.trionics import (
     maybe_open_nursery,
 )
 import trio
-from trio_typing import TaskStatus
+from trio import TaskStatus

 from .ticktools import (
     frame_ticks,
@@ -53,6 +55,9 @@ from ._util import (
     get_console_log,
 )
 from ..service import maybe_spawn_daemon
+from piker.log import (
+    mk_repr,
+)

 if TYPE_CHECKING:
     from ._sharedmem import (
@@ -70,6 +75,7 @@ if TYPE_CHECKING:
 _default_delay_s: float = 1.0


+# TODO: use new `tractor.singleton_acm` API for this!
 class Sampler:
     '''
     Global sampling engine registry.
@@ -79,9 +85,9 @@ class Sampler:

     This non-instantiated type is meant to be a singleton within
     a `samplerd` actor-service spawned once by the user wishing to
     time-step-sample (real-time) quote feeds, see
-    ``.service.maybe_open_samplerd()`` and the below
-    ``register_with_sampler()``.
+    `.service.maybe_open_samplerd()` and the below
+    `register_with_sampler()`.

     '''
     service_nursery: None | trio.Nursery = None
@@ -375,7 +381,10 @@ async def register_with_sampler(
                 assert Sampler.ohlcv_shms

             # unblock caller
-            await ctx.started(set(Sampler.ohlcv_shms.keys()))
+            await ctx.started(
+                # XXX bc msgpack only allows one array type!
+                list(Sampler.ohlcv_shms.keys())
+            )

             if open_index_stream:
                 try:
@@ -419,7 +428,6 @@ async def register_with_sampler(


 async def spawn_samplerd(
-
     loglevel: str | None = None,
     **extra_tractor_kwargs

@@ -429,7 +437,10 @@ async def spawn_samplerd(
     update and increment count write and stream broadcasting.

     '''
-    from piker.service import Services
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
+    )

     dname = 'samplerd'
     log.info(f'Spawning `{dname}`')

@@ -437,26 +448,33 @@ async def spawn_samplerd(
     # singleton lock creation of ``samplerd`` since we only ever want
     # one daemon per ``pikerd`` proc tree.
     # TODO: make this built-into the service api?
-    async with Services.locks[dname + '_singleton']:
+    mngr: ServiceMngr = get_service_mngr()
+    already_started: bool = dname in mngr.service_tasks

-        if dname not in Services.service_tasks:
-
-            portal = await Services.actor_n.start_actor(
-                dname,
+    async with mngr._locks[dname + '_singleton']:
+        ctx: Context = await mngr.start_service(
+            daemon_name=dname,
+            ctx_ep=partial(
+                register_with_sampler,
+                period_s=1,
+                sub_for_broadcasts=False,
+            ),
+            debug_mode=mngr.debug_mode,  # set by pikerd flag
+
+            # proxy-through to tractor
             enable_modules=[
                 'piker.data._sampling',
             ],
             loglevel=loglevel,
-                debug_mode=Services.debug_mode,  # set by pikerd flag
             **extra_tractor_kwargs
         )
-
-            await Services.start_service_task(
-                dname,
-                portal,
-                register_with_sampler,
-                period_s=1,
-                sub_for_broadcasts=False,
-            )
+        if not already_started:
+            assert (
+                ctx
+                and
+                ctx.portal
+                and
+                not ctx.cancel_called
+            )
             return True
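The shape of the new startup flow, factored as a generic "start once under a per-name lock" pattern (a sketch only, not the actual `ServiceMngr` API):

from collections import defaultdict
import trio

_locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)
_started: set[str] = set()

async def start_once(name: str, start) -> bool:
    # serialize spawn attempts per daemon name; late arrivals
    # see the name already registered and report "already running".
    async with _locks[name + '_singleton']:
        if name in _started:
            return False
        await start()  # the actual (async) spawn step
        _started.add(name)
        return True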
| 
 | 
 | ||||||
|  | @ -561,7 +579,6 @@ async def open_sample_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def sample_and_broadcast( | async def sample_and_broadcast( | ||||||
| 
 |  | ||||||
|     bus: _FeedsBus,  # noqa |     bus: _FeedsBus,  # noqa | ||||||
|     rt_shm: ShmArray, |     rt_shm: ShmArray, | ||||||
|     hist_shm: ShmArray, |     hist_shm: ShmArray, | ||||||
|  | @ -582,11 +599,22 @@ async def sample_and_broadcast( | ||||||
| 
 | 
 | ||||||
|     overruns = Counter() |     overruns = Counter() | ||||||
| 
 | 
 | ||||||
|  |     # multiline nested `dict` formatter (since rn quote-msgs are | ||||||
|  |     # just that). | ||||||
|  |     pfmt: Callable[[str], str] = mk_repr() | ||||||
|  | 
 | ||||||
|     # iterate stream delivered by broker |     # iterate stream delivered by broker | ||||||
|     async for quotes in quote_stream: |     async for quotes in quote_stream: | ||||||
|         # print(quotes) |  | ||||||
| 
 | 
 | ||||||
|         # TODO: ``numba`` this! |         # XXX WARNING XXX only enable for debugging bc ow can cost | ||||||
|  |         # ALOT of perf with HF-feedz!!! | ||||||
|  |         # | ||||||
|  |         # log.info( | ||||||
|  |         #     'Rx live quotes:\n' | ||||||
|  |         #     f'{pfmt(quotes)}' | ||||||
|  |         # ) | ||||||
|  | 
 | ||||||
|  |         # TODO: `numba` this! | ||||||
|         for broker_symbol, quote in quotes.items(): |         for broker_symbol, quote in quotes.items(): | ||||||
|             # TODO: in theory you can send the IPC msg *before* writing |             # TODO: in theory you can send the IPC msg *before* writing | ||||||
|             # to the sharedmem array to decrease latency, however, that |             # to the sharedmem array to decrease latency, however, that | ||||||
@@ -659,6 +687,18 @@ async def sample_and_broadcast(
             sub_key: str = broker_symbol.lower()
             subs: set[Sub] = bus.get_subs(sub_key)

+            if not subs:
+                all_bs_fqmes: list[str] = list(
+                    bus._subscribers.keys()
+                )
+                log.warning(
+                    f'No subscribers for {brokername!r} live-quote ??\n'
+                    f'broker_symbol: {broker_symbol}\n\n'
+
+                    f'Maybe the backend-sys symbol does not match one of,\n'
+                    f'{pfmt(all_bs_fqmes)}\n'
+                )
+
             # NOTE: by default the broker backend doesn't append
             # its own "name" into the fqme schema (but maybe it
             # should?) so we have to manually generate the correct
@@ -889,6 +929,7 @@ async def uniform_rate_send(
             # to consumers which crash or lose network connection.
             # I.e. we **DO NOT** want to crash and propagate up to
             # ``pikerd`` these kinds of errors!
+            trio.EndOfChannel,
             trio.ClosedResourceError,
             trio.BrokenResourceError,
             ConnectionResetError,
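All of these exception types signal "the consumer went away" rather than a bug in the feed itself; the handler's job is to drop that subscriber and keep the daemon alive. A minimal sketch of the same guard around a send loop:

import trio

async def send_all(
    chan: trio.abc.SendChannel,
    msgs: list[dict],
) -> None:
    try:
        for msg in msgs:
            await chan.send(msg)
    except (
        trio.EndOfChannel,
        trio.ClosedResourceError,
        trio.BrokenResourceError,
    ):
        # peer hung up or lost network: drop this consumer,
        # never crash the whole daemon tree.
        pass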
@@ -273,7 +273,7 @@ async def _reconnect_forever(
                 nobsws._connected.set()
                 await trio.sleep_forever()
         except HandshakeError:
-            log.exception(f'Retrying connection')
+            log.exception('Retrying connection')

         # ws & nursery block ends

@@ -359,8 +359,8 @@ async def open_autorecon_ws(


 '''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
+JSONRPC response-request style machinery for transparent multiplexing
+of msgs over a `NoBsWs`.

 '''

@@ -377,44 +377,78 @@ async def open_jsonrpc_session(
     url: str,
     start_id: int = 0,
     response_type: type = JSONRPCResult,
-    request_type: Optional[type] = None,
-    request_hook: Optional[Callable] = None,
-    error_hook: Optional[Callable] = None,
+    msg_recv_timeout: float = float('inf'),
+    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
+    # and options mkts are generally "slow moving"..
+    #
+    # FURTHER if we break the underlying ws connection then since we
+    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
+    # `_reconnect_forever()`, the jsonrpc "transport pipe" gets
+    # broken and never restored with wtv init sequence is required to
+    # re-establish a working req-resp session.
+
+    # request_type: Optional[type] = None,
+    # request_hook: Optional[Callable] = None,
+    # error_hook: Optional[Callable] = None,
 ) -> Callable[[str, dict], dict]:

+    # NOTE, store all request msgs so we can raise errors on the
+    # caller side!
+    req_msgs: dict[int, dict] = {}
+
     async with (
         trio.open_nursery() as n,
-        open_autorecon_ws(url) as ws
+        open_autorecon_ws(
+            url=url,
+            msg_recv_timeout=msg_recv_timeout,
+        ) as ws
     ):
-        rpc_id: Iterable = count(start_id)
+        rpc_id: Iterable[int] = count(start_id)
         rpc_results: dict[int, dict] = {}

-        async def json_rpc(method: str, params: dict) -> dict:
+        async def json_rpc(
+            method: str,
+            params: dict,
+        ) -> dict:
             '''
             perform a json rpc call and wait for the result, raise exception in
             case of error field present on response
             '''
+            nonlocal req_msgs
+
+            req_id: int = next(rpc_id)
             msg = {
                 'jsonrpc': '2.0',
-                'id': next(rpc_id),
+                'id': req_id,
                 'method': method,
                 'params': params
             }
             _id = msg['id']

-            rpc_results[_id] = {
+            result = rpc_results[_id] = {
                 'result': None,
-                'event': trio.Event()
+                'error': None,
+                'event': trio.Event(),  # signal caller resp arrived
             }
+            req_msgs[_id] = msg

             await ws.send_msg(msg)

+            # wait for response before unblocking requester code
             await rpc_results[_id]['event'].wait()

-            ret = rpc_results[_id]['result']
-
-            del rpc_results[_id]
+            if (maybe_result := result['result']):
+                ret = maybe_result
+                del rpc_results[_id]
+
+            else:
+                err = result['error']
+                raise Exception(
+                    f'JSONRPC request failed\n'
+                    f'req: {msg}\n'
+                    f'resp: {err}\n'
+                )

             if ret.error is not None:
                 raise Exception(json.dumps(ret.error, indent=4))
@@ -428,6 +462,7 @@ async def open_jsonrpc_session(
             the server side.

             '''
+            nonlocal req_msgs
             async for msg in ws:
                 match msg:
                     case {
@@ -451,15 +486,29 @@ async def open_jsonrpc_session(
                         'params': _,
                     }:
                         log.debug(f'Received\n{msg}')
-                        if request_hook:
-                            await request_hook(request_type(**msg))
+                        # if request_hook:
+                        #     await request_hook(request_type(**msg))

                     case {
                         'error': error
                     }:
-                        log.warning(f'Received\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
+                        # if error_hook:
+                        #     await error_hook(response_type(**msg))
+
+                        # retrieve orig request msg, set error
+                        # response in original "result" msg,
+                        # THEN FINALLY set the event to signal caller
+                        # to raise the error in the parent task.
+                        req_id: int = msg['id']
+                        req_msg: dict = req_msgs[req_id]
+                        result: dict = rpc_results[req_id]
+                        result['error'] = error
+                        result['event'].set()
+                        log.error(
+                            f'JSONRPC request failed\n'
+                            f'req: {req_msg}\n'
+                            f'resp: {error}\n'
+                        )

                     case _:
                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
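Presumed end-to-end usage after this change (the url, method and the context-manager entry are assumptions based on the deribit backend being the only current user):

async with open_jsonrpc_session(
    url='wss://test.deribit.com/ws/api/v2',  # hypothetical endpoint
    msg_recv_timeout=60,  # opt-in to a finite recv timeout
) as json_rpc:
    res = await json_rpc(
        'public/get_time',
        params={},
    )
    # a server-side error response now raises right here in the
    # requesting task (with the original req msg attached) instead
    # of only being logged from the receiver loop.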
@@ -540,7 +540,10 @@ async def open_feed_bus(
         # subscription since the backend isn't (yet) expected to
         # append its own name to the fqme, so we filter on keys
         # which *do not* include that name (e.g .ib) .
-        bus._subscribers.setdefault(bs_fqme, set())
+        bus._subscribers.setdefault(
+            bs_fqme,
+            set(),
+        )

     # sync feed subscribers with flume handles
     await ctx.started(

								piker/log.py
@@ -18,7 +18,11 @@
 Log like a forester!
 """
 import logging
+import reprlib
 import json
+from typing import (
+    Callable,
+)

 import tractor
 from pygments import (
@@ -84,3 +88,27 @@ def colorize_json(
         # likeable styles: algol_nu, tango, monokai
         formatters.TerminalTrueColorFormatter(style=style)
     )
+
+
+def mk_repr(
+    **repr_kws,
+) -> Callable[[str], str]:
+    '''
+    Allocate and deliver a `reprlib.Repr` instance with provided
+    input settings using the std-lib's `reprlib` mod,
+     * https://docs.python.org/3/library/reprlib.html
+
+    ------ Ex. ------
+    An up to 6-layer-nested `dict` as multi-line:
+    - https://stackoverflow.com/a/79102479
+    - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel
+
+    '''
+    def_kws: dict[str, int] = dict(
+        indent=2,
+        maxlevel=6,  # recursion levels
+        maxstring=66,  # match editor line-len limit
+    )
+    def_kws |= repr_kws
+    reprr = reprlib.Repr(**def_kws)
+    return reprr.repr
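Quick usage check for the new helper (note the `reprlib.Repr` constructor kwargs and `indent` support require Python >= 3.12):

from piker.log import mk_repr

pfmt = mk_repr()  # defaults: indent=2, maxlevel=6, maxstring=66
quote = {
    'bid': {'price': 42_000.5, 'size': 1.2},
    'ask': {'price': 42_001.0, 'size': 0.8},
}
# multi-line, depth-limited rendering suitable for log msgs,
# eg. `log.info(f'Rx live quotes:\n{pfmt(quotes)}')`
print(pfmt(quote))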
@@ -30,7 +30,11 @@ Actor runtime primitives and (distributed) service APIs for,
   => TODO: maybe to (re)move elsewhere?

 '''
-from ._mngr import Services as Services
+from ._mngr import (
+    get_service_mngr as get_service_mngr,
+    open_service_mngr as open_service_mngr,
+    ServiceMngr as ServiceMngr,
+)
 from ._registry import (
     _tractor_kwargs as _tractor_kwargs,
     _default_reg_addr as _default_reg_addr,
@@ -21,7 +21,6 @@
 from __future__ import annotations
 import os
 from typing import (
-    Optional,
     Any,
     ClassVar,
 )
@@ -30,13 +29,13 @@ from contextlib import (
 )

 import tractor
-import trio

 from ._util import (
     get_console_log,
 )
 from ._mngr import (
-    Services,
+    open_service_mngr,
+    ServiceMngr,
 )
 from ._registry import (  # noqa
     _tractor_kwargs,
@@ -59,7 +58,7 @@ async def open_piker_runtime(
     registry_addrs: list[tuple[str, int]] = [],

     enable_modules: list[str] = [],
-    loglevel: Optional[str] = None,
+    loglevel: str|None = None,

     # XXX NOTE XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
@@ -119,6 +118,10 @@ async def open_piker_runtime(
                 # spawn other specialized daemons I think?
                 enable_modules=enable_modules,

+                # TODO: how to configure this?
+                # keep it on by default if debug mode is set?
+                # maybe_enable_greenback=debug_mode,
+
                 **tractor_kwargs,
             ) as actor,

@@ -167,12 +170,13 @@ async def open_pikerd(

     **kwargs,

-) -> Services:
+) -> ServiceMngr:
     '''
-    Start a root piker daemon with an indefinite lifetime.
+    Start a root piker daemon actor (aka `pikerd`) with an indefinite
+    lifetime.

-    A root actor nursery is created which can be used to create and keep
-    alive underling services (see below).
+    A root actor-nursery is created which can be used to spawn and
+    supervise underling service sub-actors (see below).

     '''
     # NOTE: for the root daemon we always enable the root
@@ -199,8 +203,6 @@ async def open_pikerd(
             root_actor,
             reg_addrs,
         ),
-        tractor.open_nursery() as actor_nursery,
-        trio.open_nursery() as service_nursery,
     ):
         for addr in reg_addrs:
             if addr not in root_actor.accept_addrs:
@@ -209,25 +211,17 @@ async def open_pikerd(
                     'Maybe you have another daemon already running?'
                 )

-        # assign globally for future daemon/task creation
-        Services.actor_n = actor_nursery
-        Services.service_n = service_nursery
-        Services.debug_mode = debug_mode
-
-        try:
-            yield Services
-
-        finally:
-            # TODO: is this more clever/efficient?
-            # if 'samplerd' in Services.service_tasks:
-            #     await Services.cancel_service('samplerd')
-            service_nursery.cancel_scope.cancel()
+        mngr: ServiceMngr
+        async with open_service_mngr(
+            debug_mode=debug_mode,
+        ) as mngr:
+            yield mngr


 # TODO: do we even need this?
 # @acm
 # async def maybe_open_runtime(
-#     loglevel: Optional[str] = None,
+#     loglevel: str|None = None,
 #     **kwargs,

 # ) -> None:
@@ -256,7 +250,7 @@ async def maybe_open_pikerd(
     loglevel: str | None = None,
     **kwargs,

-) -> tractor._portal.Portal | ClassVar[Services]:
+) -> tractor._portal.Portal | ClassVar[ServiceMngr]:
     '''
     If no ``pikerd`` daemon-root-actor can be found start it and
     yield up (we should probably figure out returning a portal to self
@@ -49,7 +49,7 @@ from requests.exceptions import (
     ReadTimeout,
 )

-from ._mngr import Services
+from ._mngr import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,
@@ -453,7 +453,7 @@ async def open_ahabd(

 @acm
 async def start_ahab_service(
-    services: Services,
+    services: ServiceMngr,
     service_name: str,

     # endpoint config passed as **kwargs
@@ -549,7 +549,8 @@ async def start_ahab_service(
         log.warning('Failed to cancel root permsed container')

     except (
-        trio.MultiError,
+        # trio.MultiError,
+        ExceptionGroup,
     ) as err:
         for subexc in err.exceptions:
             if isinstance(subexc, PermissionError):
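`trio.MultiError` was removed in modern trio releases in favor of the stdlib (3.11+) `ExceptionGroup`, which exposes the same `.exceptions` sequence the handler above iterates:

# minimal sketch: filtering one member type out of a group
try:
    raise ExceptionGroup(
        'container teardown failures',
        [PermissionError('root-owned file'), RuntimeError('other')],
    )
except ExceptionGroup as err:
    for subexc in err.exceptions:
        if isinstance(subexc, PermissionError):
            print('needs elevated perms:', subexc)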
@@ -26,14 +26,17 @@ from typing import (
 from contextlib import (
     asynccontextmanager as acm,
 )
+from collections import defaultdict

 import tractor
+import trio

 from ._util import (
     log,  # sub-sys logger
 )
 from ._mngr import (
-    Services,
+    get_service_mngr,
+    ServiceMngr,
 )
 from ._actor_runtime import maybe_open_pikerd
 from ._registry import find_service
@@ -41,15 +44,14 @@ from ._registry import find_service

 @acm
 async def maybe_spawn_daemon(
-
     service_name: str,
     service_task_target: Callable,
-
     spawn_args: dict[str, Any],

     loglevel: str | None = None,
     singleton: bool = False,

+    _locks = defaultdict(trio.Lock),
     **pikerd_kwargs,

 ) -> tractor.Portal:
@@ -67,7 +69,7 @@ async def maybe_spawn_daemon(
     '''
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
-    lock = Services.locks[service_name]
+    lock = _locks[service_name]
     await lock.acquire()

     async with find_service(
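The `_locks=defaultdict(trio.Lock)` parameter default above is the classic mutable-default trick used deliberately: it is evaluated once at function definition, so every call shares one lock registry without needing a module-level global. Sketch:

from collections import defaultdict
import trio

async def guarded(
    name: str,
    # created once at def-time: shared across *all* calls
    _locks=defaultdict(trio.Lock),
) -> None:
    # any two tasks passing the same `name` serialize here
    async with _locks[name]:
        await trio.sleep(0)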
@@ -132,7 +134,65 @@ async def maybe_spawn_daemon(
         async with tractor.wait_for_actor(service_name) as portal:
             lock.release()
             yield portal
-            await portal.cancel_actor()
+            # --- ---- ---
+            # XXX NOTE XXX
+            # --- ---- ---
+            # DO NOT PUT A `portal.cancel_actor()` here (as was prior)!
+            #
+            # Doing so will cause an "out-of-band" ctxc
+            # (`tractor.ContextCancelled`) to be raised inside the
+            # `ServiceMngr.open_context_in_task()`'s call to
+            # `ctx.wait_for_result()` AND the internal self-ctxc
+            # "graceful capture" WILL NOT CATCH IT!
+            #
+            # This can cause certain types of operations to raise
+            # that ctxc BEFORE THEY `return`, resulting in
+            # a "false-negative" ctxc being raised when really
+            # nothing actually failed, other than our semantic
+            # "failure" to suppress an expected, graceful,
+            # self-cancel scenario..
+            #
+            # bUt wHy duZ It WorK lIKe dis..
+            # ------------------------------
+            # from the perspective of the `tractor.Context` this
+            # cancel request was conducted "out of band" since
+            # `Context.cancel()` was never called and thus the
+            # `._cancel_called: bool` was never set. Despite the
+            # remote `.canceller` being set to `pikerd` (i.e. the
+            # same `Actor.uid` of the raising service-mngr task) the
+            # service-task's ctx itself was never marked as having
+            # requested cancellation and thus still raises the ctxc
+            # bc it was unaware of any such request.
+            #
+            # How to make grokin these cases easier tho?
+            # ------------------------------------------
+            # Because `Portal.cancel_actor()` was called it requests
+            # "full-`Actor`-runtime-cancellation" of its peer
+            # process which IS NOT THE SAME as a single inter-actor
+            # RPC task cancelling its local context with a remote
+            # peer `Task` in that same peer process.
+            #
+            # ?TODO? It might be better if we do one (or all) of the
+            # following:
+            #
+            # -[ ] at least set a special message for the
+            #    `ContextCancelled` when raised locally by the
+            #    unaware ctx task such that we check for the
+            #    `.canceller` being *our `Actor`* and in the case
+            #    where `Context._cancel_called == False` we specially
+            #    note that this is likely an "out-of-band"
+            #    runtime-cancel request triggered by some call to
+            #    `Portal.cancel_actor()`, possibly even reporting the
+            #    exact LOC of that caller by tracking it inside our
+            #    portal-type?
+            # -[ ] possibly add another field `ContextCancelled` like
+            #    maybe a,
+            #    `.request_type: Literal['os', 'proc', 'actor',
+            #    'ctx']` type thing which would allow immediately
+            #    being able to tell what kind of cancellation caused
+            #    the unexpected ctxc?
+            # -[ ] REMOVE THIS COMMENT, once we've settled on how to
+            #     better augment `tractor` to be more explicit on this!

 async def spawn_emsd(
@@ -147,21 +207,22 @@ async def spawn_emsd(
     """
     log.info('Spawning emsd')

-    portal = await Services.actor_n.start_actor(
+    smngr: ServiceMngr = get_service_mngr()
+    portal = await smngr.actor_n.start_actor(
         'emsd',
         enable_modules=[
             'piker.clearing._ems',
             'piker.clearing._client',
         ],
         loglevel=loglevel,
-        debug_mode=Services.debug_mode,  # set by pikerd flag
+        debug_mode=smngr.debug_mode,  # set by pikerd flag
         **extra_tractor_kwargs
     )

     # non-blocking setup of clearing service
     from ..clearing._ems import _setup_persistent_emsd

-    await Services.start_service_task(
+    await smngr.start_service_task(
         'emsd',
         portal,

@@ -18,16 +18,29 @@
 daemon-service management API.

 """
+from __future__ import annotations
+from contextlib import (
+    asynccontextmanager as acm,
+    # contextmanager as cm,
+)
 from collections import defaultdict
+from dataclasses import (
+    dataclass,
+    field,
+)
+import functools
+import inspect
 from typing import (
     Callable,
     Any,
 )

-import trio
-from trio_typing import TaskStatus
+import msgspec
 import tractor
+import trio
+from trio import TaskStatus
 from tractor import (
+    ActorNursery,
     current_actor,
     ContextCancelled,
     Context,
@@ -39,6 +52,130 @@ from ._util import (
 )

+# TODO: implement a singleton deco-API for wrapping the below
+# factory's impl for general actor-singleton use?
+#
+# @singleton
+# async def open_service_mngr(
+#     **init_kwargs,
+# ) -> ServiceMngr:
+#     '''
+#     Note this function body is invoked IFF no existing singleton
+#     instance already exists in this proc's memory.
+#
+#     '''
+#     # setup
+#     yield ServiceMngr(**init_kwargs)
+#     # teardown
+
+
+# TODO: singleton factory API instead of a class API
+@acm
+async def open_service_mngr(
+    *,
+    debug_mode: bool = False,
+
+    # impl detail which ensures a single global instance
+    _singleton: list[ServiceMngr|None] = [None],
+    **init_kwargs,
+
+) -> ServiceMngr:
+    '''
+    Open a multi-subactor-as-service-daemon tree supervisor.
+
+    The delivered `ServiceMngr` is a singleton instance for each
+    actor-process and is allocated on first open and never
+    de-allocated unless explicitly deleted by a call to
+    `del_service_mngr()`.
+
+    '''
+    # TODO: factor this allocation into
+    # a `._mngr.open_service_mngr()` and put in the
+    # once-n-only-once setup/`.__aenter__()` part!
+    # -[ ] how to make this only happen on the `mngr == None` case?
+    #  |_ use `.trionics.maybe_open_context()` (for generic
+    #     async-with-style-only-once of the factory impl, though
+    #     what do we do for the allocation case?
+    #    / `.maybe_open_nursery()` (since for this specific case
+    #    it's simpler?) to activate
+    async with (
+        tractor.open_nursery() as an,
+        trio.open_nursery() as tn,
+    ):
+        # impl specific obvi..
+        init_kwargs.update({
+            'actor_n': an,
+            'service_n': tn,
+        })
+
+        mngr: ServiceMngr|None
+        if (mngr := _singleton[0]) is None:
+
+            log.info('Allocating a new service mngr!')
+            mngr = _singleton[0] = ServiceMngr(**init_kwargs)
+
+            # TODO: put into `.__aenter__()` section of
+            # eventual `@singleton_acm` API wrapper.
+            #
+            # assign globally for future daemon/task creation
+            mngr.actor_n = an
+            mngr.service_n = tn
+
+        else:
+            assert (
+                mngr.actor_n
+                and
+                mngr.service_n
+            )
+            log.info(
+                'Using extant service mngr!\n\n'
+                f'{mngr!r}\n'  # it has a nice `.__repr__()` of services state
+            )
+
+        try:
+            # NOTE: this is a singleton factory impl specific detail
+            # which should be supported in the condensed
+            # `@singleton_acm` API?
+            mngr.debug_mode = debug_mode
+
+            yield mngr
+        finally:
+            # TODO: is this more clever/efficient?
+            # if 'samplerd' in mngr.service_tasks:
+            #     await mngr.cancel_service('samplerd')
+            tn.cancel_scope.cancel()
+
+
+def get_service_mngr() -> ServiceMngr:
+    '''
+    Try to get the singleton service-mngr for this actor presuming it
+    has already been allocated using,
+
+    .. code:: python
+
+        async with open_<@singleton_acm(func)>() as mngr:
+            ... this block kept open ...
+
+    If not yet allocated raise a `ServiceError`.
+
+    '''
+    # https://stackoverflow.com/a/12627202
+    # https://docs.python.org/3/library/inspect.html#inspect.Signature
+    maybe_mngr: ServiceMngr|None = inspect.signature(
+        open_service_mngr
+    ).parameters['_singleton'].default[0]
+
+    if maybe_mngr is None:
+        raise RuntimeError(
+            'Someone must allocate a `ServiceMngr` using\n\n'
+            '`async with open_service_mngr()` beforehand!!\n'
+        )
+
+    return maybe_mngr

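`get_service_mngr()` recovers the singleton by reading the *live* `_singleton` default object back off the factory's own signature; a self-contained demo of that `inspect` trick:

import inspect

def open_thing(_singleton: list = [None]):
    # allocate exactly once, stashing into the mutable default
    if _singleton[0] is None:
        _singleton[0] = object()
    return _singleton[0]

def get_thing():
    # the signature exposes the *same* mutable default object
    return inspect.signature(open_thing).parameters['_singleton'].default[0]

first = open_thing()
assert get_thing() is first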
 # TODO: we need remote wrapping and a general soln:
 # - factor this into a ``tractor.highlevel`` extension # pack for the
 #   library.
@@ -46,31 +183,46 @@ from ._util import (
 #   to the pikerd actor for starting services remotely!
 # - prolly rename this to ActorServicesNursery since it spawns
 #   new actors and supervises them to completion?
-class Services:
+@dataclass
+class ServiceMngr:
+# class ServiceMngr(msgspec.Struct):
+    '''
+    A multi-subactor-as-service manager.

-    actor_n: tractor._supervise.ActorNursery
+    Spawn, supervise and monitor service/daemon subactors in a SC
+    process tree.
+
+    '''
+    actor_n: ActorNursery
     service_n: trio.Nursery
-    debug_mode: bool  # tractor sub-actor debug mode flag
+    debug_mode: bool = False  # tractor sub-actor debug mode flag
+
     service_tasks: dict[
         str,
         tuple[
             trio.CancelScope,
+            Context,
             Portal,
             trio.Event,
         ]
-    ] = {}
-    locks = defaultdict(trio.Lock)
+    ] = field(default_factory=dict)
+
+    # internal per-service task mutexes
+    _locks = defaultdict(trio.Lock)

-    @classmethod
     async def start_service_task(
         self,
         name: str,
         portal: Portal,
+
+        # TODO: typevar for the return type of the target and then
+        # use it below for `ctx_res`?
         target: Callable,
+
         allow_overruns: bool = False,
         **ctx_kwargs,

-    ) -> (trio.CancelScope, Context):
+    ) -> (trio.CancelScope, Context, Any):
         '''
         Open a context in a service sub-actor, add to a stack
         that gets unwound at ``pikerd`` teardown.
@@ -83,6 +235,7 @@ class Services:
             task_status: TaskStatus[
                 tuple[
                     trio.CancelScope,
+                    Context,
                     trio.Event,
                     Any,
                 ]
@@ -90,64 +243,87 @@ class Services:

         ) -> Any:

+            # TODO: use the ctx._scope directly here instead?
+            # -[ ] actually what semantics do we expect for this
+            #   usage!?
             with trio.CancelScope() as cs:
+                try:
                     async with portal.open_context(
                         target,
                         allow_overruns=allow_overruns,
                         **ctx_kwargs,

-                ) as (ctx, first):
+                    ) as (ctx, started):

                         # unblock once the remote context has started
                         complete = trio.Event()
-                    task_status.started((cs, complete, first))
+                        task_status.started((
+                            cs,
+                            ctx,
+                            complete,
+                            started,
+                        ))
                         log.info(
-                        f'`pikerd` service {name} started with value {first}'
+                            f'`pikerd` service {name} started with value {started}'
                         )
-                    try:
                         # wait on any context's return value
                         # and any final portal result from the
                         # sub-actor.
-                        ctx_res: Any = await ctx.result()
+                        ctx_res: Any = await ctx.wait_for_result()

                         # NOTE: blocks indefinitely until cancelled
                         # either by error from the target context
                         # function or by being cancelled here by the
                         # surrounding cancel scope.
-                        return (await portal.result(), ctx_res)
+                        return (
+                            await portal.wait_for_result(),
+                            ctx_res,
+                        )
+
                 except ContextCancelled as ctxe:
                     canceller: tuple[str, str] = ctxe.canceller
                     our_uid: tuple[str, str] = current_actor().uid
                     if (
-                        canceller != portal.channel.uid
+                        canceller != portal.chan.uid
                         and
                         canceller != our_uid
                     ):
                         log.cancel(
-                            f'Actor-service {name} was remotely cancelled?\n'
-                            f'remote canceller: {canceller}\n'
-                            f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n'
+                            f'Actor-service `{name}` was remotely cancelled by a peer?\n'
+
+                            # TODO: this would be a good spot to use
+                            # a respawn feature Bo
+                            f'-> Keeping `pikerd` service manager alive despite this inter-peer cancel\n\n'
+
+                            f'cancellee: {portal.chan.uid}\n'
+                            f'canceller: {canceller}\n'
                         )
                     else:
                         raise

                 finally:
+                    # NOTE: the ctx MUST be cancelled first if we
|  |                     # don't want the above `ctx.wait_for_result()` to | ||||||
|  |                     # raise a self-ctxc. WHY, well since from the ctx's | ||||||
|  |                     # perspective the cancel request will have | ||||||
|  |                     # arrived out-out-of-band at the `Actor.cancel()` | ||||||
|  |                     # level, thus `Context.cancel_called == False`, | ||||||
|  |                     # meaning `ctx._is_self_cancelled() == False`. | ||||||
|  |                     # with trio.CancelScope(shield=True): | ||||||
|  |                     # await ctx.cancel() | ||||||
|                     await portal.cancel_actor() |                     await portal.cancel_actor() | ||||||
|                     complete.set() |                     complete.set() | ||||||
|                     self.service_tasks.pop(name) |                     self.service_tasks.pop(name) | ||||||
| 
 | 
 | ||||||
|         cs, complete, first = await self.service_n.start(open_context_in_task) |         cs, sub_ctx, complete, started = await self.service_n.start( | ||||||
|  |             open_context_in_task | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|         # store the cancel scope and portal for later cancellation or |         # store the cancel scope and portal for later cancellation or | ||||||
|         # retstart if needed. |         # retstart if needed. | ||||||
|         self.service_tasks[name] = (cs, portal, complete) |         self.service_tasks[name] = (cs, sub_ctx, portal, complete) | ||||||
|  |         return cs, sub_ctx, started | ||||||
| 
 | 
 | ||||||
|         return cs, first |  | ||||||
| 
 |  | ||||||
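(For reference: a stripped-down, runnable sketch of the `task_status.started()` + `trio.Event` completion handshake used by `open_context_in_task` above, with the remote context replaced by a plain sleep since `tractor` isn't needed to show the pattern:)

    import trio

    async def open_service_task(
        task_status=trio.TASK_STATUS_IGNORED,
    ):
        with trio.CancelScope() as cs:
            complete = trio.Event()
            # unblock the caller's `await nursery.start()` with
            # whatever it needs for later cancellation/teardown.
            task_status.started((cs, complete))
            try:
                await trio.sleep_forever()  # stand-in for the ctx body
            finally:
                complete.set()

    async def main():
        async with trio.open_nursery() as n:
            cs, complete = await n.start(open_service_task)
            cs.cancel()            # request teardown..
            await complete.wait()  # ..and wait for it to finish.

    trio.run(main)
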
-    @classmethod
     async def cancel_service(
         self,
         name: str,
@@ -158,8 +334,80 @@ class Services:

         '''
         log.info(f'Cancelling `pikerd` service {name}')
-        cs, portal, complete = self.service_tasks[name]
-        cs.cancel()
+        cs, sub_ctx, portal, complete = self.service_tasks[name]
+
+        # cs.cancel()
+        await sub_ctx.cancel()
         await complete.wait()
-        assert name not in self.service_tasks, \
-            f'Serice task for {name} not terminated?'
+
+        if name in self.service_tasks:
+            # TODO: custom err?
+            # raise ServiceError(
+            raise RuntimeError(
+                f'Service task for {name} not terminated?'
+            )
+
+        # assert name not in self.service_tasks, \
+        #     f'Service task for {name} not terminated?'

+    async def start_service(
+        self,
+        daemon_name: str,
+        ctx_ep: Callable,  # kwargs must be `partial`-ed in!
+
+        debug_mode: bool = False,
+        **tractor_actor_kwargs,
+
+    ) -> Context:
+        '''
+        Start a "service" task in a new sub-actor (daemon) and
+        manage its lifetime indefinitely.
+
+        Services can be cancelled/shutdown using `.cancel_service()`.
+
+        '''
+        entry: tuple|None = self.service_tasks.get(daemon_name)
+        if entry:
+            (cs, sub_ctx, portal, complete) = entry
+            return sub_ctx
+
+        if daemon_name not in self.service_tasks:
+            portal = await self.actor_n.start_actor(
+                daemon_name,
+                debug_mode=(  # maybe set globally during allocate
+                    debug_mode
+                    or
+                    self.debug_mode
+                ),
+                **tractor_actor_kwargs,
+            )
+            ctx_kwargs: dict[str, Any] = {}
+            if isinstance(ctx_ep, functools.partial):
+                ctx_kwargs: dict[str, Any] = ctx_ep.keywords
+                ctx_ep: Callable = ctx_ep.func
+
+            (cs, sub_ctx, started) = await self.start_service_task(
+                daemon_name,
+                portal,
+                ctx_ep,
+                **ctx_kwargs,
+            )
+
+            return sub_ctx

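(The `partial`-unwrapping in `start_service()` leans on `functools.partial` exposing `.func` and `.keywords`; a quick standalone check with a hypothetical endpoint:)

    import functools

    def ep(name: str, loglevel: str = 'info') -> str:  # hypothetical ctx ep
        return f'{name}@{loglevel}'

    ctx_ep = functools.partial(ep, loglevel='debug')

    ctx_kwargs: dict = {}
    if isinstance(ctx_ep, functools.partial):
        ctx_kwargs = ctx_ep.keywords  # -> {'loglevel': 'debug'}
        ctx_ep = ctx_ep.func          # -> the original `ep`

    assert ctx_ep('samplerd', **ctx_kwargs) == 'samplerd@debug'
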
+
+# TODO:
+# -[ ] factor all the common shit from `.data._sampling`
+#   and `.brokers._daemon` into here / `ServiceMngr`
+#   in terms of allocating the `Portal` as part of the
+#   "service-in-subactor" starting!
+# -[ ] move to `tractor.hilevel._service`, import and use here!
+# NOTE: purposely leaks the ref to the mod-scope Bo
+# import tractor
+# from tractor.hilevel import (
+#     open_service_mngr,
+#     ServiceMngr,
+# )
+# mngr: ServiceMngr|None = None
+# with tractor.hilevel.open_service_mngr() as mngr:
+#     Services = proxy(mngr)

@@ -21,11 +21,13 @@ from typing import (
     TYPE_CHECKING,
 )

+# TODO: oof, needs to be changed to `httpx`!
 import asks

 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer
+    from . import ServiceMngr

 from ._util import log  # sub-sys logger
 from ._util import (
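(Re the `asks` -> `httpx` TODO above: a hedged sketch of what the swap might look like; `httpx.AsyncClient` runs on trio via anyio, and the helper name/URL here are placeholders, not this module's actual API:)

    import httpx

    async def get_json(url: str) -> dict:
        # the async client works under trio, so it can be awaited
        # from the existing service tasks once `asks` is dropped.
        async with httpx.AsyncClient() as client:
            resp = await client.get(url)
            resp.raise_for_status()
            return resp.json()
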
@@ -127,7 +129,7 @@ def start_elasticsearch(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -53,7 +53,7 @@ import pendulum
 # import purerpc

 from ..data.feed import maybe_open_feed
-from . import Services
+from . import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,
@@ -233,7 +233,7 @@ def start_marketstore(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -161,7 +161,12 @@ class NativeStorageClient:

     def index_files(self):
         for path in self._datadir.iterdir():
-            if path.name in {'borked', 'expired',}:
+            if (
+                path.name in {'borked', 'expired',}
+                or
+                '.parquet' not in str(path)
+            ):
+                # ignore all non-apache files (for now)
                 continue

             key: str = path.name.rstrip('.parquet')

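(Side note on the unchanged `path.name.rstrip('.parquet')` context line above: `str.rstrip()` strips a *character set*, not a suffix, so names ending in any of those letters get over-trimmed; `str.removesuffix()` or `Path.stem` would be the safe spelling:)

    from pathlib import Path

    good = 'btcusdt.1s.parquet'
    assert good.rstrip('.parquet') == 'btcusdt.1s'    # happens to work

    bad = 'tsla.parquet'
    assert bad.rstrip('.parquet') == 'tsl'            # trailing 'a' eaten!
    assert bad.removesuffix('.parquet') == 'tsla'     # correct (3.9+)
    assert Path(bad).stem == 'tsla'                   # also correct
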
@@ -44,8 +44,10 @@ import trio
 from trio_typing import TaskStatus
 import tractor
 from pendulum import (
+    Interval,
     DateTime,
     Duration,
+    duration as mk_duration,
     from_timestamp,
 )
 import numpy as np
@@ -214,7 +216,8 @@ async def maybe_fill_null_segments(
         # pair, immediately stop backfilling?
         if (
             start_dt
-            and end_dt < start_dt
+            and
+            end_dt < start_dt
         ):
             await tractor.pause()
             break
@@ -262,6 +265,7 @@ async def maybe_fill_null_segments(
         except tractor.ContextCancelled:
             # log.exception
             await tractor.pause()
+            raise

     null_segs_detected.set()
     # RECHECK for more null-gaps
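(The added `raise` matters: a cancellation exception that gets caught for debugging must still propagate or teardown stalls. The same rule in a pure-`trio` analogue:)

    import trio

    async def worker():
        try:
            await trio.sleep_forever()
        except trio.Cancelled:
            # ok to log/cleanup here, but cancellation MUST
            # propagate for the nursery to unwind correctly.
            print('worker cancelled, cleaning up..')
            raise

    async def main():
        async with trio.open_nursery() as n:
            n.start_soon(worker)
            await trio.sleep(0.1)
            n.cancel_scope.cancel()

    trio.run(main)
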
@@ -349,7 +353,7 @@ async def maybe_fill_null_segments(

 async def start_backfill(
     get_hist,
-    frame_types: dict[str, Duration] | None,
+    def_frame_duration: Duration,
     mod: ModuleType,
     mkt: MktPair,
     shm: ShmArray,
@@ -379,22 +383,23 @@ async def start_backfill(
         update_start_on_prepend: bool = False
         if backfill_until_dt is None:

-            # TODO: drop this right and just expose the backfill
-            # limits inside a [storage] section in conf.toml?
-            # when no tsdb "last datum" is provided, we just load
-            # some near-term history.
-            # periods = {
-            #     1: {'days': 1},
-            #     60: {'days': 14},
-            # }
-
-            # do a decently sized backfill and load it into storage.
+            # TODO: per-provider default history-durations?
+            # -[ ] inside the `open_history_client()` config allow
+            #    declaring the history duration limits instead of
+            #    guessing and/or applying the same limits to all?
+            #
+            # -[ ] allow declaring (default) per-provider backfill
+            #     limits inside a [storage] sub-section in conf.toml?
+            #
+            # NOTE, when no tsdb "last datum" is provided, we just
+            # load some near-term history by presuming a "decently
+            # large" 60s duration limit and a much shorter 1s range.
             periods = {
                 1: {'days': 2},
                 60: {'years': 6},
             }
             period_duration: int = periods[timeframe]
-            update_start_on_prepend = True
+            update_start_on_prepend: bool = True

             # NOTE: manually set the "latest" datetime which we intend to
             # backfill history "until" so as to adhere to the history
@@ -416,7 +421,6 @@ async def start_backfill(
                 f'backfill_until_dt: {backfill_until_dt}\n'
                 f'last_start_dt: {last_start_dt}\n'
             )
-
             try:
                 (
                     array,
@@ -426,71 +430,114 @@ async def start_backfill(
                     timeframe,
                     end_dt=last_start_dt,
                 )
-
             except NoData as _daterr:
-                # 3 cases:
-                # - frame in the middle of a legit venue gap
-                # - history actually began at the `last_start_dt`
-                # - some other unknown error (ib blocking the
-                #   history bc they don't want you seeing how they
-                #   cucked all the tinas..)
-                if dur := frame_types.get(timeframe):
-                    # decrement by a frame's worth of duration and
-                    # retry a few times.
-                    last_start_dt.subtract(
-                        seconds=dur.total_seconds()
-                    )
-                    log.warning(
-                        f'{mod.name} -> EMPTY FRAME for end_dt?\n'
-                        f'tf@fqme: {timeframe}@{mkt.fqme}\n'
-                        'bf_until <- last_start_dt:\n'
-                        f'{backfill_until_dt} <- {last_start_dt}\n'
-                        f'Decrementing `end_dt` by {dur} and retry..\n'
-                    )
+                orig_last_start_dt: datetime = last_start_dt
+                gap_report: str = (
+                    f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n'
+                    f'{mod.name} -> tf@fqme: {timeframe}@{mkt.fqme}\n'
+                    f'last_start_dt: {orig_last_start_dt}\n\n'
+                    f'bf_until: {backfill_until_dt}\n'
+                )
+                # EMPTY FRAME signal with 3 (likely) causes:
+                #
+                # 1. range contains legit gap in venue history
+                # 2. history actually (edge case) **began** at the
+                #    value `last_start_dt`
+                # 3. some other unknown error (ib blocking the
+                #    history-query bc they don't want you seeing how
+                #    they cucked all the tinas.. like with options
+                #    hist)
+                #
+                if def_frame_duration:
+                    # decrement by a duration's (frame) worth of time
+                    # as maybe indicated by the backend to see if we
+                    # can get older data before this possible
+                    # "history gap".
+                    last_start_dt: datetime = last_start_dt.subtract(
+                        seconds=def_frame_duration.total_seconds()
+                    )
+                    gap_report += (
+                        f'Decrementing `end_dt` and retrying with,\n'
+                        f'def_frame_duration: {def_frame_duration}\n'
+                        f'(new) last_start_dt: {last_start_dt}\n'
+                    )
+                    log.warning(gap_report)
+                    # skip writing to shm/tsdb and try the next
+                    # duration's worth of prior history.
                     continue

-            # broker says there never was or is no more history to pull
-            except DataUnavailable:
-                log.warning(
-                    f'NO-MORE-DATA in range?\n'
-                    f'`{mod.name}` halted history:\n'
-                    f'tf@fqme: {timeframe}@{mkt.fqme}\n'
-                    'bf_until <- last_start_dt:\n'
-                    f'{backfill_until_dt} <- {last_start_dt}\n'
-                )
-
-                # ugh, what's a better way?
-                # TODO: fwiw, we probably want a way to signal a throttle
-                # condition (eg. with ib) so that we can halt the
-                # request loop until the condition is resolved?
-                if timeframe > 1:
-                    await tractor.pause()
+                else:
+                    # await tractor.pause()
+                    raise DataUnavailable(gap_report)
+
+            # broker says there never was or is no more history to pull
+            except DataUnavailable as due:
+                message: str = due.args[0]
+                log.warning(
+                    f'Provider {mod.name!r} halted backfill due to,\n\n'
+
+                    f'{message}\n'
+
+                    f'fqme: {mkt.fqme}\n'
+                    f'timeframe: {timeframe}\n'
+                    f'last_start_dt: {last_start_dt}\n'
+                    f'bf_until: {backfill_until_dt}\n'
+                )
+                # UGH: what's a better way?
+                # TODO: backends are responsible for being correct on
+                # this right!?
+                # -[ ] in the `ib` case we could maybe offer some way
+                #     to halt the request loop until the condition is
+                #     resolved or should the backend be entirely in
+                #     charge of solving such faults? yes, right?
                 return

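(A note on the decrement-and-retry step above: `pendulum` datetimes are immutable, so `.subtract()` *returns* the new value. The removed code dropped that result, meaning the retry never actually stepped back in time; the re-assignment in the new code is the fix. Minimal demo with a hypothetical 1-day frame duration:)

    import pendulum
    from pendulum import duration as mk_duration

    last_start_dt = pendulum.datetime(2024, 1, 2)
    def_frame_duration = mk_duration(days=1)  # hypothetical

    # `.subtract()` returns a NEW datetime; without re-assignment
    # (as in the removed code) this line would be a no-op.
    last_start_dt = last_start_dt.subtract(
        seconds=def_frame_duration.total_seconds(),
    )
    assert last_start_dt == pendulum.datetime(2024, 1, 1)
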
+            time: np.ndarray = array['time']
             assert (
-                array['time'][0]
+                time[0]
                 ==
                 next_start_dt.timestamp()
             )

-            diff = last_start_dt - next_start_dt
-            frame_time_diff_s = diff.seconds
+            assert time[-1] == next_end_dt.timestamp()
+
+            expected_dur: Interval = last_start_dt - next_start_dt

             # frame's worth of sample-period-steps, in seconds
             frame_size_s: float = len(array) * timeframe
-            expected_frame_size_s: float = frame_size_s + timeframe
-            if frame_time_diff_s > expected_frame_size_s:
-
+            recv_frame_dur: Duration = (
+                from_timestamp(array[-1]['time'])
+                -
+                from_timestamp(array[0]['time'])
+            )
+            if (
+                (lt_frame := (recv_frame_dur < expected_dur))
+                or
+                (null_frame := (frame_size_s == 0))
+                # ^XXX, should NEVER hit now!
+            ):
                 # XXX: query result includes a start point prior to our
                 # expected "frame size" and thus is likely some kind of
                 # history gap (eg. market closed period, outage, etc.)
                 # so just report it to console for now.
+                if lt_frame:
+                    reason = 'Possible GAP (or first-datum)'
+                else:
+                    assert null_frame
+                    reason = 'NULL-FRAME'
+
+                missing_dur: Interval = expected_dur.end - recv_frame_dur.end
                 log.warning(
-                    'GAP DETECTED:\n'
-                    f'last_start_dt: {last_start_dt}\n'
-                    f'diff: {diff}\n'
-                    f'frame_time_diff_s: {frame_time_diff_s}\n'
+                    f'{timeframe}s-series {reason} detected!\n'
+                    f'fqme: {mkt.fqme}\n'
+                    f'last_start_dt: {last_start_dt}\n\n'
+                    f'recv interval: {recv_frame_dur}\n'
+                    f'expected interval: {expected_dur}\n\n'
+
+                    f'Missing duration of history of {missing_dur.in_words()!r}\n'
+                    f'{missing_dur}\n'
                 )
+                # await tractor.pause()

             to_push = diff_history(
                 array,
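(And the new gap check standalone: subtracting two `pendulum` datetimes yields an `Interval`, which compares directly against the span of the received frame's edge timestamps since both ultimately subclass `timedelta`; all values hypothetical:)

    from pendulum import datetime, from_timestamp

    next_start_dt = datetime(2024, 1, 1)
    last_start_dt = datetime(2024, 1, 2)
    expected_dur = last_start_dt - next_start_dt  # an `Interval`

    # received frame edges covering only half the expected span
    first_ts = next_start_dt.timestamp()
    last_ts = datetime(2024, 1, 1, 12).timestamp()
    recv_frame_dur = from_timestamp(last_ts) - from_timestamp(first_ts)

    if recv_frame_dur < expected_dur:
        print(f'gap: missing {expected_dur - recv_frame_dur} of history')
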
@@ -565,7 +612,8 @@ async def start_backfill(
             # long-term storage.
             if (
                 storage is not None
-                and write_tsdb
+                and
+                write_tsdb
             ):
                 log.info(
                     f'Writing {ln} frame to storage:\n'
@@ -578,6 +626,7 @@ async def start_backfill(
                     'crypto',
                     'crypto_currency',
                     'fiat',  # a "forex pair"
+                    'perpetual_future',  # stupid "perps" from cex land
                 }:
                     # for now, our table key schema is not including
                     # the dst[/src] source asset token.
@@ -685,7 +734,7 @@ async def back_load_from_tsdb(
         last_tsdb_dt
         and latest_start_dt
     ):
-        backfilled_size_s = (
+        backfilled_size_s: Duration = (
             latest_start_dt - last_tsdb_dt
         ).seconds
         # if the shm buffer len is not large enough to contain
@@ -908,6 +957,8 @@ async def tsdb_backfill(
             f'{pformat(config)}\n'
         )

+        # concurrently load the provider's most-recent-frame AND any
+        # pre-existing tsdb history already saved in `piker` storage.
         dt_eps: list[DateTime, DateTime] = []
         async with trio.open_nursery() as tn:
             tn.start_soon(
@@ -918,7 +969,6 @@ async def tsdb_backfill(
                 timeframe,
                 config,
             )
-
             tsdb_entry: tuple = await load_tsdb_hist(
                 storage,
                 mkt,
@@ -947,6 +997,25 @@ async def tsdb_backfill(
                 mr_end_dt,
             ) = dt_eps

+            first_frame_dur_s: Duration = (mr_end_dt - mr_start_dt).seconds
+            calced_frame_size: Duration = mk_duration(
+                seconds=first_frame_dur_s,
+            )
+            # NOTE, attempt to use the backend declared default frame
+            # sizing (as allowed by their time-series query APIs) and
+            # if not provided try to construct a default from the
+            # first frame received above.
+            def_frame_durs: dict[
+                int,
+                Duration,
+            ]|None = config.get('frame_types', None)
+            if def_frame_durs:
+                def_frame_size: Duration = def_frame_durs[timeframe]
+                assert def_frame_size == calced_frame_size
+            else:
+                # use what we calced from first frame above.
+                def_frame_size = calced_frame_size
+
             # NOTE: when there's no offline data, there's 2 cases:
             # - data backend doesn't support timeframe/sample
             #   period (in which case `dt_eps` should be `None` and
@@ -977,7 +1046,7 @@ async def tsdb_backfill(
                     partial(
                         start_backfill,
                         get_hist=get_hist,
-                        frame_types=config.get('frame_types', None),
+                        def_frame_duration=def_frame_size,
                         mod=mod,
                         mkt=mkt,
                         shm=shm,

										
											
(File diff suppressed because it is too large.)

pyproject.toml (213 lines changed):
@@ -15,8 +15,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"

 # ------ - ------

@@ -25,113 +25,23 @@ build-backend = "poetry.core.masonry.api"
 ignore = []

 # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores
-"piker/ui/qt.py" = [
-  "E402",
-  'F401',  # unused imports (without __all__ or blah as blah)
-  # "F841", # unused variable rules
-]
+# "piker/ui/qt.py" = [
+#   "E402",
+#   'F401',  # unused imports (without __all__ or blah as blah)
+#   # "F841", # unused variable rules
+# ]
 # ignore-init-module-imports = false

 # ------ - ------

-[tool.poetry]
-name = "piker"
-version = "0.1.0.alpha0.dev0"
-description = "trading gear for hackers"
-authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"]
-license = "AGPLv3"
-readme = "README.rst"
-
-# ------ - ------
-
-[tool.poetry.dependencies]
-async-generator = "^1.10"
-attrs = "^23.1.0"
-bidict = "^0.22.1"
-colorama = "^0.4.6"
-colorlog = "^6.7.0"
-cython = "^3.0.0"
-greenback = "^1.1.1"
-ib-insync = "^0.9.86"
-msgspec = "^0.18.0"
-numba = "^0.59.0"
-numpy = "^1.25"
-polars = "^0.18.13"
-pygments = "^2.16.1"
-python = ">=3.11, <3.13"
-rich = "^13.5.2"
-# setuptools = "^68.0.0"
-tomli = "^2.0.1"
-tomli-w = "^1.0.0"
-trio-util = "^0.7.0"
-trio-websocket = "^0.10.3"
-typer = "^0.9.0"
-rapidfuzz = "^3.5.2"
-pdbp = "^1.5.0"
-trio = "^0.24"
-pendulum = "^3.0.0"
-httpx = "^0.27.0"
-
-[tool.poetry.dependencies.tractor]
-develop = true
-git = 'https://github.com/goodboy/tractor.git'
-branch = 'asyncio_debugger_support'
-# path = "../tractor"
-
-[tool.poetry.dependencies.asyncvnc]
-git = 'https://github.com/pikers/asyncvnc.git'
-branch = 'main'
-
-[tool.poetry.dependencies.tomlkit]
-develop = true
-git = 'https://github.com/pikers/tomlkit.git'
-branch = 'piker_pin'
-# path = "../tomlkit/"
-
-[tool.poetry.group.uis]
-optional = true
-[tool.poetry.group.uis.dependencies]
-# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
-# TODO: make sure the levenshtein shit compiles on nix..
-# rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
-rapidfuzz = "^3.2.0"
-qdarkstyle = ">=3.0.2"
-pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' }
-
-# ------ - ------
-pyqt6 = "^6.7.0"
-
-[tool.poetry.group.dev]
-optional = true
-[tool.poetry.group.dev.dependencies]
-# testing / CI
-pytest = "^6.0.0"
-elasticsearch = "^8.9.0"
-xonsh = "^0.14.2"
-prompt-toolkit = "3.0.40"
-
-# console enhancements and eventually remote debugging
-# extras/helpers.
-# TODO: add a toolset that makes debugging a `pikerd` service
-# (tree) easy to hack on directly using more or less the local env:
-# - xonsh + xxh
-# - rsyscall + pdbp
-# - actor runtime control console like BEAM/OTP
-
-# ------ - ------
-
-# TODO: add an `--only daemon` group for running non-ui / pikerd
-# service tree in distributed mode B)
-# https://python-poetry.org/docs/managing-dependencies/#installing-group-dependencies
-# [tool.poetry.group.daemon.dependencies]
-
-[tool.poetry.scripts]
-piker = 'piker.cli:cli'
-pikerd = 'piker.cli:pikerd'
-ledger = 'piker.accounting.cli:ledger'
-
-
 [project]
+name = "piker"
+version = "0.1.0a0dev0"
+description = "trading gear for hackers"
+authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
+requires-python = ">=3.12, <3.13"
+license = "AGPL-3.0-or-later"
+readme = "README.rst"
 keywords = [
     "async",
     "trading",
@@ -140,15 +50,98 @@ keywords=[
     "charting",
 ]
 classifiers = [
-  'Development Status :: 3 - Alpha',
+    "Development Status :: 3 - Alpha",
     "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
-  'Operating System :: POSIX :: Linux',
+    "Operating System :: POSIX :: Linux",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: 3 :: Only",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
-  'Intended Audience :: Financial and Insurance Industry',
-  'Intended Audience :: Science/Research',
-  'Intended Audience :: Developers',
-  'Intended Audience :: Education',
+    "Intended Audience :: Financial and Insurance Industry",
+    "Intended Audience :: Science/Research",
+    "Intended Audience :: Developers",
+    "Intended Audience :: Education",
 ]
+dependencies = [
+    "async-generator >=1.10, <2.0.0",
+    "attrs >=23.1.0, <24.0.0",
+    "bidict >=0.22.1, <0.23.0",
+    "colorama >=0.4.6, <0.5.0",
+    "colorlog >=6.7.0, <7.0.0",
+    "ib-insync >=0.9.86, <0.10.0",
+    "numba >=0.59.0, <0.60.0",
+    "numpy >=1.25, <2.0",
+    "polars >=0.18.13, <0.19.0",
+    "pygments >=2.16.1, <3.0.0",
+    "rich >=13.5.2, <14.0.0",
+    "tomli >=2.0.1, <3.0.0",
+    "tomli-w >=1.0.0, <2.0.0",
+    "trio-util >=0.7.0, <0.8.0",
+    "trio-websocket >=0.10.3, <0.11.0",
+    "typer >=0.9.0, <1.0.0",
+    "rapidfuzz >=3.5.2, <4.0.0",
+    "pdbp >=1.5.0, <2.0.0",
+    "trio >=0.24, <0.25",
+    "pendulum >=3.0.0, <4.0.0",
+    "httpx >=0.27.0, <0.28.0",
+    "cryptofeed >=2.4.0, <3.0.0",
+    "pyarrow >=17.0.0, <18.0.0",
+    "websockets ==12.0",
+    "msgspec",
+    "tractor",
+    "asyncvnc",
+    "tomlkit",
+]
+
+[project.optional-dependencies]
+uis = [
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+    # TODO: make sure the levenshtein shit compiles on nix..
+    # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
+    "rapidfuzz >=3.2.0, <4.0.0",
+    "qdarkstyle >=3.0.2, <4.0.0",
+    "pyqt6 >=6.7.0, <7.0.0",
+    "pyqtgraph",
+
+    # ------ - ------
+
+    # TODO: add an `--only daemon` group for running non-ui / pikerd
+    # service tree in distributed mode B)
+    # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+    # [project.optional-dependencies]
+]
+
+[dependency-groups]
+dev = [
+    "pytest >=6.0.0, <7.0.0",
+    "elasticsearch >=8.9.0, <9.0.0",
+    "xonsh >=0.14.2, <0.15.0",
+    "prompt-toolkit ==3.0.40",
+    "cython >=3.0.0, <4.0.0",
+    "greenback >=1.1.1, <2.0.0",
+    # console enhancements and eventually remote debugging
+    # extras/helpers.
+    # TODO: add a toolset that makes debugging a `pikerd` service
+    # (tree) easy to hack on directly using more or less the local env:
+    # - xonsh + xxh
+    # - rsyscall + pdbp
+    # - actor runtime control console like BEAM/OTP
+]
+
+[project.scripts]
+piker = "piker.cli:cli"
+pikerd = "piker.cli:pikerd"
+ledger = "piker.accounting.cli:ledger"
+
+[tool.hatch.build.targets.sdist]
+include = ["piker"]
+
+[tool.hatch.build.targets.wheel]
+include = ["piker"]
+
+[tool.uv.sources]
+pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
+asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
+tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" }
+msgspec = { git = "https://github.com/jcrist/msgspec.git" }
+tractor = { path = "../tractor" }

@@ -10,7 +10,7 @@ from piker import (
     config,
 )
 from piker.service import (
-    Services,
+    get_service_mngr,
 )
 from piker.log import get_console_log

@@ -129,7 +129,7 @@ async def _open_test_pikerd(
         ) as service_manager,
     ):
         # this proc/actor is the pikerd
-        assert service_manager is Services
+        assert service_manager is get_service_mngr()

         async with tractor.wait_for_actor(
             'pikerd',

@@ -26,7 +26,7 @@ import pytest
 import tractor
 from uuid import uuid4

-from piker.service import Services
+from piker.service import ServiceMngr
 from piker.log import get_logger
 from piker.clearing._messages import (
     Order,
@@ -158,7 +158,7 @@ def load_and_check_pos(


 def test_ems_err_on_bad_broker(
-    open_test_pikerd: Services,
+    open_test_pikerd: ServiceMngr,
     loglevel: str,
 ):
     async def load_bad_fqme():

@@ -15,7 +15,7 @@ import tractor

 from piker.service import (
     find_service,
-    Services,
+    ServiceMngr,
 )
 from piker.data import (
     open_feed,
@@ -44,7 +44,7 @@ def test_runtime_boot(
     async def main():
         port = 6666
         daemon_addr = ('127.0.0.1', port)
-        services: Services
+        services: ServiceMngr

         async with (
             open_test_pikerd(