Compare commits

56 commits: ffbca17ba2 ... be44888e5f
	| Author | SHA1 | Date | 
|---|---|---|
|  | be44888e5f | |
|  | d035dbde5a | |
|  | fd6ef9665d | |
|  | 0d39897458 | |
|  | b87708a7c9 | |
|  | bf195e8796 | |
|  | cbe274089e | |
|  | 879cf3a9f3 | |
|  | ece32f8ed0 | |
|  | f3735dd712 | |
|  | 39542fdb50 | |
|  | a75cfbb124 | |
|  | 7b6410c535 | |
|  | 23e4f74c7a | |
|  | c32d79f04b | |
|  | b560319043 | |
|  | a06e7784f1 | |
|  | 12a13768bf | |
|  | 73ef2afe30 | |
|  | d425d36fc3 | |
|  | 4e963e27d0 | |
|  | 3dcee16bf6 | |
|  | 1f41b151d7 | |
|  | e8c196fd88 | |
|  | 7cefb202fb | |
|  | ddb4c0269f | |
|  | 139f62f4de | |
|  | 338e292002 | |
|  | 13af0a90eb | |
|  | e84781ca1e | |
|  | 576b15e2c6 | |
|  | b7e54571ea | |
|  | 1a295c0c21 | |
|  | f057a20bfa | |
|  | 9dba47902e | |
|  | e0ecef04bb | |
|  | 266347bcdb | |
|  | 051d43b559 | |
|  | ac3ea5d960 | |
|  | cd1ad13720 | |
|  | 941340f853 | |
|  | b046151464 | |
|  | f3d810f3ef | |
|  | 1c433e7bda | |
|  | af0bba5667 | |
|  | 28bbf11484 | |
|  | d1c00de05b | |
|  | 30caac4c27 | |
|  | 3de985d3cd | |
|  | cf46671e4b | |
|  | 87b0ac2331 | |
|  | d014f6f37d | |
|  | f50e7ad5f0 | |
|  | de55fbd44a | |
|  | e941afc491 | |
|  | 2ed8b47883 | 
							
								
								
									
default.nix (148 changed lines)
							|  | @ -1,82 +1,130 @@ | ||||||
| with (import <nixpkgs> {}); | with (import <nixpkgs> {}); | ||||||
| with python312Packages; |  | ||||||
| let | let | ||||||
|   glibStorePath = lib.getLib glib; |   glibStorePath = lib.getLib glib; | ||||||
|   qtpyStorePath = lib.getLib qtpy; |   zstdStorePath = lib.getLib zstd; | ||||||
|   pyqt6StorePath = lib.getLib pyqt6; |   dbusStorePath = lib.getLib dbus; | ||||||
|   pyqt6SipStorePath = lib.getLib pyqt6-sip; |   libGLStorePath = lib.getLib libGL; | ||||||
|  |   freetypeStorePath = lib.getLib freetype; | ||||||
|   qt6baseStorePath = lib.getLib qt6.qtbase; |   qt6baseStorePath = lib.getLib qt6.qtbase; | ||||||
|   rapidfuzzStorePath = lib.getLib rapidfuzz; |   fontconfigStorePath = lib.getLib fontconfig; | ||||||
|   qdarkstyleStorePath = lib.getLib qdarkstyle; |   libxkbcommonStorePath = lib.getLib libxkbcommon; | ||||||
|  |   xcbutilcursorStorePath = lib.getLib xcb-util-cursor; | ||||||
|  | 
 | ||||||
|  |   qtpyStorePath = lib.getLib python312Packages.qtpy; | ||||||
|  |   pyqt6StorePath = lib.getLib python312Packages.pyqt6; | ||||||
|  |   pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip; | ||||||
|  |   rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz; | ||||||
|  |   qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle; | ||||||
|  | 
 | ||||||
|  |   xorgLibX11StorePath = lib.getLib xorg.libX11; | ||||||
|  |   xorgLibxcbStorePath = lib.getLib xorg.libxcb; | ||||||
|  |   xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm; | ||||||
|  |   xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage; | ||||||
|  |   xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors; | ||||||
|  |   xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms; | ||||||
|  |   xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil; | ||||||
| in | in | ||||||
| stdenv.mkDerivation { | stdenv.mkDerivation { | ||||||
|   name = "piker-qt6-poetry-shell"; |   name = "piker-qt6-uv"; | ||||||
|   buildInputs = [ |   buildInputs = [ | ||||||
|     # System requirements. |     # System requirements. | ||||||
|     glib |     glib | ||||||
|  |     dbus | ||||||
|  |     zstd | ||||||
|  |     libGL | ||||||
|  |     freetype | ||||||
|     qt6.qtbase |     qt6.qtbase | ||||||
|     libgcc.lib |     libgcc.lib | ||||||
|  |     fontconfig | ||||||
|  |     libxkbcommon | ||||||
|  | 
 | ||||||
|  |     # Xorg requirements | ||||||
|  |     xcb-util-cursor | ||||||
|  |     xorg.libxcb | ||||||
|  |     xorg.libX11 | ||||||
|  |     xorg.xcbutilwm | ||||||
|  |     xorg.xcbutilimage | ||||||
|  |     xorg.xcbutilerrors | ||||||
|  |     xorg.xcbutilkeysyms | ||||||
|  |     xorg.xcbutilrenderutil | ||||||
| 
 | 
 | ||||||
|     # Python requirements. |     # Python requirements. | ||||||
|     python312Full |     python312Full | ||||||
|     poetry-core |     python312Packages.uv | ||||||
|     qdarkstyle |     python312Packages.qdarkstyle | ||||||
|     rapidfuzz |     python312Packages.rapidfuzz | ||||||
|     pyqt6 |     python312Packages.pyqt6 | ||||||
|     qtpy |     python312Packages.qtpy | ||||||
|   ]; |   ]; | ||||||
|   src = null; |   src = null; | ||||||
|   shellHook = '' |   shellHook = '' | ||||||
|     set -e |     set -e | ||||||
| 
 | 
 | ||||||
|     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${libgcc.lib}/lib:${glibStorePath}/lib |  | ||||||
| 
 |  | ||||||
|     # Set the Qt plugin path |     # Set the Qt plugin path | ||||||
|     # export QT_DEBUG_PLUGINS=1 |     # export QT_DEBUG_PLUGINS=1 | ||||||
|  |      | ||||||
|  |     QTBASE_PATH="${qt6baseStorePath}/lib" | ||||||
|  |     QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins" | ||||||
|  |     QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms" | ||||||
| 
 | 
 | ||||||
|     QTBASE_PATH="${qt6baseStorePath}" |     LIB_GCC_PATH="${libgcc.lib}/lib" | ||||||
|     echo "qtbase path:    $QTBASE_PATH" |     GLIB_PATH="${glibStorePath}/lib" | ||||||
|     echo "" |     ZSTD_PATH="${zstdStorePath}/lib" | ||||||
|     export QT_PLUGIN_PATH="$QTBASE_PATH/lib/qt-6/plugins" |     DBUS_PATH="${dbusStorePath}/lib" | ||||||
|     export QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms" |     LIBGL_PATH="${libGLStorePath}/lib" | ||||||
|     echo "qt plugin path: $QT_PLUGIN_PATH" |     FREETYPE_PATH="${freetypeStorePath}/lib" | ||||||
|     echo "" |     FONTCONFIG_PATH="${fontconfigStorePath}/lib" | ||||||
|  |     LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib" | ||||||
| 
 | 
 | ||||||
|     # Maybe create venv & install deps |     XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib" | ||||||
|     poetry install --with uis |     XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib" | ||||||
|  |     XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib" | ||||||
|  |     XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib" | ||||||
| 
 | 
 | ||||||
|     # Use pyqt6 from System, patch activate script |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH" | ||||||
|     ACTIVATE_SCRIPT_PATH="$(poetry env info --path)/bin/activate" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH" | ||||||
| 
 | 
 | ||||||
|     export RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH" | ||||||
|     export QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH" | ||||||
|     export QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH" | ||||||
|     export PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH" | ||||||
|     export PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH" | ||||||
|     echo "rapidfuzz at:   $RPDFUZZ_PATH" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH" | ||||||
|     echo "qdarkstyle at:  $QDRKSTYLE_PATH" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH" | ||||||
|     echo "qtpy at:        $QTPY_PATH"  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH" | ||||||
|     echo "pyqt6 at:       $PYQT6_PATH" |  | ||||||
|     echo "pyqt6-sip at:   $PYQT6_SIP_PATH" |  | ||||||
|     echo "" |  | ||||||
| 
 | 
 | ||||||
|     PATCH="export PYTHONPATH=\"" |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH" | ||||||
|  |     LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH" | ||||||
| 
 | 
 | ||||||
|     PATCH="$PATCH\$RPDFUZZ_PATH" |     export LD_LIBRARY_PATH | ||||||
|     PATCH="$PATCH:\$QDRKSTYLE_PATH" |  | ||||||
|     PATCH="$PATCH:\$QTPY_PATH" |  | ||||||
|     PATCH="$PATCH:\$PYQT6_PATH" |  | ||||||
|     PATCH="$PATCH:\$PYQT6_SIP_PATH" |  | ||||||
| 
 | 
 | ||||||
|     PATCH="$PATCH\"" |     RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages" | ||||||
|  |     QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages" | ||||||
|  |     QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages" | ||||||
|  |     PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages" | ||||||
|  |     PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages" | ||||||
| 
 | 
 | ||||||
|     if grep -q "$PATCH" "$ACTIVATE_SCRIPT_PATH"; then |     PATCH="$PATCH:$RPDFUZZ_PATH" | ||||||
|         echo "venv is already patched." |     PATCH="$PATCH:$QDRKSTYLE_PATH" | ||||||
|     else |     PATCH="$PATCH:$QTPY_PATH" | ||||||
|         echo "patching $ACTIVATE_SCRIPT_PATH to use pyqt6 from nixos..." |     PATCH="$PATCH:$PYQT6_PATH" | ||||||
|         sed -i "\$i$PATCH" $ACTIVATE_SCRIPT_PATH |     PATCH="$PATCH:$PYQT6_SIP_PATH" | ||||||
|     fi | 
 | ||||||
|  |     export PATCH | ||||||
|  | 
 | ||||||
|  |     # Install deps | ||||||
|  |     uv lock | ||||||
| 
 | 
 | ||||||
|     poetry shell |  | ||||||
|   ''; |   ''; | ||||||
| } | } | ||||||
|  |  | ||||||
|  | @ -0,0 +1,139 @@ | ||||||
|  | #!/usr/bin/env python | ||||||
|  | from decimal import ( | ||||||
|  |     Decimal, | ||||||
|  | ) | ||||||
|  | import trio | ||||||
|  | import tractor | ||||||
|  | from datetime import datetime | ||||||
|  | from pprint import pformat | ||||||
|  | from piker.brokers.deribit.api import ( | ||||||
|  |     get_client, | ||||||
|  |     maybe_open_oi_feed, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | def check_if_complete( | ||||||
|  |         oi: dict[str, dict[str, Decimal | None]] | ||||||
|  |     ) -> bool: | ||||||
|  |     return all( | ||||||
|  |         oi[strike]['C'] is not None  | ||||||
|  |         and | ||||||
|  |         oi[strike]['P'] is not None for strike in oi | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def max_pain_daemon( | ||||||
|  | ) -> None: | ||||||
|  |     oi_by_strikes: dict[str, dict[str, Decimal | None]] | ||||||
|  |     expiry_dates: list[str] | ||||||
|  |     currency: str = 'btc' | ||||||
|  |     kind: str = 'option' | ||||||
|  | 
 | ||||||
|  |     async with get_client( | ||||||
|  |     ) as client: | ||||||
|  |         expiry_dates: list[str] = await client.get_expiration_dates( | ||||||
|  |             currency=currency, | ||||||
|  |             kind=kind | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     print(f'Available expiration dates for {currency}-{kind}:') | ||||||
|  |     print(f'{expiry_dates}') | ||||||
|  |     expiry_date: str = input('Please enter a valid expiration date: ').upper() | ||||||
|  |     print('Starting little daemon...') | ||||||
|  |     instruments: list[Symbol] = [] | ||||||
|  |     oi_by_strikes: dict[str, dict[str, Decimal]] | ||||||
|  | 
 | ||||||
|  |     def update_oi_by_strikes(msg: tuple): | ||||||
|  |         nonlocal oi_by_strikes | ||||||
|  |         if 'oi' == msg[0]: | ||||||
|  |             strike_price = msg[1]['strike_price'] | ||||||
|  |             option_type = msg[1]['option_type'] | ||||||
|  |             open_interest = msg[1]['open_interest'] | ||||||
|  |             oi_by_strikes.setdefault( | ||||||
|  |                 strike_price, {} | ||||||
|  |             ).update( | ||||||
|  |                 {option_type: open_interest} | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |     def get_max_pain( | ||||||
|  |         oi_by_strikes: dict[str, dict[str, Decimal]] | ||||||
|  |     ) -> dict[str, str | Decimal]: | ||||||
|  |         ''' | ||||||
|  |         This method requires only the strike prices and the open | ||||||
|  |         interest for calls and puts; the list of closes is the same | ||||||
|  |         as the list of strike prices. The idea is to sum, for each | ||||||
|  |         strike, the cash value of all ITM calls and puts if price | ||||||
|  |         settled at that strike; the lowest total is the max pain. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  | 
 | ||||||
|  |         nonlocal timestamp | ||||||
|  |         # We need to find the lowest value, so we start at  | ||||||
|  |         # infinity to ensure that, and the max_pain must be  | ||||||
|  |         # an amount greater than zero. | ||||||
|  |         total_intrinsic_value: Decimal = Decimal('Infinity') | ||||||
|  |         max_pain: Decimal = Decimal(0) | ||||||
|  |         call_cash: Decimal = Decimal(0) | ||||||
|  |         put_cash: Decimal = Decimal(0) | ||||||
|  |         intrinsic_values: dict[str, dict[str, Decimal]] = {} | ||||||
|  |         closes: list = sorted(Decimal(close) for close in oi_by_strikes) | ||||||
|  | 
 | ||||||
|  |         for strike, oi in oi_by_strikes.items(): | ||||||
|  |             s = Decimal(strike) | ||||||
|  |             call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes) | ||||||
|  |             put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes) | ||||||
|  | 
 | ||||||
|  |             intrinsic_values[strike] = { | ||||||
|  |                 'C': call_cash, | ||||||
|  |                 'P': put_cash, | ||||||
|  |                 'total': call_cash + put_cash, | ||||||
|  |             } | ||||||
|  | 
 | ||||||
|  |             if intrinsic_values[strike]['total'] < total_intrinsic_value: | ||||||
|  |                 total_intrinsic_value = intrinsic_values[strike]['total'] | ||||||
|  |                 max_pain = s | ||||||
|  | 
 | ||||||
|  |         return { | ||||||
|  |             'timestamp': timestamp, | ||||||
|  |             'expiry_date': expiry_date, | ||||||
|  |             'total_intrinsic_value': total_intrinsic_value, | ||||||
|  |             'max_pain': max_pain, | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |     async with get_client( | ||||||
|  |     ) as client: | ||||||
|  |         instruments = await client.get_instruments( | ||||||
|  |             expiry_date=expiry_date, | ||||||
|  |         ) | ||||||
|  |         oi_by_strikes = client.get_strikes_dict(instruments) | ||||||
|  | 
 | ||||||
|  |     async with maybe_open_oi_feed( | ||||||
|  |         instruments, | ||||||
|  |     ) as oi_feed: | ||||||
|  |         async for msg in oi_feed: | ||||||
|  | 
 | ||||||
|  |             update_oi_by_strikes(msg) | ||||||
|  |             if check_if_complete(oi_by_strikes): | ||||||
|  |                 if 'oi' == msg[0]: | ||||||
|  |                     timestamp = msg[1]['timestamp'] | ||||||
|  |                     max_pain = get_max_pain(oi_by_strikes) | ||||||
|  |                     print('-----------------------------------------------') | ||||||
|  |                     print(f'timestamp:             {datetime.fromtimestamp(max_pain['timestamp'])}') | ||||||
|  |                     print(f'expiry_date:           {max_pain['expiry_date']}') | ||||||
|  |                     print(f'max_pain:              {max_pain['max_pain']}') | ||||||
|  |                     print(f'total intrinsic value: {max_pain['total_intrinsic_value']}') | ||||||
|  |                     print('-----------------------------------------------') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def main(): | ||||||
|  | 
 | ||||||
|  |     async with tractor.open_nursery() as n: | ||||||
|  |          | ||||||
|  |         p: tractor.Portal = await n.start_actor( | ||||||
|  |             'max_pain_daemon', | ||||||
|  |             enable_modules=[__name__], | ||||||
|  |             infect_asyncio=True, | ||||||
|  |         ) | ||||||
|  |         await p.run(max_pain_daemon) | ||||||
|  | 
 | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     trio.run(main) | ||||||
|  | @ -0,0 +1,19 @@ | ||||||
|  | ## Max Pain Calculation for Deribit Options | ||||||
|  | 
 | ||||||
|  | This feature calculates the max pain point for options traded on the Deribit exchange using the `cryptofeed` library (a toy sketch of the calculation is included below). | ||||||
|  | 
 | ||||||
|  | - Functions in the api module for fetching options data from Deribit. [commit](https://pikers.dev/pikers/piker/commit/da55856dd2876291f55a06eb0561438a912d8241) | ||||||
|  | 
 | ||||||
|  | - Compute the max pain point based on open interest data using deribit's api. [commit](https://pikers.dev/pikers/piker/commit/0d9d6e15ba0edeb662ec97f7599dd66af3046b94) | ||||||
|  | 
 | ||||||
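The calculation the feature implements is easy to sanity-check on paper. Below is a minimal, self-contained sketch of the same max-pain idea on toy open-interest numbers; the `oi_by_strikes` shape mirrors the example script above, but the function name and the numbers are purely illustrative and not part of the actual `piker` API.

```python
from decimal import Decimal

# Toy open interest per strike: {'C': call OI, 'P': put OI},
# keyed by strike price as a string (same shape as the example script).
oi_by_strikes: dict[str, dict[str, Decimal]] = {
    '50000': {'C': Decimal('100'), 'P': Decimal('300')},
    '55000': {'C': Decimal('200'), 'P': Decimal('150')},
    '60000': {'C': Decimal('400'), 'P': Decimal('50')},
}

def max_pain(oi: dict[str, dict[str, Decimal]]) -> Decimal:
    closes = sorted(Decimal(k) for k in oi)
    best_strike = Decimal(0)
    lowest_total = Decimal('Infinity')
    for strike in closes:
        # cash paid out to all ITM calls/puts if price settled at `strike`
        call_cash = sum(max(Decimal(0), (strike - c) * oi[str(c)]['C']) for c in closes)
        put_cash = sum(max(Decimal(0), (c - strike) * oi[str(c)]['P']) for c in closes)
        total = call_cash + put_cash
        # the settlement price that pays out the least is the max pain point
        if total < lowest_total:
            lowest_total, best_strike = total, strike
    return best_strike

print(max_pain(oi_by_strikes))
# 50000 -> 1.25M, 55000 -> 0.75M, 60000 -> 2.0M, so max pain is 55000
```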
|  | ### How to test it? | ||||||
|  | 
 | ||||||
|  | **Before you start:** in order to get this working with `uv`, you **must** use my `tractor` [fork](https://pikers.dev/ntorres/tractor/src/branch/aio_abandons) on the `aio_abandons` branch. The reason is that I cherry-picked the `uv_migration` changes that guille made; for a reason I didn't dig into, on my system `tractor` needs to use `uv` too. Quite hacky, I guess. | ||||||
|  | 
 | ||||||
|  | 1. `uv lock` | ||||||
|  | 
 | ||||||
|  | 2. `uv run --no-dev python examples/max_pain.py` | ||||||
|  | 
 | ||||||
|  | 3. A prompt should be displayed; enter one of the available expiration dates. | ||||||
|  | 
 | ||||||
|  | 4. The script should be up and running. | ||||||
|  | @ -51,6 +51,7 @@ __brokers__: list[str] = [ | ||||||
|     'ib', |     'ib', | ||||||
|     'kraken', |     'kraken', | ||||||
|     'kucoin', |     'kucoin', | ||||||
|  |     'deribit', | ||||||
| 
 | 
 | ||||||
|     # broken but used to work |     # broken but used to work | ||||||
|     # 'questrade', |     # 'questrade', | ||||||
|  | @ -61,7 +62,6 @@ __brokers__: list[str] = [ | ||||||
|     # wstrade |     # wstrade | ||||||
|     # iex |     # iex | ||||||
| 
 | 
 | ||||||
|     # deribit |  | ||||||
|     # bitso |     # bitso | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -25,6 +25,7 @@ from .api import ( | ||||||
|     get_client, |     get_client, | ||||||
| ) | ) | ||||||
| from .feed import ( | from .feed import ( | ||||||
|  |     get_mkt_info, | ||||||
|     open_history_client, |     open_history_client, | ||||||
|     open_symbol_search, |     open_symbol_search, | ||||||
|     stream_quotes, |     stream_quotes, | ||||||
|  | @ -34,15 +35,20 @@ from .feed import ( | ||||||
|     # open_trade_dialog, |     # open_trade_dialog, | ||||||
|     # norm_trade_records, |     # norm_trade_records, | ||||||
| # ) | # ) | ||||||
|  | from .venues import ( | ||||||
|  |     OptionPair, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| __all__ = [ | __all__ = [ | ||||||
|     'get_client', |     'get_client', | ||||||
| #    'trades_dialogue', | #    'trades_dialogue', | ||||||
|  |     'get_mkt_info', | ||||||
|     'open_history_client', |     'open_history_client', | ||||||
|     'open_symbol_search', |     'open_symbol_search', | ||||||
|     'stream_quotes', |     'stream_quotes', | ||||||
|  |     'OptionPair', | ||||||
| #    'norm_trade_records', | #    'norm_trade_records', | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
										
											
(File diff suppressed because it is too large)
							|  | @ -18,38 +18,59 @@ | ||||||
| Deribit backend. | Deribit backend. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
|  | from __future__ import annotations | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from typing import Any, Optional, Callable | from typing import ( | ||||||
|  |     # Any, | ||||||
|  |     # Optional, | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
|  | # from pprint import pformat | ||||||
| import time | import time | ||||||
| 
 | 
 | ||||||
|  | import cryptofeed | ||||||
| import trio | import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import pendulum | from pendulum import ( | ||||||
| from rapidfuzz import process as fuzzy |     from_timestamp, | ||||||
|  | ) | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
| 
 | 
 | ||||||
| from piker.brokers import open_cached_client | from piker.accounting import ( | ||||||
| from piker.log import get_logger, get_console_log |     Asset, | ||||||
| from piker.data import ShmArray |     MktPair, | ||||||
| from piker.brokers._util import ( |     unpack_fqme, | ||||||
|     BrokerError, | ) | ||||||
|  | from piker.brokers import ( | ||||||
|  |     open_cached_client, | ||||||
|  |     NoData, | ||||||
|     DataUnavailable, |     DataUnavailable, | ||||||
| ) | ) | ||||||
| 
 | from piker._cacheables import ( | ||||||
| from cryptofeed import FeedHandler |     async_lifo_cache, | ||||||
| from cryptofeed.defines import ( |  | ||||||
|     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT |  | ||||||
| ) | ) | ||||||
| from cryptofeed.symbols import Symbol | from piker.log import ( | ||||||
|  |     get_logger, | ||||||
|  |     mk_repr, | ||||||
|  | ) | ||||||
|  | from piker.data.validate import FeedInit | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| from .api import ( | from .api import ( | ||||||
|     Client, Trade, |     Client, | ||||||
|     get_config, |     # get_config, | ||||||
|     str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst, |     piker_sym_to_cb_sym, | ||||||
|  |     cb_sym_to_deribit_inst, | ||||||
|  |     str_to_cb_sym, | ||||||
|     maybe_open_price_feed |     maybe_open_price_feed | ||||||
| ) | ) | ||||||
|  | from .venues import ( | ||||||
|  |     Pair, | ||||||
|  |     OptionPair, | ||||||
|  |     Trade, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| _spawn_kwargs = { | _spawn_kwargs = { | ||||||
|     'infect_asyncio': True, |     'infect_asyncio': True, | ||||||
|  | @ -64,90 +85,215 @@ async def open_history_client( | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
| ) -> tuple[Callable, int]: | ) -> tuple[Callable, int]: | ||||||
| 
 | 
 | ||||||
|     fnstrument: str = mkt.bs_fqme |  | ||||||
|     # TODO implement history getter for the new storage layer. |     # TODO implement history getter for the new storage layer. | ||||||
|     async with open_cached_client('deribit') as client: |     async with open_cached_client('deribit') as client: | ||||||
| 
 | 
 | ||||||
|  |         pair: OptionPair = client._pairs[mkt.dst.name] | ||||||
|  |         # XXX NOTE, the cuckers use ms !!! | ||||||
|  |         creation_time_s: int = pair.creation_timestamp/1000 | ||||||
|  | 
 | ||||||
|         async def get_ohlc( |         async def get_ohlc( | ||||||
|             end_dt: Optional[datetime] = None, |             timeframe: float, | ||||||
|             start_dt: Optional[datetime] = None, |             end_dt: datetime | None = None, | ||||||
|  |             start_dt: datetime | None = None, | ||||||
| 
 | 
 | ||||||
|         ) -> tuple[ |         ) -> tuple[ | ||||||
|             np.ndarray, |             np.ndarray, | ||||||
|             datetime,  # start |             datetime,  # start | ||||||
|             datetime,  # end |             datetime,  # end | ||||||
|         ]: |         ]: | ||||||
|  |             if timeframe != 60: | ||||||
|  |                 raise DataUnavailable('Only 1m bars are supported') | ||||||
| 
 | 
 | ||||||
|             array = await client.bars( |             array: np.ndarray = await client.bars( | ||||||
|                 instrument, |                 mkt, | ||||||
|                 start_dt=start_dt, |                 start_dt=start_dt, | ||||||
|                 end_dt=end_dt, |                 end_dt=end_dt, | ||||||
|             ) |             ) | ||||||
|             if len(array) == 0: |             if len(array) == 0: | ||||||
|                 raise DataUnavailable |                 if ( | ||||||
|  |                     end_dt is None | ||||||
|  |                 ): | ||||||
|  |                     raise DataUnavailable( | ||||||
|  |                         'No history seems to exist yet?\n\n' | ||||||
|  |                         f'{mkt}' | ||||||
|  |                     ) | ||||||
|  |                 elif ( | ||||||
|  |                     end_dt | ||||||
|  |                     and | ||||||
|  |                     end_dt.timestamp() < creation_time_s | ||||||
|  |                 ): | ||||||
|  |                     # the contract can't have history | ||||||
|  |                     # before it was created. | ||||||
|  |                     pair_type_str: str = type(pair).__name__ | ||||||
|  |                     create_dt: datetime = from_timestamp(creation_time_s) | ||||||
|  |                     raise DataUnavailable( | ||||||
|  |                         f'No history prior to\n' | ||||||
|  |                         f'`{pair_type_str}.creation_timestamp: int = ' | ||||||
|  |                         f'{pair.creation_timestamp}\n\n' | ||||||
|  |                         f'------ deribit sux ------\n' | ||||||
|  |                         f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n' | ||||||
|  |                         f'creation_time_s: {creation_time_s}\n' | ||||||
|  |                         f'create_dt: {create_dt}\n' | ||||||
|  |                     ) | ||||||
|  |                 raise NoData( | ||||||
|  |                     f'No frame for {start_dt} -> {end_dt}\n' | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|             start_dt = pendulum.from_timestamp(array[0]['time']) |             start_dt = from_timestamp(array[0]['time']) | ||||||
|             end_dt = pendulum.from_timestamp(array[-1]['time']) |             end_dt = from_timestamp(array[-1]['time']) | ||||||
|  | 
 | ||||||
|  |             times = array['time'] | ||||||
|  |             if not times.any(): | ||||||
|  |                 raise ValueError( | ||||||
|  |                     'Bad frame with null-times?\n\n' | ||||||
|  |                     f'{times}' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |             if end_dt is None: | ||||||
|  |                 inow: int = round(time.time()) | ||||||
|  |                 if (inow - times[-1]) > 60: | ||||||
|  |                     await tractor.pause() | ||||||
| 
 | 
 | ||||||
|             return array, start_dt, end_dt |             return array, start_dt, end_dt | ||||||
| 
 | 
 | ||||||
|         yield get_ohlc, {'erlangs': 3, 'rate': 3} |         yield ( | ||||||
|  |             get_ohlc, | ||||||
|  |             {  # backfill config | ||||||
|  |                 'erlangs': 3, | ||||||
|  |                 'rate': 3, | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @async_lifo_cache() | ||||||
|  | async def get_mkt_info( | ||||||
|  |     fqme: str, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[MktPair, Pair|OptionPair] | None: | ||||||
|  | 
 | ||||||
|  |     # uppercase since kraken bs_mktid is always upper | ||||||
|  |     if 'deribit' not in fqme.lower(): | ||||||
|  |         fqme += '.deribit' | ||||||
|  | 
 | ||||||
|  |     mkt_mode: str = '' | ||||||
|  |     broker, mkt_ep, venue, expiry = unpack_fqme(fqme) | ||||||
|  | 
 | ||||||
|  |     # NOTE: we always upper case all tokens to be consistent with | ||||||
|  |     # binance's symbology style for pairs, like `BTCUSDT`, but in | ||||||
|  |     # theory we could also just keep things lower case; as long as | ||||||
|  |     # we're consistent and the symcache matches whatever this func | ||||||
|  |     # returns, always! | ||||||
|  |     expiry: str = expiry.upper() | ||||||
|  |     venue: str = venue.upper() | ||||||
|  |     # venue_lower: str = venue.lower() | ||||||
|  | 
 | ||||||
|  |     mkt_mode: str = 'option' | ||||||
|  | 
 | ||||||
|  |     async with open_cached_client( | ||||||
|  |         'deribit', | ||||||
|  |     ) as client: | ||||||
|  | 
 | ||||||
|  |         assets: dict[str, Asset] = await client.get_assets() | ||||||
|  |         pair_str: str = mkt_ep.lower() | ||||||
|  | 
 | ||||||
|  |         pair: Pair = await client.exch_info( | ||||||
|  |             sym=pair_str, | ||||||
|  |         ) | ||||||
|  |         mkt_mode = pair.venue | ||||||
|  |         client.mkt_mode = mkt_mode | ||||||
|  | 
 | ||||||
|  |         dst: Asset | None = assets.get(pair.bs_dst_asset) | ||||||
|  |         src: Asset | None = assets.get(pair.bs_src_asset) | ||||||
|  | 
 | ||||||
|  |         mkt = MktPair( | ||||||
|  |             dst=dst, | ||||||
|  |             src=src, | ||||||
|  |             price_tick=pair.price_tick, | ||||||
|  |             size_tick=pair.size_tick, | ||||||
|  |             bs_mktid=pair.symbol, | ||||||
|  |             venue=mkt_mode, | ||||||
|  |             broker='deribit', | ||||||
|  |             _atype=mkt_mode, | ||||||
|  |             _fqme_without_src=True, | ||||||
|  | 
 | ||||||
|  |             # expiry=pair.expiry, | ||||||
|  |             # XXX TODO, currently we don't use it since it's | ||||||
|  |             # already "described" in the `OptionPair.symbol: str` | ||||||
|  |             # and if we slap in the ISO repr it's kinda hideous.. | ||||||
|  |             # -[ ] figure out the best either std | ||||||
|  |         ) | ||||||
|  |         return mkt, pair | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def stream_quotes( | async def stream_quotes( | ||||||
| 
 |  | ||||||
|     send_chan: trio.abc.SendChannel, |     send_chan: trio.abc.SendChannel, | ||||||
|     symbols: list[str], |     symbols: list[str], | ||||||
|     feed_is_live: trio.Event, |     feed_is_live: trio.Event, | ||||||
|     loglevel: str = None, |  | ||||||
| 
 | 
 | ||||||
|     # startup sync |     # startup sync | ||||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, |     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging |     ''' | ||||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) |     Open a live quote stream for the market set defined by `symbols`. | ||||||
| 
 | 
 | ||||||
|     sym = symbols[0] |     Internally this starts a `cryptofeed.FeedHandler` inside an `asyncio`-side | ||||||
|  |     task and relays through L1 and `Trade` msgs here to our `trio.Task`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     sym = symbols[0].split('.')[0] | ||||||
|  |     init_msgs: list[FeedInit] = [] | ||||||
|  | 
 | ||||||
|  |     # multiline nested `dict` formatter (since rn quote-msgs are | ||||||
|  |     # just that). | ||||||
|  |     pfmt: Callable[[str], str] = mk_repr( | ||||||
|  |         # so we can see `deribit`'s delightfully mega-long bs fields.. | ||||||
|  |         maxstring=100, | ||||||
|  |     ) | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         open_cached_client('deribit') as client, |         open_cached_client('deribit') as client, | ||||||
|         send_chan as send_chan |         send_chan as send_chan | ||||||
|     ): |     ): | ||||||
|  |         mkt: MktPair | ||||||
|  |         pair: Pair | ||||||
|  |         mkt, pair = await get_mkt_info(sym) | ||||||
| 
 | 
 | ||||||
|         init_msgs = { |         # build out init msgs according to latest spec | ||||||
|             # pass back token, and bool, signalling if we're the writer |         init_msgs.append( | ||||||
|             # and that history has been written |             FeedInit( | ||||||
|             sym: { |                 mkt_info=mkt, | ||||||
|                 'symbol_info': { |             ) | ||||||
|                     'asset_type': 'option', |         ) | ||||||
|                     'price_tick_size': 0.0005 |         # build `cryptofeed` feed-handle | ||||||
|                 }, |         cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym) | ||||||
|                 'shm_write_opts': {'sum_tick_vml': False}, |  | ||||||
|                 'fqsn': sym, |  | ||||||
|             }, |  | ||||||
|         } |  | ||||||
| 
 | 
 | ||||||
|         nsym = piker_sym_to_cb_sym(sym) |         from_cf: tractor.to_asyncio.LinkedTaskChannel | ||||||
|  |         async with maybe_open_price_feed(sym) as from_cf: | ||||||
| 
 | 
 | ||||||
|         async with maybe_open_price_feed(sym) as stream: |             # load the "last trades" summary | ||||||
|  |             last_trades_res: cryptofeed.LastTradesResult = await client.last_trades( | ||||||
|  |                 cb_sym_to_deribit_inst(cf_sym), | ||||||
|  |                 count=1, | ||||||
|  |             ) | ||||||
|  |             last_trades: list[Trade] = last_trades_res.trades | ||||||
| 
 | 
 | ||||||
|             cache = await client.cache_symbols() |             # TODO, do we even need this or will the above always | ||||||
|  |             # work? | ||||||
|  |             # if not last_trades: | ||||||
|  |             #     await tractor.pause() | ||||||
|  |             #     async for typ, quote in from_cf: | ||||||
|  |             #         if typ == 'trade': | ||||||
|  |             #             last_trade = Trade(**(quote['data'])) | ||||||
|  |             #             break | ||||||
| 
 | 
 | ||||||
|             last_trades = (await client.last_trades( |             # else: | ||||||
|                 cb_sym_to_deribit_inst(nsym), count=1)).trades |             last_trade = Trade( | ||||||
|  |                 **(last_trades[0]) | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             if len(last_trades) == 0: |             first_quote: dict = { | ||||||
|                 last_trade = None |  | ||||||
|                 async for typ, quote in stream: |  | ||||||
|                     if typ == 'trade': |  | ||||||
|                         last_trade = Trade(**(quote['data'])) |  | ||||||
|                         break |  | ||||||
| 
 |  | ||||||
|             else: |  | ||||||
|                 last_trade = Trade(**(last_trades[0])) |  | ||||||
| 
 |  | ||||||
|             first_quote = { |  | ||||||
|                 'symbol': sym, |                 'symbol': sym, | ||||||
|                 'last': last_trade.price, |                 'last': last_trade.price, | ||||||
|                 'brokerd_ts': last_trade.timestamp, |                 'brokerd_ts': last_trade.timestamp, | ||||||
|  | @ -158,13 +304,84 @@ async def stream_quotes( | ||||||
|                     'broker_ts': last_trade.timestamp |                     'broker_ts': last_trade.timestamp | ||||||
|                 }] |                 }] | ||||||
|             } |             } | ||||||
|             task_status.started((init_msgs,  first_quote)) |             task_status.started(( | ||||||
|  |                 init_msgs, | ||||||
|  |                 first_quote, | ||||||
|  |             )) | ||||||
| 
 | 
 | ||||||
|             feed_is_live.set() |             feed_is_live.set() | ||||||
| 
 | 
 | ||||||
|             async for typ, quote in stream: |             # NOTE XXX, static for now! | ||||||
|                 topic = quote['symbol'] |             # => since this only handles ONE mkt feed at a time we | ||||||
|                 await send_chan.send({topic: quote}) |             # don't need a lookup table to map interleaved quotes | ||||||
|  |             # from multiple possible mkt-pairs | ||||||
|  |             topic: str = mkt.bs_fqme | ||||||
|  | 
 | ||||||
|  |             # deliver until cancelled | ||||||
|  |             async for typ, ref in from_cf: | ||||||
|  |                 match typ: | ||||||
|  |                     case 'trade': | ||||||
|  |                         trade: cryptofeed.types.Trade = ref | ||||||
|  | 
 | ||||||
|  |                         # TODO, re-impl this according to the ideal | ||||||
|  |                         # fqme for opts that we choose!! | ||||||
|  |                         bs_fqme: str = cb_sym_to_deribit_inst( | ||||||
|  |                             str_to_cb_sym(trade.symbol) | ||||||
|  |                         ).lower() | ||||||
|  | 
 | ||||||
|  |                         piker_quote: dict = { | ||||||
|  |                             'symbol': bs_fqme, | ||||||
|  |                             'last': trade.price, | ||||||
|  |                             'broker_ts': time.time(), | ||||||
|  |                             # ^TODO, name this `brokerd/datad_ts` and | ||||||
|  |                             # use `time.time_ns()` ?? | ||||||
|  |                             'ticks': [{ | ||||||
|  |                                 'type': 'trade', | ||||||
|  |                                 'price': float(trade.price), | ||||||
|  |                                 'size': float(trade.amount), | ||||||
|  |                                 'broker_ts': trade.timestamp, | ||||||
|  |                             }], | ||||||
|  |                         } | ||||||
|  |                         log.info( | ||||||
|  |                             f'deribit {typ!r} quote for {sym!r}\n\n' | ||||||
|  |                             f'{trade}\n\n' | ||||||
|  |                             f'{pfmt(piker_quote)}\n' | ||||||
|  |                         ) | ||||||
|  | 
 | ||||||
|  |                     case 'l1': | ||||||
|  |                         book: cryptofeed.types.L1Book = ref | ||||||
|  | 
 | ||||||
|  |                         # TODO, so this is where we can possibly change things | ||||||
|  |                         # and instead lever the `MktPair.bs_fqme: str` output? | ||||||
|  |                         bs_fqme: str = cb_sym_to_deribit_inst( | ||||||
|  |                             str_to_cb_sym(book.symbol) | ||||||
|  |                         ).lower() | ||||||
|  | 
 | ||||||
|  |                         piker_quote: dict = { | ||||||
|  |                             'symbol': bs_fqme, | ||||||
|  |                             'ticks': [ | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'bid', | ||||||
|  |                                  'price': float(book.bid_price), | ||||||
|  |                                  'size': float(book.bid_size)}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'bsize', | ||||||
|  |                                  'price': float(book.bid_price), | ||||||
|  |                                  'size': float(book.bid_size),}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'ask', | ||||||
|  |                                  'price': float(book.ask_price), | ||||||
|  |                                  'size': float(book.ask_size),}, | ||||||
|  | 
 | ||||||
|  |                                 {'type': 'asize', | ||||||
|  |                                  'price': float(book.ask_price), | ||||||
|  |                                  'size': float(book.ask_size),} | ||||||
|  |                             ] | ||||||
|  |                         } | ||||||
|  | 
 | ||||||
|  |                 await send_chan.send({ | ||||||
|  |                     topic: piker_quote, | ||||||
|  |                 }) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
|  | @ -174,12 +391,21 @@ async def open_symbol_search( | ||||||
|     async with open_cached_client('deribit') as client: |     async with open_cached_client('deribit') as client: | ||||||
| 
 | 
 | ||||||
|         # load all symbols locally for fast search |         # load all symbols locally for fast search | ||||||
|         cache = await client.cache_symbols() |         # cache = client._pairs | ||||||
|         await ctx.started() |         await ctx.started() | ||||||
| 
 | 
 | ||||||
|         async with ctx.open_stream() as stream: |         async with ctx.open_stream() as stream: | ||||||
| 
 |             pattern: str | ||||||
|             async for pattern in stream: |             async for pattern in stream: | ||||||
|                 # repack in dict form | 
 | ||||||
|                 await stream.send( |                 # NOTE: pattern fuzzy-matching is done within | ||||||
|                     await client.search_symbols(pattern)) |                 # the method impl. | ||||||
|  |                 pairs: dict[str, Pair] = await client.search_symbols( | ||||||
|  |                     pattern, | ||||||
|  |                 ) | ||||||
|  |                 # repack in fqme-keyed table | ||||||
|  |                 byfqme: dict[str, Pair] = {} | ||||||
|  |                 for pair in pairs.values(): | ||||||
|  |                     byfqme[pair.bs_fqme] = pair | ||||||
|  | 
 | ||||||
|  |                 await stream.send(byfqme) | ||||||
|  |  | ||||||
|  | @ -0,0 +1,196 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | Per market data-type definitions and schema types. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | import pendulum | ||||||
|  | from typing import ( | ||||||
|  |     Literal, | ||||||
|  |     Optional, | ||||||
|  | ) | ||||||
|  | from decimal import Decimal | ||||||
|  | 
 | ||||||
|  | from piker.types import Struct | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # API endpoint paths by venue / sub-API | ||||||
|  | _domain: str = 'deribit.com' | ||||||
|  | _url = f'https://www.{_domain}' | ||||||
|  | 
 | ||||||
|  | # WEBsocketz | ||||||
|  | _ws_url: str = f'wss://www.{_domain}/ws/api/v2' | ||||||
|  | 
 | ||||||
|  | # test nets | ||||||
|  | _testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2' | ||||||
|  | 
 | ||||||
|  | MarketType = Literal[ | ||||||
|  |     'option' | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_api_eps(venue: MarketType) -> tuple[str, str]: | ||||||
|  |     ''' | ||||||
|  |     Return API ep root paths per venue. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     return { | ||||||
|  |         'option': ( | ||||||
|  |             _ws_url, | ||||||
|  |         ), | ||||||
|  |     }[venue] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Pair(Struct, frozen=True, kw_only=True): | ||||||
|  | 
 | ||||||
|  |     symbol: str | ||||||
|  | 
 | ||||||
|  |     # src | ||||||
|  |     quote_currency: str # 'BTC' | ||||||
|  | 
 | ||||||
|  |     # dst | ||||||
|  |     base_currency: str # "BTC", | ||||||
|  | 
 | ||||||
|  |     tick_size: float # 0.0001 | ||||||
|  |     tick_size_steps: list[dict[str, float]] # [{'above_price': 0.005, 'tick_size': 0.0005}] | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def price_tick(self) -> Decimal: | ||||||
|  |         return Decimal(str(self.tick_size_steps[0]['above_price'])) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def size_tick(self) -> Decimal: | ||||||
|  |         return Decimal(str(self.tick_size)) | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_fqme(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_mktid(self) -> str: | ||||||
|  |         return f'{self.symbol}.{self.venue}' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class OptionPair(Pair, frozen=True): | ||||||
|  | 
 | ||||||
|  |     taker_commission: float # 0.0003 | ||||||
|  |     strike: float # 5000.0 | ||||||
|  |     settlement_period: str # 'day' | ||||||
|  |     settlement_currency: str # "BTC", | ||||||
|  |     rfq: bool # false | ||||||
|  |     price_index: str # 'btc_usd' | ||||||
|  |     option_type: str # 'call' | ||||||
|  |     min_trade_amount: float # 0.1 | ||||||
|  |     maker_commission: float # 0.0003 | ||||||
|  |     kind: str # 'option' | ||||||
|  |     is_active: bool # true | ||||||
|  |     instrument_type: str # 'reversed' | ||||||
|  |     instrument_name: str # 'BTC-1SEP24-55000-C' | ||||||
|  |     instrument_id: int # 364671 | ||||||
|  |     expiration_timestamp: int # 1725177600000 | ||||||
|  |     creation_timestamp: int # 1724918461000 | ||||||
|  |     counter_currency: str # 'USD'  | ||||||
|  |     contract_size: float # '1.0' | ||||||
|  |     block_trade_tick_size: float # '0.0001' | ||||||
|  |     block_trade_min_trade_amount: int # '25' | ||||||
|  |     block_trade_commission: float # '0.003' | ||||||
|  | 
 | ||||||
|  |     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||||
|  |     ns_path: str = 'piker.brokers.deribit:OptionPair' | ||||||
|  | 
 | ||||||
|  |     # TODO, impl this without the MM:SS part of | ||||||
|  |     # the `'THH:MM:SS..'` etc.. | ||||||
|  |     @property | ||||||
|  |     def expiry(self) -> str: | ||||||
|  |         iso_date = pendulum.from_timestamp( | ||||||
|  |             self.expiration_timestamp / 1000 | ||||||
|  |         ).isoformat() | ||||||
|  |         return iso_date  | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def venue(self) -> str: | ||||||
|  |         return f'{self.instrument_type}_option' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_fqme(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_src_asset(self) -> str: | ||||||
|  |         return f'{self.quote_currency}' | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def bs_dst_asset(self) -> str: | ||||||
|  |         return f'{self.symbol}' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | PAIRTYPES: dict[MarketType, Pair] = { | ||||||
|  |     'option': OptionPair, | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class JSONRPCResult(Struct): | ||||||
|  |     id: int | ||||||
|  |     usIn: int | ||||||
|  |     usOut: int | ||||||
|  |     usDiff: int | ||||||
|  |     testnet: bool | ||||||
|  |     jsonrpc: str = '2.0' | ||||||
|  |     error: Optional[dict] = None | ||||||
|  |     result: Optional[list[dict]] = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class JSONRPCChannel(Struct): | ||||||
|  |     method: str | ||||||
|  |     params: dict | ||||||
|  |     jsonrpc: str = '2.0' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class KLinesResult(Struct): | ||||||
|  |     low: list[float] | ||||||
|  |     cost: list[float] | ||||||
|  |     high: list[float] | ||||||
|  |     open: list[float] | ||||||
|  |     close: list[float] | ||||||
|  |     ticks: list[int] | ||||||
|  |     status: str | ||||||
|  |     volume: list[float] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Trade(Struct): | ||||||
|  |     iv: float | ||||||
|  |     price: float | ||||||
|  |     amount: float | ||||||
|  |     trade_id: str | ||||||
|  |     contracts: float | ||||||
|  |     direction: str | ||||||
|  |     trade_seq: int | ||||||
|  |     timestamp: int | ||||||
|  |     mark_price: float | ||||||
|  |     index_price: float | ||||||
|  |     tick_direction: int | ||||||
|  |     instrument_name: str | ||||||
|  |     combo_id: Optional[str] = '' | ||||||
|  |     combo_trade_id: Optional[int] = 0 | ||||||
|  |     block_trade_id: Optional[str] = '' | ||||||
|  |     block_trade_leg_count: Optional[int] = 0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class LastTradesResult(Struct): | ||||||
|  |     trades: list[Trade] | ||||||
|  |     has_more: bool | ||||||
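As a side note on the `Pair`/`OptionPair` schema above: the `price_tick`, `size_tick` and `expiry` properties only normalize raw deribit instrument fields into `Decimal` ticks and an ISO-8601 expiry string. A standalone sketch of that derivation, using made-up field values rather than the real structs, looks roughly like this:

```python
from decimal import Decimal
import pendulum

# representative (made-up) fields from a deribit instrument payload
raw = {
    'tick_size': 0.0005,
    'tick_size_steps': [{'above_price': 0.005, 'tick_size': 0.0005}],
    'expiration_timestamp': 1725177600000,  # epoch milliseconds
}

# go through str() so float repr noise doesn't leak into the Decimal
size_tick = Decimal(str(raw['tick_size']))
price_tick = Decimal(str(raw['tick_size_steps'][0]['above_price']))

# deribit timestamps are in ms, pendulum expects seconds
expiry_iso = pendulum.from_timestamp(
    raw['expiration_timestamp'] / 1000
).isoformat()

print(size_tick, price_tick, expiry_iso)
```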
|  | @ -30,6 +30,7 @@ import time | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|     AsyncIterator, |     AsyncIterator, | ||||||
|  |     Callable, | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
|  | @ -54,6 +55,9 @@ from ._util import ( | ||||||
|     get_console_log, |     get_console_log, | ||||||
| ) | ) | ||||||
| from ..service import maybe_spawn_daemon | from ..service import maybe_spawn_daemon | ||||||
|  | from piker.log import ( | ||||||
|  |     mk_repr, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     from ._sharedmem import ( |     from ._sharedmem import ( | ||||||
|  | @ -575,7 +579,6 @@ async def open_sample_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def sample_and_broadcast( | async def sample_and_broadcast( | ||||||
| 
 |  | ||||||
|     bus: _FeedsBus,  # noqa |     bus: _FeedsBus,  # noqa | ||||||
|     rt_shm: ShmArray, |     rt_shm: ShmArray, | ||||||
|     hist_shm: ShmArray, |     hist_shm: ShmArray, | ||||||
|  | @ -596,11 +599,22 @@ async def sample_and_broadcast( | ||||||
| 
 | 
 | ||||||
|     overruns = Counter() |     overruns = Counter() | ||||||
| 
 | 
 | ||||||
|  |     # multiline nested `dict` formatter (since rn quote-msgs are | ||||||
|  |     # just that). | ||||||
|  |     pfmt: Callable[[str], str] = mk_repr() | ||||||
|  | 
 | ||||||
|     # iterate stream delivered by broker |     # iterate stream delivered by broker | ||||||
|     async for quotes in quote_stream: |     async for quotes in quote_stream: | ||||||
|         # print(quotes) |  | ||||||
| 
 | 
 | ||||||
|         # TODO: ``numba`` this! |         # XXX WARNING XXX only enable for debugging bc ow can cost | ||||||
|  |         # ALOT of perf with HF-feedz!!! | ||||||
|  |         # | ||||||
|  |         # log.info( | ||||||
|  |         #     'Rx live quotes:\n' | ||||||
|  |         #     f'{pfmt(quotes)}' | ||||||
|  |         # ) | ||||||
|  | 
 | ||||||
|  |         # TODO: `numba` this! | ||||||
|         for broker_symbol, quote in quotes.items(): |         for broker_symbol, quote in quotes.items(): | ||||||
|             # TODO: in theory you can send the IPC msg *before* writing |             # TODO: in theory you can send the IPC msg *before* writing | ||||||
|             # to the sharedmem array to decrease latency, however, that |             # to the sharedmem array to decrease latency, however, that | ||||||
|  | @ -673,6 +687,18 @@ async def sample_and_broadcast( | ||||||
|             sub_key: str = broker_symbol.lower() |             sub_key: str = broker_symbol.lower() | ||||||
|             subs: set[Sub] = bus.get_subs(sub_key) |             subs: set[Sub] = bus.get_subs(sub_key) | ||||||
| 
 | 
 | ||||||
|  |             if not subs: | ||||||
|  |                 all_bs_fqmes: list[str] = list( | ||||||
|  |                     bus._subscribers.keys() | ||||||
|  |                 ) | ||||||
|  |                 log.warning( | ||||||
|  |                     f'No subscribers for {brokername!r} live-quote ??\n' | ||||||
|  |                     f'broker_symbol: {broker_symbol}\n\n' | ||||||
|  | 
 | ||||||
|  |                     f'Maybe the backend-sys symbol does not match one of,\n' | ||||||
|  |                     f'{pfmt(all_bs_fqmes)}\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|             # NOTE: by default the broker backend doesn't append |             # NOTE: by default the broker backend doesn't append | ||||||
|             # it's own "name" into the fqme schema (but maybe it |             # it's own "name" into the fqme schema (but maybe it | ||||||
|             # should?) so we have to manually generate the correct |             # should?) so we have to manually generate the correct | ||||||
|  |  | ||||||
|  | @ -273,7 +273,7 @@ async def _reconnect_forever( | ||||||
|                 nobsws._connected.set() |                 nobsws._connected.set() | ||||||
|                 await trio.sleep_forever() |                 await trio.sleep_forever() | ||||||
|         except HandshakeError: |         except HandshakeError: | ||||||
|             log.exception(f'Retrying connection') |             log.exception('Retrying connection') | ||||||
| 
 | 
 | ||||||
|         # ws & nursery block ends |         # ws & nursery block ends | ||||||
| 
 | 
 | ||||||
|  | @ -359,8 +359,8 @@ async def open_autorecon_ws( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| JSONRPC response-request style machinery for transparent multiplexing of msgs | JSONRPC response-request style machinery for transparent multiplexing | ||||||
| over a NoBsWs. | of msgs over a `NoBsWs`. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| 
 | 
 | ||||||
|  | @ -377,43 +377,77 @@ async def open_jsonrpc_session( | ||||||
|     url: str, |     url: str, | ||||||
|     start_id: int = 0, |     start_id: int = 0, | ||||||
|     response_type: type = JSONRPCResult, |     response_type: type = JSONRPCResult, | ||||||
|     request_type: Optional[type] = None, |     msg_recv_timeout: float = float('inf'), | ||||||
|     request_hook: Optional[Callable] = None, |     # ^NOTE, since only `deribit` is using this jsonrpc stuff atm | ||||||
|     error_hook: Optional[Callable] = None, |     # and options mkts are generally "slow moving".. | ||||||
|  |     # | ||||||
|  |     # FURTHER if we break the underlying ws connection then since we | ||||||
|  |     # don't pass a `fixture` to the task that manages `NoBsWs`, i.e. | ||||||
|  |     # `_reconnect_forever()`, the jsonrpc "transport pipe" get's | ||||||
|  |     # broken and never restored with wtv init sequence is required to | ||||||
|  |     # re-establish a working req-resp session. | ||||||
|  | 
 | ||||||
|  |     # request_type: Optional[type] = None, | ||||||
|  |     # request_hook: Optional[Callable] = None, | ||||||
|  |     # error_hook: Optional[Callable] = None, | ||||||
| ) -> Callable[[str, dict], dict]: | ) -> Callable[[str, dict], dict]: | ||||||
| 
 | 
 | ||||||
|  |     # NOTE, store all request msgs so we can raise errors on the | ||||||
|  |     # caller side! | ||||||
|  |     req_msgs: dict[int, dict] = {} | ||||||
|  | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         trio.open_nursery() as n, |         trio.open_nursery() as n, | ||||||
|         open_autorecon_ws(url) as ws |         open_autorecon_ws( | ||||||
|  |             url=url, | ||||||
|  |             msg_recv_timeout=msg_recv_timeout, | ||||||
|  |         ) as ws | ||||||
|     ): |     ): | ||||||
|         rpc_id: Iterable = count(start_id) |         rpc_id: Iterable[int] = count(start_id) | ||||||
|         rpc_results: dict[int, dict] = {} |         rpc_results: dict[int, dict] = {} | ||||||
| 
 | 
 | ||||||
|         async def json_rpc(method: str, params: dict) -> dict: |         async def json_rpc( | ||||||
|  |             method: str, | ||||||
|  |             params: dict, | ||||||
|  |         ) -> dict: | ||||||
|             ''' |             ''' | ||||||
|             perform a json rpc call and wait for the result, raise exception in |             perform a json rpc call and wait for the result, raise exception in | ||||||
|             case of error field present on response |             case of error field present on response | ||||||
|             ''' |             ''' | ||||||
|  |             nonlocal req_msgs | ||||||
|  | 
 | ||||||
|  |             req_id: int = next(rpc_id) | ||||||
|             msg = { |             msg = { | ||||||
|                 'jsonrpc': '2.0', |                 'jsonrpc': '2.0', | ||||||
|                 'id': next(rpc_id), |                 'id': req_id, | ||||||
|                 'method': method, |                 'method': method, | ||||||
|                 'params': params |                 'params': params | ||||||
|             } |             } | ||||||
|             _id = msg['id'] |             _id = msg['id'] | ||||||
| 
 | 
 | ||||||
|             rpc_results[_id] = { |             result = rpc_results[_id] = { | ||||||
|                 'result': None, |                 'result': None, | ||||||
|                 'event': trio.Event() |                 'error': None, | ||||||
|  |                 'event': trio.Event(),  # signal caller resp arrived | ||||||
|             } |             } | ||||||
|  |             req_msgs[_id] = msg | ||||||
| 
 | 
 | ||||||
|             await ws.send_msg(msg) |             await ws.send_msg(msg) | ||||||
| 
 | 
 | ||||||
|  |             # wait for response before unblocking requester code | ||||||
|             await rpc_results[_id]['event'].wait() |             await rpc_results[_id]['event'].wait() | ||||||
| 
 | 
 | ||||||
|             ret = rpc_results[_id]['result'] |             if (maybe_result := result['result']): | ||||||
|  |                 ret = maybe_result | ||||||
|  |                 del rpc_results[_id] | ||||||
| 
 | 
 | ||||||
|             del rpc_results[_id] |             else: | ||||||
|  |                 err = result['error'] | ||||||
|  |                 raise Exception( | ||||||
|  |                     f'JSONRPC request failed\n' | ||||||
|  |                     f'req: {msg}\n' | ||||||
|  |                     f'resp: {err}\n' | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|             if ret.error is not None: |             if ret.error is not None: | ||||||
|                 raise Exception(json.dumps(ret.error, indent=4)) |                 raise Exception(json.dumps(ret.error, indent=4)) | ||||||
|  | @ -428,6 +462,7 @@ async def open_jsonrpc_session( | ||||||
|             the server side. |             the server side. | ||||||
| 
 | 
 | ||||||
|             ''' |             ''' | ||||||
|  |             nonlocal req_msgs | ||||||
|             async for msg in ws: |             async for msg in ws: | ||||||
|                 match msg: |                 match msg: | ||||||
|                     case { |                     case { | ||||||
|  | @ -451,15 +486,29 @@ async def open_jsonrpc_session( | ||||||
|                         'params': _, |                         'params': _, | ||||||
|                     }: |                     }: | ||||||
|                         log.debug(f'Received\n{msg}') |                         log.debug(f'Received\n{msg}') | ||||||
|                         if request_hook: |                         # if request_hook: | ||||||
|                             await request_hook(request_type(**msg)) |                         #     await request_hook(request_type(**msg)) | ||||||
| 
 | 
 | ||||||
|                     case { |                     case { | ||||||
|                         'error': error |                         'error': error | ||||||
|                     }: |                     }: | ||||||
|                         log.warning(f'Recieved\n{error}') |                         # if error_hook: | ||||||
|                         if error_hook: |                         #     await error_hook(response_type(**msg)) | ||||||
|                             await error_hook(response_type(**msg)) | 
 | ||||||
|  |                         # retrieve orig request msg, set error | ||||||
|  |                         # response in original "result" msg, | ||||||
|  |                         # THEN FINALLY set the event to signal caller | ||||||
|  |                         # to raise the error in the parent task. | ||||||
|  |                         req_id: int = msg['id'] | ||||||
|  |                         req_msg: dict = req_msgs[req_id] | ||||||
|  |                         result: dict = rpc_results[req_id] | ||||||
|  |                         result['error'] = error | ||||||
|  |                         result['event'].set() | ||||||
|  |                         log.error( | ||||||
|  |                             f'JSONRPC request failed\n' | ||||||
|  |                             f'req: {req_msg}\n' | ||||||
|  |                             f'resp: {error}\n' | ||||||
|  |                         ) | ||||||
| 
 | 
 | ||||||
|                     case _: |                     case _: | ||||||
|                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}') |                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}') | ||||||
|  |  | ||||||
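For reference, a minimal usage sketch of the updated `open_jsonrpc_session()` shown above. It assumes the function lives in `piker.data._web_bs`, is used as an async context manager that yields the `json_rpc` callable, and that the endpoint URL, method name and 30s timeout below are placeholders rather than values taken from this changeset:

    import trio
    from piker.data._web_bs import open_jsonrpc_session  # assumed module path

    async def main():
        async with open_jsonrpc_session(
            url='wss://test.deribit.com/ws/api/v2',  # placeholder endpoint
            msg_recv_timeout=30,  # finite timeout instead of the `inf` default
        ) as json_rpc:
            # blocks until the matching response (by msg id) arrives,
            # or raises if the server returned an 'error' field.
            resp = await json_rpc('public/get_time', {})
            print(resp)

    trio.run(main)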
|  | @ -540,7 +540,10 @@ async def open_feed_bus( | ||||||
|         # subscription since the backend isn't (yet) expected to |         # subscription since the backend isn't (yet) expected to | ||||||
|         # append it's own name to the fqme, so we filter on keys |         # append it's own name to the fqme, so we filter on keys | ||||||
|         # which *do not* include that name (e.g .ib) . |         # which *do not* include that name (e.g .ib) . | ||||||
|         bus._subscribers.setdefault(bs_fqme, set()) |         bus._subscribers.setdefault( | ||||||
|  |             bs_fqme, | ||||||
|  |             set(), | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|     # sync feed subscribers with flume handles |     # sync feed subscribers with flume handles | ||||||
|     await ctx.started( |     await ctx.started( | ||||||
|  |  | ||||||
							
								
								
									
28 piker/log.py
|  | @ -18,7 +18,11 @@ | ||||||
							|  | @ -18,7 +18,11 @@ | ||||||
| Log like a forester! | Log like a forester! | ||||||
| """ | """ | ||||||
| import logging | import logging | ||||||
|  | import reprlib | ||||||
| import json | import json | ||||||
|  | from typing import ( | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
| from pygments import ( | from pygments import ( | ||||||
|  | @ -84,3 +88,27 @@ def colorize_json( | ||||||
|         # likeable styles: algol_nu, tango, monokai |         # likeable styles: algol_nu, tango, monokai | ||||||
|         formatters.TerminalTrueColorFormatter(style=style) |         formatters.TerminalTrueColorFormatter(style=style) | ||||||
|     ) |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def mk_repr( | ||||||
|  |     **repr_kws, | ||||||
|  | ) -> Callable[[str], str]: | ||||||
|  |     ''' | ||||||
|  |     Allocate and deliver a `reprlib.Repr` instance with provided input | ||||||
|  |     settings using the std-lib's `reprlib` mod, | ||||||
|  |      * https://docs.python.org/3/library/reprlib.html | ||||||
|  | 
 | ||||||
|  |     ------ Ex. ------ | ||||||
|  |     An up to 6-layer-nested `dict` as multi-line: | ||||||
|  |     - https://stackoverflow.com/a/79102479 | ||||||
|  |     - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def_kws: dict[str, int] = dict( | ||||||
|  |         indent=2, | ||||||
|  |         maxlevel=6,  # recursion levels | ||||||
|  |         maxstring=66,  # match editor line-len limit | ||||||
|  |     ) | ||||||
|  |     def_kws |= repr_kws | ||||||
|  |     reprr = reprlib.Repr(**def_kws) | ||||||
|  |     return reprr.repr | ||||||
|  |  | ||||||
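A small usage sketch for the new `mk_repr()` helper above, assuming Python 3.12+ (where `reprlib.Repr` accepts attribute values as constructor keyword arguments):

    from piker.log import mk_repr

    nested = {'a': {'b': {'c': {'d': list(range(100))}}}}

    # build a pretty-printer, overriding the 66-char string default
    pformat = mk_repr(maxstring=100)
    print(pformat(nested))  # indented, depth- and length-limited repr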
|  | @ -161,7 +161,12 @@ class NativeStorageClient: | ||||||
| 
 | 
 | ||||||
|     def index_files(self): |     def index_files(self): | ||||||
|         for path in self._datadir.iterdir(): |         for path in self._datadir.iterdir(): | ||||||
|             if path.name in {'borked', 'expired',}: |             if ( | ||||||
|  |                 path.name in {'borked', 'expired',} | ||||||
|  |                 or | ||||||
|  |                 '.parquet' not in str(path) | ||||||
|  |             ): | ||||||
|  |                 # ignore all non-apache files (for now) | ||||||
|                 continue |                 continue | ||||||
| 
 | 
 | ||||||
|             key: str = path.name.rstrip('.parquet') |             key: str = path.name.rstrip('.parquet') | ||||||
|  |  | ||||||
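The `index_files()` filter above now skips the `borked`/`expired` sentinels and any non-parquet entries; a standalone sketch of the same idea with `pathlib` follows (the data-dir path is hypothetical, and `removesuffix()` is used here since `str.rstrip()` strips a character set rather than a literal suffix):

    from pathlib import Path

    datadir = Path('~/.config/piker/nativedb').expanduser()  # hypothetical location
    keys = [
        p.name.removesuffix('.parquet')
        for p in datadir.glob('*.parquet')
        if p.name not in {'borked', 'expired'}
    ]
    print(keys)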
|  | @ -44,8 +44,10 @@ import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import tractor | import tractor | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|  |     Interval, | ||||||
|     DateTime, |     DateTime, | ||||||
|     Duration, |     Duration, | ||||||
|  |     duration as mk_duration, | ||||||
|     from_timestamp, |     from_timestamp, | ||||||
| ) | ) | ||||||
| import numpy as np | import numpy as np | ||||||
|  | @ -214,7 +216,8 @@ async def maybe_fill_null_segments( | ||||||
|         # pair, immediately stop backfilling? |         # pair, immediately stop backfilling? | ||||||
|         if ( |         if ( | ||||||
|             start_dt |             start_dt | ||||||
|             and end_dt < start_dt |             and | ||||||
|  |             end_dt < start_dt | ||||||
|         ): |         ): | ||||||
|             await tractor.pause() |             await tractor.pause() | ||||||
|             break |             break | ||||||
|  | @ -262,6 +265,7 @@ async def maybe_fill_null_segments( | ||||||
|         except tractor.ContextCancelled: |         except tractor.ContextCancelled: | ||||||
|             # log.exception |             # log.exception | ||||||
|             await tractor.pause() |             await tractor.pause() | ||||||
|  |             raise | ||||||
| 
 | 
 | ||||||
|     null_segs_detected.set() |     null_segs_detected.set() | ||||||
|     # RECHECK for more null-gaps |     # RECHECK for more null-gaps | ||||||
|  | @ -349,7 +353,7 @@ async def maybe_fill_null_segments( | ||||||
| 
 | 
 | ||||||
| async def start_backfill( | async def start_backfill( | ||||||
|     get_hist, |     get_hist, | ||||||
|     frame_types: dict[str, Duration] | None, |     def_frame_duration: Duration, | ||||||
|     mod: ModuleType, |     mod: ModuleType, | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
|     shm: ShmArray, |     shm: ShmArray, | ||||||
|  | @ -379,22 +383,23 @@ async def start_backfill( | ||||||
|         update_start_on_prepend: bool = False |         update_start_on_prepend: bool = False | ||||||
|         if backfill_until_dt is None: |         if backfill_until_dt is None: | ||||||
| 
 | 
 | ||||||
|             # TODO: drop this right and just expose the backfill |             # TODO: per-provider default history-durations? | ||||||
|             # limits inside a [storage] section in conf.toml? |             # -[ ] inside the `open_history_client()` config allow | ||||||
|             # when no tsdb "last datum" is provided, we just load |             #    declaring the history duration limits instead of | ||||||
|             # some near-term history. |             #    guessing and/or applying the same limits to all? | ||||||
|             # periods = { |             # | ||||||
|             #     1: {'days': 1}, |             # -[ ] allow declaring (default) per-provider backfill | ||||||
|             #     60: {'days': 14}, |             #     limits inside a [storage] sub-section in conf.toml? | ||||||
|             # } |             # | ||||||
| 
 |             # NOTE, when no tsdb "last datum" is provided, we just | ||||||
|             # do a decently sized backfill and load it into storage. |             # load some near-term history by presuming a "decently | ||||||
|  |             # large" 60s duration limit and a much shorter 1s range. | ||||||
|             periods = { |             periods = { | ||||||
|                 1: {'days': 2}, |                 1: {'days': 2}, | ||||||
|                 60: {'years': 6}, |                 60: {'years': 6}, | ||||||
|             } |             } | ||||||
|             period_duration: int = periods[timeframe] |             period_duration: int = periods[timeframe] | ||||||
|             update_start_on_prepend = True |             update_start_on_prepend: bool = True | ||||||
| 
 | 
 | ||||||
|             # NOTE: manually set the "latest" datetime which we intend to |             # NOTE: manually set the "latest" datetime which we intend to | ||||||
|             # backfill history "until" so as to adhere to the history |             # backfill history "until" so as to adhere to the history | ||||||
|  | @ -416,7 +421,6 @@ async def start_backfill( | ||||||
|                 f'backfill_until_dt: {backfill_until_dt}\n' |                 f'backfill_until_dt: {backfill_until_dt}\n' | ||||||
|                 f'last_start_dt: {last_start_dt}\n' |                 f'last_start_dt: {last_start_dt}\n' | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             try: |             try: | ||||||
|                 ( |                 ( | ||||||
|                     array, |                     array, | ||||||
|  | @ -426,37 +430,58 @@ async def start_backfill( | ||||||
|                     timeframe, |                     timeframe, | ||||||
|                     end_dt=last_start_dt, |                     end_dt=last_start_dt, | ||||||
|                 ) |                 ) | ||||||
| 
 |  | ||||||
|             except NoData as _daterr: |             except NoData as _daterr: | ||||||
|                 # 3 cases: |                 orig_last_start_dt: datetime = last_start_dt | ||||||
|                 # - frame in the middle of a legit venue gap |                 gap_report: str = ( | ||||||
|                 # - history actually began at the `last_start_dt` |                     f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n' | ||||||
|                 # - some other unknown error (ib blocking the |                     f'{mod.name} -> tf@fqme: {timeframe}@{mkt.fqme}\n' | ||||||
|                 #   history bc they don't want you seeing how they |                     f'last_start_dt: {orig_last_start_dt}\n\n' | ||||||
|                 #   cucked all the tinas..) |                     f'bf_until: {backfill_until_dt}\n' | ||||||
|                 if dur := frame_types.get(timeframe): |                 ) | ||||||
|                     # decrement by a frame's worth of duration and |                 # EMPTY FRAME signal with 3 (likely) causes: | ||||||
|                     # retry a few times. |                 # | ||||||
|                     last_start_dt.subtract( |                 # 1. range contains legit gap in venue history | ||||||
|                         seconds=dur.total_seconds() |                 # 2. history actually (edge case) **began** at the | ||||||
|  |                 #    value `last_start_dt` | ||||||
|  |                 # 3. some other unknown error (ib blocking the | ||||||
|  |                 #    history-query bc they don't want you seeing how | ||||||
|  |                 #    they cucked all the tinas.. like with options | ||||||
|  |                 #    hist) | ||||||
|  |                 # | ||||||
|  |                 if def_frame_duration: | ||||||
|  |                     # decrement by a duration's (frame) worth of time | ||||||
|  |                     # as maybe indicated by the backend to see if we | ||||||
|  |                     # can get older data before this possible | ||||||
|  |                     # "history gap". | ||||||
|  |                     last_start_dt: datetime = last_start_dt.subtract( | ||||||
|  |                         seconds=def_frame_duration.total_seconds() | ||||||
|                     ) |                     ) | ||||||
|                     log.warning( |                     gap_report += ( | ||||||
|                         f'{mod.name} -> EMPTY FRAME for end_dt?\n' |                         f'Decrementing `end_dt` and retrying with,\n' | ||||||
|                         f'tf@fqme: {timeframe}@{mkt.fqme}\n' |                         f'def_frame_duration: {def_frame_duration}\n' | ||||||
|                         'bf_until <- last_start_dt:\n' |                         f'(new) last_start_dt: {last_start_dt}\n' | ||||||
|                         f'{backfill_until_dt} <- {last_start_dt}\n' |  | ||||||
|                         f'Decrementing `end_dt` by {dur} and retry..\n' |  | ||||||
|                     ) |                     ) | ||||||
|  |                     log.warning(gap_report) | ||||||
|  |                     # skip writing to shm/tsdb and try the next | ||||||
|  |                     # duration's worth of prior history. | ||||||
|                     continue |                     continue | ||||||
| 
 | 
 | ||||||
|  |                 else: | ||||||
|  |                     # await tractor.pause() | ||||||
|  |                     raise DataUnavailable(gap_report) | ||||||
|  | 
 | ||||||
|             # broker says there never was or is no more history to pull |             # broker says there never was or is no more history to pull | ||||||
|             except DataUnavailable: |             except DataUnavailable as due: | ||||||
|  |                 message: str = due.args[0] | ||||||
|                 log.warning( |                 log.warning( | ||||||
|                     f'NO-MORE-DATA in range?\n' |                     f'Provider {mod.name!r} halted backfill due to,\n\n' | ||||||
|                     f'`{mod.name}` halted history:\n' | 
 | ||||||
|                     f'tf@fqme: {timeframe}@{mkt.fqme}\n' |                     f'{message}\n' | ||||||
|                     'bf_until <- last_start_dt:\n' | 
 | ||||||
|                     f'{backfill_until_dt} <- {last_start_dt}\n' |                     f'fqme: {mkt.fqme}\n' | ||||||
|  |                     f'timeframe: {timeframe}\n' | ||||||
|  |                     f'last_start_dt: {last_start_dt}\n' | ||||||
|  |                     f'bf_until: {backfill_until_dt}\n' | ||||||
|                 ) |                 ) | ||||||
|                 # UGH: what's a better way? |                 # UGH: what's a better way? | ||||||
|                 # TODO: backends are responsible for being correct on |                 # TODO: backends are responsible for being correct on | ||||||
|  | @ -465,34 +490,54 @@ async def start_backfill( | ||||||
|                 #     to halt the request loop until the condition is |                 #     to halt the request loop until the condition is | ||||||
|                 #     resolved or should the backend be entirely in |                 #     resolved or should the backend be entirely in | ||||||
|                 #     charge of solving such faults? yes, right? |                 #     charge of solving such faults? yes, right? | ||||||
|                 # if timeframe > 1: |  | ||||||
|                 #     await tractor.pause() |  | ||||||
|                 return |                 return | ||||||
| 
 | 
 | ||||||
|  |             time: np.ndarray = array['time'] | ||||||
|             assert ( |             assert ( | ||||||
|                 array['time'][0] |                 time[0] | ||||||
|                 == |                 == | ||||||
|                 next_start_dt.timestamp() |                 next_start_dt.timestamp() | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             diff = last_start_dt - next_start_dt |             assert time[-1] == next_end_dt.timestamp() | ||||||
|             frame_time_diff_s = diff.seconds | 
 | ||||||
|  |             expected_dur: Interval = last_start_dt - next_start_dt | ||||||
| 
 | 
 | ||||||
|             # frame's worth of sample-period-steps, in seconds |             # frame's worth of sample-period-steps, in seconds | ||||||
|             frame_size_s: float = len(array) * timeframe |             frame_size_s: float = len(array) * timeframe | ||||||
|             expected_frame_size_s: float = frame_size_s + timeframe |             recv_frame_dur: Duration = ( | ||||||
|             if frame_time_diff_s > expected_frame_size_s: |                 from_timestamp(array[-1]['time']) | ||||||
| 
 |                 - | ||||||
|  |                 from_timestamp(array[0]['time']) | ||||||
|  |             ) | ||||||
|  |             if ( | ||||||
|  |                 (lt_frame := (recv_frame_dur < expected_dur)) | ||||||
|  |                 or | ||||||
|  |                 (null_frame := (frame_size_s == 0)) | ||||||
|  |                 # ^XXX, should NEVER hit now! | ||||||
|  |             ): | ||||||
|                 # XXX: query result includes a start point prior to our |                 # XXX: query result includes a start point prior to our | ||||||
|                 # expected "frame size" and thus is likely some kind of |                 # expected "frame size" and thus is likely some kind of | ||||||
|                 # history gap (eg. market closed period, outage, etc.) |                 # history gap (eg. market closed period, outage, etc.) | ||||||
|                 # so just report it to console for now. |                 # so just report it to console for now. | ||||||
|  |                 if lt_frame: | ||||||
|  |                     reason = 'Possible GAP (or first-datum)' | ||||||
|  |                 else: | ||||||
|  |                     assert null_frame | ||||||
|  |                     reason = 'NULL-FRAME' | ||||||
|  | 
 | ||||||
|  |                 missing_dur: Interval = expected_dur.end - recv_frame_dur.end | ||||||
|                 log.warning( |                 log.warning( | ||||||
|                     'GAP DETECTED:\n' |                     f'{timeframe}s-series {reason} detected!\n' | ||||||
|                     f'last_start_dt: {last_start_dt}\n' |                     f'fqme: {mkt.fqme}\n' | ||||||
|                     f'diff: {diff}\n' |                     f'last_start_dt: {last_start_dt}\n\n' | ||||||
|                     f'frame_time_diff_s: {frame_time_diff_s}\n' |                     f'recv interval: {recv_frame_dur}\n' | ||||||
|  |                     f'expected interval: {expected_dur}\n\n' | ||||||
|  | 
 | ||||||
|  |                     f'Missing duration of history of {missing_dur.in_words()!r}\n' | ||||||
|  |                     f'{missing_dur}\n' | ||||||
|                 ) |                 ) | ||||||
|  |                 # await tractor.pause() | ||||||
| 
 | 
 | ||||||
|             to_push = diff_history( |             to_push = diff_history( | ||||||
|                 array, |                 array, | ||||||
|  | @ -567,7 +612,8 @@ async def start_backfill( | ||||||
|             # long-term storage. |             # long-term storage. | ||||||
|             if ( |             if ( | ||||||
|                 storage is not None |                 storage is not None | ||||||
|                 and write_tsdb |                 and | ||||||
|  |                 write_tsdb | ||||||
|             ): |             ): | ||||||
|                 log.info( |                 log.info( | ||||||
|                     f'Writing {ln} frame to storage:\n' |                     f'Writing {ln} frame to storage:\n' | ||||||
|  | @ -688,7 +734,7 @@ async def back_load_from_tsdb( | ||||||
|         last_tsdb_dt |         last_tsdb_dt | ||||||
|         and latest_start_dt |         and latest_start_dt | ||||||
|     ): |     ): | ||||||
|         backfilled_size_s = ( |         backfilled_size_s: Duration = ( | ||||||
|             latest_start_dt - last_tsdb_dt |             latest_start_dt - last_tsdb_dt | ||||||
|         ).seconds |         ).seconds | ||||||
|         # if the shm buffer len is not large enough to contain |         # if the shm buffer len is not large enough to contain | ||||||
|  | @ -911,6 +957,8 @@ async def tsdb_backfill( | ||||||
|             f'{pformat(config)}\n' |             f'{pformat(config)}\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |         # concurrently load the provider's most-recent-frame AND any | ||||||
|  |         # pre-existing tsdb history already saved in `piker` storage. | ||||||
|         dt_eps: list[DateTime, DateTime] = [] |         dt_eps: list[DateTime, DateTime] = [] | ||||||
|         async with trio.open_nursery() as tn: |         async with trio.open_nursery() as tn: | ||||||
|             tn.start_soon( |             tn.start_soon( | ||||||
|  | @ -921,7 +969,6 @@ async def tsdb_backfill( | ||||||
|                 timeframe, |                 timeframe, | ||||||
|                 config, |                 config, | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             tsdb_entry: tuple = await load_tsdb_hist( |             tsdb_entry: tuple = await load_tsdb_hist( | ||||||
|                 storage, |                 storage, | ||||||
|                 mkt, |                 mkt, | ||||||
|  | @ -950,6 +997,25 @@ async def tsdb_backfill( | ||||||
|                 mr_end_dt, |                 mr_end_dt, | ||||||
|             ) = dt_eps |             ) = dt_eps | ||||||
| 
 | 
 | ||||||
|  |             first_frame_dur_s: Duration = (mr_end_dt - mr_start_dt).seconds | ||||||
|  |             calced_frame_size: Duration = mk_duration( | ||||||
|  |                 seconds=first_frame_dur_s, | ||||||
|  |             ) | ||||||
|  |             # NOTE, attempt to use the backend declared default frame | ||||||
|  |             # sizing (as allowed by their time-series query APIs) and | ||||||
|  |             # if not provided try to construct a default from the | ||||||
|  |             # first frame received above. | ||||||
|  |             def_frame_durs: dict[ | ||||||
|  |                 int, | ||||||
|  |                 Duration, | ||||||
|  |             ]|None = config.get('frame_types', None) | ||||||
|  |             if def_frame_durs: | ||||||
|  |                 def_frame_size: Duration = def_frame_durs[timeframe] | ||||||
|  |                 assert def_frame_size == calced_frame_size | ||||||
|  |             else: | ||||||
|  |                 # use what we calced from first frame above. | ||||||
|  |                 def_frame_size = calced_frame_size | ||||||
|  | 
 | ||||||
|             # NOTE: when there's no offline data, there's 2 cases: |             # NOTE: when there's no offline data, there's 2 cases: | ||||||
|             # - data backend doesn't support timeframe/sample |             # - data backend doesn't support timeframe/sample | ||||||
|             #   period (in which case `dt_eps` should be `None` and |             #   period (in which case `dt_eps` should be `None` and | ||||||
|  | @ -980,7 +1046,7 @@ async def tsdb_backfill( | ||||||
|                     partial( |                     partial( | ||||||
|                         start_backfill, |                         start_backfill, | ||||||
|                         get_hist=get_hist, |                         get_hist=get_hist, | ||||||
|                         frame_types=config.get('frame_types', None), |                         def_frame_duration=def_frame_size, | ||||||
|                         mod=mod, |                         mod=mod, | ||||||
|                         mkt=mkt, |                         mkt=mkt, | ||||||
|                         shm=shm, |                         shm=shm, | ||||||
|  |  | ||||||
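A minimal sketch of the fallback frame-duration calculation added in `tsdb_backfill()` above, assuming pendulum 3.x; the two datetimes stand in for the most-recent frame's endpoints returned by the provider:

    from pendulum import datetime, duration

    mr_start_dt = datetime(2024, 1, 1, 0, 0, 0)
    mr_end_dt = datetime(2024, 1, 1, 16, 40, 0)  # 1000 bars of 60s each

    # when the backend declares no `frame_types` in its
    # `open_history_client()` config, derive a default from the
    # first frame actually received.
    first_frame_dur_s: float = (mr_end_dt - mr_start_dt).total_seconds()
    calced_frame_size = duration(seconds=first_frame_dur_s)
    print(calced_frame_size.total_seconds())  # 60000.0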
										
											
File diff suppressed because it is too large

237 pyproject.toml
							|  | @ -15,8 +15,8 @@ | ||||||
| # You should have received a copy of the GNU Affero General Public License | # You should have received a copy of the GNU Affero General Public License | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| [build-system] | [build-system] | ||||||
| requires = ["poetry-core"] | requires = ["hatchling"] | ||||||
| build-backend = "poetry.core.masonry.api" | build-backend = "hatchling.build" | ||||||
| 
 | 
 | ||||||
| # ------ - ------ | # ------ - ------ | ||||||
| 
 | 
 | ||||||
|  | @ -25,130 +25,123 @@ build-backend = "poetry.core.masonry.api" | ||||||
| ignore = [] | ignore = [] | ||||||
| 
 | 
 | ||||||
| # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores | # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores | ||||||
| "piker/ui/qt.py" = [ | # "piker/ui/qt.py" = [ | ||||||
|   "E402", | #   "E402", | ||||||
|   'F401',  # unused imports (without __all__ or blah as blah) | #   'F401',  # unused imports (without __all__ or blah as blah) | ||||||
|   # "F841", # unused variable rules | #   # "F841", # unused variable rules | ||||||
| ] | # ] | ||||||
| # ignore-init-module-imports = false | # ignore-init-module-imports = false | ||||||
| 
 | 
 | ||||||
| # ------ - ------ | # ------ - ------ | ||||||
| 
 | 
 | ||||||
| [tool.poetry] |  | ||||||
| name = "piker" |  | ||||||
| version = "0.1.0.alpha0.dev0" |  | ||||||
| description = "trading gear for hackers" |  | ||||||
| authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"] |  | ||||||
| license = "AGPLv3" |  | ||||||
| readme = "README.rst" |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies] |  | ||||||
| async-generator = "^1.10" |  | ||||||
| attrs = "^23.1.0" |  | ||||||
| bidict = "^0.22.1" |  | ||||||
| colorama = "^0.4.6" |  | ||||||
| colorlog = "^6.7.0" |  | ||||||
| ib-insync = "^0.9.86" |  | ||||||
| msgspec = "^0.18.6" |  | ||||||
| numba = "^0.59.0" |  | ||||||
| numpy = "^1.25" |  | ||||||
| polars = "^0.18.13" |  | ||||||
| pygments = "^2.16.1" |  | ||||||
| python = ">=3.11, <3.13" |  | ||||||
| rich = "^13.5.2" |  | ||||||
| # setuptools = "^68.0.0" |  | ||||||
| tomli = "^2.0.1" |  | ||||||
| tomli-w = "^1.0.0" |  | ||||||
| trio-util = "^0.7.0" |  | ||||||
| trio-websocket = "^0.10.3" |  | ||||||
| typer = "^0.9.0" |  | ||||||
| rapidfuzz = "^3.5.2" |  | ||||||
| pdbp = "^1.5.0" |  | ||||||
| trio = "^0.24" |  | ||||||
| pendulum = "^3.0.0" |  | ||||||
| httpx = "^0.27.0" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.tractor] |  | ||||||
| develop = true |  | ||||||
| git = 'https://pikers.dev/goodboy/tractor.git' |  | ||||||
| branch = 'aio_abandons' |  | ||||||
| # path = "../tractor" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.asyncvnc] |  | ||||||
| git = 'https://github.com/pikers/asyncvnc.git' |  | ||||||
| branch = 'main' |  | ||||||
| 
 |  | ||||||
| [tool.poetry.dependencies.tomlkit] |  | ||||||
| develop = true |  | ||||||
| git = 'https://github.com/pikers/tomlkit.git' |  | ||||||
| branch = 'piker_pin' |  | ||||||
| # path = "../tomlkit/" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.group.uis] |  | ||||||
| optional = true |  | ||||||
| [tool.poetry.group.uis.dependencies] |  | ||||||
| # https://python-poetry.org/docs/managing-dependencies/#dependency-groups |  | ||||||
| # TODO: make sure the levenshtein shit compiles on nix.. |  | ||||||
| # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"} |  | ||||||
| rapidfuzz = "^3.2.0" |  | ||||||
| qdarkstyle = ">=3.0.2" |  | ||||||
| pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' } |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| pyqt6 = "^6.7.0" |  | ||||||
| 
 |  | ||||||
| [tool.poetry.group.dev] |  | ||||||
| optional = true |  | ||||||
| [tool.poetry.group.dev.dependencies] |  | ||||||
| # testing / CI |  | ||||||
| pytest = "^6.0.0" |  | ||||||
| elasticsearch = "^8.9.0" |  | ||||||
| xonsh = "^0.14.2" |  | ||||||
| prompt-toolkit = "3.0.40" |  | ||||||
| cython = "^3.0.0" |  | ||||||
| greenback = "^1.1.1" |  | ||||||
| 
 |  | ||||||
| # console ehancements and eventually remote debugging |  | ||||||
| # extras/helpers. |  | ||||||
| # TODO: add a toolset that makes debugging a `pikerd` service |  | ||||||
| # (tree) easy to hack on directly using more or less the local env: |  | ||||||
| # - xonsh + xxh |  | ||||||
| # - rsyscall + pdbp |  | ||||||
| # - actor runtime control console like BEAM/OTP |  | ||||||
| 
 |  | ||||||
| # ------ - ------ |  | ||||||
| 
 |  | ||||||
| # TODO: add an `--only daemon` group for running non-ui / pikerd |  | ||||||
| # service tree in distributed mode B) |  | ||||||
| # https://python-poetry.org/docs/managing-dependencies/#installing-group-dependencies |  | ||||||
| # [tool.poetry.group.daemon.dependencies] |  | ||||||
| 
 |  | ||||||
| [tool.poetry.scripts] |  | ||||||
| piker = 'piker.cli:cli' |  | ||||||
| pikerd = 'piker.cli:pikerd' |  | ||||||
| ledger = 'piker.accounting.cli:ledger' |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| [project] | [project] | ||||||
| keywords=[ | name = "piker" | ||||||
|   "async", | version = "0.1.0a0dev0" | ||||||
|   "trading", | description = "trading gear for hackers" | ||||||
|   "finance", | authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] | ||||||
|   "quant", | requires-python = ">=3.12, <3.13" | ||||||
|   "charting", | license = "AGPL-3.0-or-later" | ||||||
|  | readme = "README.rst" | ||||||
|  | keywords = [ | ||||||
|  |     "async", | ||||||
|  |     "trading", | ||||||
|  |     "finance", | ||||||
|  |     "quant", | ||||||
|  |     "charting", | ||||||
| ] | ] | ||||||
| classifiers=[ | classifiers = [ | ||||||
|   'Development Status :: 3 - Alpha', |     "Development Status :: 3 - Alpha", | ||||||
|   "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", |     "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", | ||||||
|   'Operating System :: POSIX :: Linux', |     "Operating System :: POSIX :: Linux", | ||||||
|   "Programming Language :: Python :: Implementation :: CPython", |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|   "Programming Language :: Python :: 3 :: Only", |     "Programming Language :: Python :: 3 :: Only", | ||||||
|   "Programming Language :: Python :: 3.11", |     "Programming Language :: Python :: 3.11", | ||||||
|   "Programming Language :: Python :: 3.12", |     "Programming Language :: Python :: 3.12", | ||||||
|   'Intended Audience :: Financial and Insurance Industry', |     "Intended Audience :: Financial and Insurance Industry", | ||||||
|   'Intended Audience :: Science/Research', |     "Intended Audience :: Science/Research", | ||||||
|   'Intended Audience :: Developers', |     "Intended Audience :: Developers", | ||||||
|   'Intended Audience :: Education', |     "Intended Audience :: Education", | ||||||
| ] | ] | ||||||
|  | dependencies = [ | ||||||
|  |     "async-generator >=1.10, <2.0.0", | ||||||
|  |     "attrs >=23.1.0, <24.0.0", | ||||||
|  |     "bidict >=0.22.1, <0.23.0", | ||||||
|  |     "colorama >=0.4.6, <0.5.0", | ||||||
|  |     "colorlog >=6.7.0, <7.0.0", | ||||||
|  |     "ib-insync >=0.9.86, <0.10.0", | ||||||
|  |     "numba >=0.59.0, <0.60.0", | ||||||
|  |     "numpy >=1.25, <2.0", | ||||||
|  |     "polars >=0.18.13, <0.19.0", | ||||||
|  |     "pygments >=2.16.1, <3.0.0", | ||||||
|  |     "rich >=13.5.2, <14.0.0", | ||||||
|  |     "tomli >=2.0.1, <3.0.0", | ||||||
|  |     "tomli-w >=1.0.0, <2.0.0", | ||||||
|  |     "trio-util >=0.7.0, <0.8.0", | ||||||
|  |     "trio-websocket >=0.10.3, <0.11.0", | ||||||
|  |     "typer >=0.9.0, <1.0.0", | ||||||
|  |     "rapidfuzz >=3.5.2, <4.0.0", | ||||||
|  |     "pdbp >=1.5.0, <2.0.0", | ||||||
|  |     "trio >=0.24, <0.25", | ||||||
|  |     "pendulum >=3.0.0, <4.0.0", | ||||||
|  |     "httpx >=0.27.0, <0.28.0", | ||||||
|  |     "cryptofeed >=2.4.0, <3.0.0", | ||||||
|  |     "pyarrow >=17.0.0, <18.0.0", | ||||||
|  |     "websockets ==12.0", | ||||||
|  |     "msgspec", | ||||||
|  |     "tractor", | ||||||
|  |     "asyncvnc", | ||||||
|  |     "tomlkit", | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [project.optional-dependencies] | ||||||
|  | uis = [ | ||||||
|  |     # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies | ||||||
|  |     # TODO: make sure the levenshtein shit compiles on nix.. | ||||||
|  |     # rapidfuzz = {extras = ["speedup"], version = "^0.18.0"} | ||||||
|  |     "rapidfuzz >=3.2.0, <4.0.0", | ||||||
|  |     "qdarkstyle >=3.0.2, <4.0.0", | ||||||
|  |     "pyqt6 >=6.7.0, <7.0.0", | ||||||
|  |     "pyqtgraph", | ||||||
|  |      | ||||||
|  |     # ------ - ------ | ||||||
|  |      | ||||||
|  |     # TODO: add an `--only daemon` group for running non-ui / pikerd | ||||||
|  |     # service tree in distributed mode B) | ||||||
|  |     # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies | ||||||
|  |     # [project.optional-dependencies] | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [dependency-groups] | ||||||
|  | dev = [ | ||||||
|  |     "pytest >=6.0.0, <7.0.0", | ||||||
|  |     "elasticsearch >=8.9.0, <9.0.0", | ||||||
|  |     "xonsh >=0.14.2, <0.15.0", | ||||||
|  |     "prompt-toolkit ==3.0.40", | ||||||
|  |     "cython >=3.0.0, <4.0.0", | ||||||
|  |     "greenback >=1.1.1, <2.0.0", | ||||||
|  |     # console enhancements and eventually remote debugging | ||||||
|  |     # extras/helpers. | ||||||
|  |     # TODO: add a toolset that makes debugging a `pikerd` service | ||||||
|  |     # (tree) easy to hack on directly using more or less the local env: | ||||||
|  |     # - xonsh + xxh | ||||||
|  |     # - rsyscall + pdbp | ||||||
|  |     # - actor runtime control console like BEAM/OTP | ||||||
|  | ] | ||||||
|  | 
 | ||||||
|  | [project.scripts] | ||||||
|  | piker = "piker.cli:cli" | ||||||
|  | pikerd = "piker.cli:pikerd" | ||||||
|  | ledger = "piker.accounting.cli:ledger" | ||||||
|  | 
 | ||||||
|  | [tool.hatch.build.targets.sdist] | ||||||
|  | include = ["piker"] | ||||||
|  | 
 | ||||||
|  | [tool.hatch.build.targets.wheel] | ||||||
|  | include = ["piker"] | ||||||
|  | 
 | ||||||
|  | [tool.uv.sources] | ||||||
|  | pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" } | ||||||
|  | asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" } | ||||||
|  | tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" } | ||||||
|  | msgspec = { git = "https://github.com/jcrist/msgspec.git" } | ||||||
|  | tractor = { path = "../tractor" } | ||||||
|  |  | ||||||
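A quick way to sanity-check the new PEP 621 metadata and `uv` source pins above using only the stdlib (Python 3.11+), run from the repo root:

    import tomllib

    with open('pyproject.toml', 'rb') as f:
        pyproject = tomllib.load(f)

    project = pyproject['project']
    print(project['name'], project['version'])            # piker 0.1.0a0dev0
    print(project['optional-dependencies']['uis'])        # the qt/ui extras
    print(pyproject['tool']['uv']['sources']['tractor'])  # {'path': '../tractor'}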