Compare commits

57 Commits: db2e72a53d ... bf195e8796
| SHA1 |
|---|
| bf195e8796 |
| cbe274089e |
| 879cf3a9f3 |
| ece32f8ed0 |
| f3735dd712 |
| 39542fdb50 |
| a75cfbb124 |
| 7b6410c535 |
| 23e4f74c7a |
| c32d79f04b |
| b560319043 |
| a06e7784f1 |
| 12a13768bf |
| 73ef2afe30 |
| d425d36fc3 |
| 4e963e27d0 |
| 3dcee16bf6 |
| 1f41b151d7 |
| e8c196fd88 |
| 7cefb202fb |
| ddb4c0269f |
| 139f62f4de |
| 338e292002 |
| 13af0a90eb |
| e84781ca1e |
| 576b15e2c6 |
| b7e54571ea |
| 1a295c0c21 |
| f057a20bfa |
| 9dba47902e |
| e0ecef04bb |
| 266347bcdb |
| 051d43b559 |
| ac3ea5d960 |
| cd1ad13720 |
| 941340f853 |
| b046151464 |
| f3d810f3ef |
| 1c433e7bda |
| af0bba5667 |
| 28bbf11484 |
| d1c00de05b |
| 30caac4c27 |
| 3de985d3cd |
| cf46671e4b |
| 87b0ac2331 |
| d014f6f37d |
| f50e7ad5f0 |
| de55fbd44a |
| e941afc491 |
| 2ed8b47883 |
| bda7e69ad9 |
| 0223074d24 |
| 77d73d27db |
| 411b517d8d |
| 12250a7bb1 |
| ddfffc38b0 |
---

@@ -0,0 +1,82 @@
with (import <nixpkgs> {});
with python312Packages;
let
  glibStorePath = lib.getLib glib;
  qtpyStorePath = lib.getLib qtpy;
  pyqt6StorePath = lib.getLib pyqt6;
  pyqt6SipStorePath = lib.getLib pyqt6-sip;
  qt6baseStorePath = lib.getLib qt6.qtbase;
  rapidfuzzStorePath = lib.getLib rapidfuzz;
  qdarkstyleStorePath = lib.getLib qdarkstyle;
in
stdenv.mkDerivation {
  name = "piker-qt6-poetry-shell";
  buildInputs = [
    # System requirements.
    glib
    qt6.qtbase
    libgcc.lib

    # Python requirements.
    python312Full
    poetry-core
    qdarkstyle
    rapidfuzz
    pyqt6
    qtpy
  ];
  src = null;
  shellHook = ''
    set -e

    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${libgcc.lib}/lib:${glibStorePath}/lib

    # Set the Qt plugin path
    # export QT_DEBUG_PLUGINS=1

    QTBASE_PATH="${qt6baseStorePath}"
    echo "qtbase path:    $QTBASE_PATH"
    echo ""
    export QT_PLUGIN_PATH="$QTBASE_PATH/lib/qt-6/plugins"
    export QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"
    echo "qt plugin path: $QT_PLUGIN_PATH"
    echo ""

    # Maybe create venv & install deps
    poetry install --with uis

    # Use pyqt6 from System, patch activate script
    ACTIVATE_SCRIPT_PATH="$(poetry env info --path)/bin/activate"

    export RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
    export QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
    export QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
    export PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
    export PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"
    echo "rapidfuzz at:   $RPDFUZZ_PATH"
    echo "qdarkstyle at:  $QDRKSTYLE_PATH"
    echo "qtpy at:        $QTPY_PATH"
    echo "pyqt6 at:       $PYQT6_PATH"
    echo "pyqt6-sip at:   $PYQT6_SIP_PATH"
    echo ""

    PATCH="export PYTHONPATH=\""

    PATCH="$PATCH\$RPDFUZZ_PATH"
    PATCH="$PATCH:\$QDRKSTYLE_PATH"
    PATCH="$PATCH:\$QTPY_PATH"
    PATCH="$PATCH:\$PYQT6_PATH"
    PATCH="$PATCH:\$PYQT6_SIP_PATH"

    PATCH="$PATCH\""

    if grep -q "$PATCH" "$ACTIVATE_SCRIPT_PATH"; then
        echo "venv is already patched."
    else
        echo "patching $ACTIVATE_SCRIPT_PATH to use pyqt6 from nixos..."
        sed -i "\$i$PATCH" "$ACTIVATE_SCRIPT_PATH"
    fi

    poetry shell
  '';
}
---

@@ -30,7 +30,8 @@ from types import ModuleType
from typing import (
    Any,
    Iterator,
    Generator
    Generator,
    TYPE_CHECKING,
)

import pendulum

@@ -59,8 +60,10 @@ from ..clearing._messages import (
    BrokerdPosition,
)
from piker.types import Struct
from piker.data._symcache import SymbologyCache
from ..log import get_logger
from piker.log import get_logger

if TYPE_CHECKING:
    from piker.data._symcache import SymbologyCache


log = get_logger(__name__)


@@ -493,6 +496,17 @@ class Account(Struct):

        _mktmap_table: dict[str, MktPair] | None = None,

        only_require: list[str]|True = True,
        # ^list of fqmes that are "required" to be processed from
        # this ledger pass; we often don't care about others and
        # definitely shouldn't always error in such cases.
        # (eg. broker backend loaded that doesn't yet support the
        # symcache but also, inside the paper engine we don't ad-hoc
        # request `get_mkt_info()` for every symbol in the ledger,
        # only the one for which we're simulating against).
        # TODO, not sure if there's a better soln for this, ideally
        # all backends get symcache support afap i guess..

    ) -> dict[str, Position]:
        '''
        Update the internal `.pps[str, Position]` table from input

@@ -535,11 +549,32 @@ class Account(Struct):
                if _mktmap_table is None:
                    raise

                required: bool = (
                    only_require is True
                    or (
                        only_require is not True
                        and
                        fqme in only_require
                    )
                )
                # XXX: caller is allowed to provide a fallback
                # mktmap table for the case where a new position is
                # being added and the preloaded symcache didn't
                # have this entry prior (eg. with frickin IB..)
                mkt = _mktmap_table[fqme]
                if (
                    not (mkt := _mktmap_table.get(fqme))
                    and
                    required
                ):
                    raise

                elif not required:
                    continue

                else:
                    # should be an entry retrieved somewhere
                    assert mkt


            if not (pos := pps.get(bs_mktid)):

@@ -656,7 +691,7 @@ class Account(Struct):
    def write_config(self) -> None:
        '''
        Write the current account state to the user's account TOML file, normally
        something like ``pps.toml``.
        something like `pps.toml`.

        '''
        # TODO: show diff output?
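
The new `only_require` kwarg pairs with the `_mktmap_table` fallback: a caller that preloads its own market table can limit hard failures to just those fqmes. A hedged sketch of the calling convention, mirroring the paper-engine hunk further down this diff; `update_from_ledger` is the assumed method name for this hunk, and `acnt`, `ledger` and `mkt_by_fqme` are placeholder objects:

```python
# sketch only: `acnt`, `ledger` and `mkt_by_fqme` stand in for the
# real `Account`, ledger table and preloaded fqme -> MktPair map.
pps = acnt.update_from_ledger(  # assumed method name for this hunk
    ledger,

    # fallback lookup for fqmes missing from the symcache,
    _mktmap_table=mkt_by_fqme,

    # only raise on fqmes we actually preloaded; any other ledger
    # entry is skipped (`continue`) instead of erroring the pass.
    only_require=list(mkt_by_fqme),
)
```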
---

@@ -51,6 +51,7 @@ __brokers__: list[str] = [
    'ib',
    'kraken',
    'kucoin',
    'deribit',

    # broken but used to work
    # 'questrade',

@@ -61,7 +62,6 @@ __brokers__: list[str] = [
    # wstrade
    # iex

    # deribit
    # bitso
]

|  |  | |||
|  | @ -181,7 +181,6 @@ class FutesPair(Pair): | |||
|     quoteAsset: str  # 'USDT', | ||||
|     quotePrecision: int  # 8, | ||||
|     requiredMarginPercent: float  # '5.0000', | ||||
|     settlePlan: int  # 0, | ||||
|     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'], | ||||
|     triggerProtect: float  # '0.0500', | ||||
|     underlyingSubType: list[str]  # ['PoW'], | ||||
|  |  | |||
---

@@ -25,6 +25,7 @@ from .api import (
    get_client,
)
from .feed import (
    get_mkt_info,
    open_history_client,
    open_symbol_search,
    stream_quotes,

@@ -34,15 +35,20 @@ from .feed import (
    # open_trade_dialog,
    # norm_trade_records,
# )
from .venues import (
    OptionPair,
)

log = get_logger(__name__)

__all__ = [
    'get_client',
#    'trades_dialogue',
    'get_mkt_info',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'OptionPair',
#    'norm_trade_records',
]

---

(File diff suppressed because it is too large)
---

@@ -18,38 +18,59 @@
Deribit backend.

'''
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import Any, Optional, Callable
from typing import (
    # Any,
    # Optional,
    Callable,
)
# from pprint import pformat
import time

import cryptofeed
import trio
from trio_typing import TaskStatus
import pendulum
from rapidfuzz import process as fuzzy
from pendulum import (
    from_timestamp,
)
import numpy as np
import tractor

from piker.brokers import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (
    BrokerError,
from piker.accounting import (
    Asset,
    MktPair,
    unpack_fqme,
)
from piker.brokers import (
    open_cached_client,
    NoData,
    DataUnavailable,
)

from cryptofeed import FeedHandler
from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
from piker._cacheables import (
    async_lifo_cache,
)
from cryptofeed.symbols import Symbol
from piker.log import (
    get_logger,
    mk_repr,
)
from piker.data.validate import FeedInit


from .api import (
    Client, Trade,
    get_config,
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
    Client,
    # get_config,
    piker_sym_to_cb_sym,
    cb_sym_to_deribit_inst,
    str_to_cb_sym,
    maybe_open_price_feed
)
from .venues import (
    Pair,
    OptionPair,
    Trade,
)

_spawn_kwargs = {
    'infect_asyncio': True,

@@ -64,90 +85,215 @@ async def open_history_client(
    mkt: MktPair,
) -> tuple[Callable, int]:

    instrument: str = mkt.bs_fqme
    # TODO implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        pair: OptionPair = client._pairs[mkt.dst.name]
        # XXX NOTE, the cuckers use ms !!!
        creation_time_s: int = pair.creation_timestamp/1000

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            array = await client.bars(
                instrument,
            array: np.ndarray = await client.bars(
                mkt,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if len(array) == 0:
                raise DataUnavailable
                if (
                    end_dt is None
                ):
                    raise DataUnavailable(
                        'No history seems to exist yet?\n\n'
                        f'{mkt}'
                    )
                elif (
                    end_dt
                    and
                    end_dt.timestamp() < creation_time_s
                ):
                    # the contract can't have history
                    # before it was created.
                    pair_type_str: str = type(pair).__name__
                    create_dt: datetime = from_timestamp(creation_time_s)
                    raise DataUnavailable(
                        f'No history prior to\n'
                        f'`{pair_type_str}.creation_timestamp: int = '
                        f'{pair.creation_timestamp}\n\n'
                        f'------ deribit sux ------\n'
                        f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n'
                        f'creation_time_s: {creation_time_s}\n'
                        f'create_dt: {create_dt}\n'
                    )
                raise NoData(
                    f'No frame for {start_dt} -> {end_dt}\n'
                )

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            start_dt = from_timestamp(array[0]['time'])
            end_dt = from_timestamp(array[-1]['time'])

            times = array['time']
            if not times.any():
                raise ValueError(
                    'Bad frame with null-times?\n\n'
                    f'{times}'
                )

            if end_dt is None:
                inow: int = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.pause()

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}
        yield (
            get_ohlc,
            {  # backfill config
                'erlangs': 3,
                'rate': 3,
            }
        )
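
Since `open_history_client()` yields the `get_ohlc` endpoint together with its backfill-rate config, a rough consumption sketch looks like the following, assuming (as piker's feed layer does) that the function is entered as an async context manager; `mkt` is a placeholder `MktPair` and only 1m bars are supported per the guard above:

```python
# hedged sketch: drive the yielded history endpoint once.
async with open_history_client(mkt) as (get_ohlc, config):
    array, start_dt, end_dt = await get_ohlc(
        timeframe=60,   # anything else raises DataUnavailable
        end_dt=None,    # request the "most recent" frame
    )
    # `config` ('erlangs'/'rate') bounds concurrent frame requests
    # in the backfill loop.
    print(f'{len(array)} bars {start_dt} -> {end_dt}, {config}')
```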


@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, Pair|OptionPair] | None:

    # uppercase since kraken bs_mktid is always upper
    if 'deribit' not in fqme.lower():
        fqme += '.deribit'

    mkt_mode: str = ''
    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)

    # NOTE: we always upper case all tokens to be consistent with
    # binance's symbology style for pairs, like `BTCUSDT`, but in
    # theory we could also just keep things lower case; as long as
    # we're consistent and the symcache matches whatever this func
    # returns, always!
    expiry: str = expiry.upper()
    venue: str = venue.upper()
    # venue_lower: str = venue.lower()

    mkt_mode: str = 'option'

    async with open_cached_client(
        'deribit',
    ) as client:

        assets: dict[str, Asset] = await client.get_assets()
        pair_str: str = mkt_ep.lower()

        pair: Pair = await client.exch_info(
            sym=pair_str,
        )
        mkt_mode = pair.venue
        client.mkt_mode = mkt_mode

        dst: Asset | None = assets.get(pair.bs_dst_asset)
        src: Asset | None = assets.get(pair.bs_src_asset)

        mkt = MktPair(
            dst=dst,
            src=src,
            price_tick=pair.price_tick,
            size_tick=pair.size_tick,
            bs_mktid=pair.symbol,
            venue=mkt_mode,
            broker='deribit',
            _atype=mkt_mode,
            _fqme_without_src=True,

            # expiry=pair.expiry,
            # XXX TODO, currently we don't use it since it's
            # already "described" in the `OptionPair.symbol: str`
            # and if we slap in the ISO repr it's kinda hideous..
            # -[ ] figure out the best either std
        )
        return mkt, pair
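
Thanks to the `@async_lifo_cache()` wrapper, repeat resolutions of the same fqme are served from cache; a hedged usage sketch follows, where the option symbol is purely illustrative of deribit's `BTC-<expiry>-<strike>-<C|P>` style naming, not a real contract:

```python
# hedged sketch: resolve an (illustrative) deribit option fqme.
mkt, pair = await get_mkt_info('btc-28mar25-50000-c.deribit')
assert mkt.broker == 'deribit'
print(mkt.bs_mktid, pair.price_tick, pair.size_tick)
```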


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)
    '''
    Open a live quote stream for the market set defined by `symbols`.

    sym = symbols[0]
    Internally this starts a `cryptofeed.FeedHandler` inside an `asyncio`-side
    task and relays through L1 and `Trade` msgs here to our `trio.Task`.

    '''
    sym = symbols[0].split('.')[0]
    init_msgs: list[FeedInit] = []

    # multiline nested `dict` formatter (since rn quote-msgs are
    # just that).
    pfmt: Callable[[str], str] = mk_repr(
        # so we can see `deribit`'s delightfully mega-long bs fields..
        maxstring=100,
    )

    async with (
        open_cached_client('deribit') as client,
        send_chan as send_chan
    ):
        mkt: MktPair
        pair: Pair
        mkt, pair = await get_mkt_info(sym)

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            sym: {
                'symbol_info': {
                    'asset_type': 'option',
                    'price_tick_size': 0.0005
                },
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }
        # build out init msgs according to latest spec
        init_msgs.append(
            FeedInit(
                mkt_info=mkt,
            )
        )
        # build `cryptofeed` feed-handle
        cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym)

        nsym = piker_sym_to_cb_sym(sym)
        from_cf: tractor.to_asyncio.LinkedTaskChannel
        async with maybe_open_price_feed(sym) as from_cf:

        async with maybe_open_price_feed(sym) as stream:
            # load the "last trades" summary
            last_trades_res: cryptofeed.LastTradesResult = await client.last_trades(
                cb_sym_to_deribit_inst(cf_sym),
                count=1,
            )
            last_trades: list[Trade] = last_trades_res.trades

            cache = await client.cache_symbols()
            # TODO, do we even need this or will the above always
            # work?
            # if not last_trades:
            #     await tractor.pause()
            #     async for typ, quote in from_cf:
            #         if typ == 'trade':
            #             last_trade = Trade(**(quote['data']))
            #             break

            last_trades = (await client.last_trades(
                cb_sym_to_deribit_inst(nsym), count=1)).trades
            # else:
            last_trade = Trade(
                **(last_trades[0])
            )

            if len(last_trades) == 0:
                last_trade = None
                async for typ, quote in stream:
                    if typ == 'trade':
                        last_trade = Trade(**(quote['data']))
                        break

            else:
                last_trade = Trade(**(last_trades[0]))

            first_quote = {
            first_quote: dict = {
                'symbol': sym,
                'last': last_trade.price,
                'brokerd_ts': last_trade.timestamp,

@@ -158,13 +304,84 @@ async def stream_quotes(
                    'broker_ts': last_trade.timestamp
                }]
            }
            task_status.started((init_msgs,  first_quote))
            task_status.started((
                init_msgs,
                first_quote,
            ))

            feed_is_live.set()

            async for typ, quote in stream:
                topic = quote['symbol']
                await send_chan.send({topic: quote})
            # NOTE XXX, static for now!
            # => since this only handles ONE mkt feed at a time we
            # don't need a lookup table to map interleaved quotes
            # from multiple possible mkt-pairs
            topic: str = mkt.bs_fqme

            # deliver until cancelled
            async for typ, ref in from_cf:
                match typ:
                    case 'trade':
                        trade: cryptofeed.types.Trade = ref

                        # TODO, re-impl this according to the ideal
                        # fqme for opts that we choose!!
                        bs_fqme: str = cb_sym_to_deribit_inst(
                            str_to_cb_sym(trade.symbol)
                        ).lower()

                        piker_quote: dict = {
                            'symbol': bs_fqme,
                            'last': trade.price,
                            'broker_ts': time.time(),
                            # ^TODO, name this `brokerd/datad_ts` and
                            # use `time.time_ns()` ??
                            'ticks': [{
                                'type': 'trade',
                                'price': float(trade.price),
                                'size': float(trade.amount),
                                'broker_ts': trade.timestamp,
                            }],
                        }
                        log.info(
                            f'deribit {typ!r} quote for {sym!r}\n\n'
                            f'{trade}\n\n'
                            f'{pfmt(piker_quote)}\n'
                        )

                    case 'l1':
                        book: cryptofeed.types.L1Book = ref

                        # TODO, so this is where we can possibly change things
                        # and instead lever the `MktPair.bs_fqme: str` output?
                        bs_fqme: str = cb_sym_to_deribit_inst(
                            str_to_cb_sym(book.symbol)
                        ).lower()

                        piker_quote: dict = {
                            'symbol': bs_fqme,
                            'ticks': [

                                {'type': 'bid',
                                 'price': float(book.bid_price),
                                 'size': float(book.bid_size)},

                                {'type': 'bsize',
                                 'price': float(book.bid_price),
                                 'size': float(book.bid_size),},

                                {'type': 'ask',
                                 'price': float(book.ask_price),
                                 'size': float(book.ask_size),},

                                {'type': 'asize',
                                 'price': float(book.ask_price),
                                 'size': float(book.ask_size),}
                            ]
                        }

                await send_chan.send({
                    topic: piker_quote,
                })


@tractor.context

@@ -174,12 +391,21 @@ async def open_symbol_search(
    async with open_cached_client('deribit') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        # cache = client._pairs
        await ctx.started()

        async with ctx.open_stream() as stream:

            pattern: str
            async for pattern in stream:
                # repack in dict form
                await stream.send(
                    await client.search_symbols(pattern))

                # NOTE: pattern fuzzy-matching is done within
                # the method impl.
                pairs: dict[str, Pair] = await client.search_symbols(
                    pattern,
                )
                # repack in fqme-keyed table
                byfqme: dict[str, Pair] = {}
                for pair in pairs.values():
                    byfqme[pair.bs_fqme] = pair

                await stream.send(byfqme)

---
@@ -0,0 +1,196 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Per-market data-type definitions and schema types.

"""
from __future__ import annotations
import pendulum
from typing import (
    Literal,
    Optional,
)
from decimal import Decimal

from piker.types import Struct


# API endpoint paths by venue / sub-API
_domain: str = 'deribit.com'
_url = f'https://www.{_domain}'

# WEBsocketz
_ws_url: str = f'wss://www.{_domain}/ws/api/v2'

# test nets
_testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2'

MarketType = Literal[
    'option'
]


def get_api_eps(venue: MarketType) -> tuple[str, str]:
    '''
    Return API ep root paths per venue.

    '''
    return {
        'option': (
            _ws_url,
        ),
    }[venue]


class Pair(Struct, frozen=True, kw_only=True):

    symbol: str

    # src
    quote_currency: str  # 'BTC'

    # dst
    base_currency: str  # "BTC",

    tick_size: float  # 0.0001 # [{'above_price': 0.005, 'tick_size': 0.0005}]
    tick_size_steps: list[dict[str, float]]

    @property
    def price_tick(self) -> Decimal:
        return Decimal(str(self.tick_size_steps[0]['above_price']))

    @property
    def size_tick(self) -> Decimal:
        return Decimal(str(self.tick_size))

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}'

    @property
    def bs_mktid(self) -> str:
        return f'{self.symbol}.{self.venue}'


class OptionPair(Pair, frozen=True):

    taker_commission: float  # 0.0003
    strike: float  # 5000.0
    settlement_period: str  # 'day'
    settlement_currency: str  # "BTC",
    rfq: bool  # false
    price_index: str  # 'btc_usd'
    option_type: str  # 'call'
    min_trade_amount: float  # 0.1
    maker_commission: float  # 0.0003
    kind: str  # 'option'
    is_active: bool  # true
    instrument_type: str  # 'reversed'
    instrument_name: str  # 'BTC-1SEP24-55000-C'
    instrument_id: int  # 364671
    expiration_timestamp: int  # 1725177600000
    creation_timestamp: int  # 1724918461000
    counter_currency: str  # 'USD'
    contract_size: float  # '1.0'
    block_trade_tick_size: float  # '0.0001'
    block_trade_min_trade_amount: int  # '25'
    block_trade_commission: float  # '0.003'

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.deribit:OptionPair'

    # TODO, impl this without the MM:SS part of
    # the `'THH:MM:SS..'` etc..
    @property
    def expiry(self) -> str:
        iso_date = pendulum.from_timestamp(
            self.expiration_timestamp / 1000
        ).isoformat()
        return iso_date

    @property
    def venue(self) -> str:
        return f'{self.instrument_type}_option'

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quote_currency}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.symbol}'


PAIRTYPES: dict[MarketType, Pair] = {
    'option': OptionPair,
}
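
`PAIRTYPES` gives generic decode paths a venue-keyed way to pick the right struct, while `OptionPair.expiry` converts deribit's millisecond epochs; the conversion is small enough to verify standalone with the example timestamp from the field comments above:

```python
import pendulum

# deribit reports ms-epochs; `OptionPair.expiry` does exactly this
# divide-by-1000 then ISO-format dance.
expiration_timestamp: int = 1725177600000  # example value from above
iso: str = pendulum.from_timestamp(
    expiration_timestamp / 1000
).isoformat()
print(iso)  # -> '2024-09-01T08:00:00+00:00'
```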


class JSONRPCResult(Struct):
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    error: Optional[dict] = None
    result: Optional[list[dict]] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    low: list[float]
    cost: list[float]
    high: list[float]
    open: list[float]
    close: list[float]
    ticks: list[int]
    status: str
    volume: list[float]


class Trade(Struct):
    iv: float
    price: float
    amount: float
    trade_id: str
    contracts: float
    direction: str
    trade_seq: int
    timestamp: int
    mark_price: float
    index_price: float
    tick_direction: int
    instrument_name: str
    combo_id: Optional[str] = ''
    combo_trade_id: Optional[int] = 0
    block_trade_id: Optional[str] = ''
    block_trade_leg_count: Optional[int] = 0


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool

---
@@ -111,6 +111,10 @@ class KucoinMktPair(Struct, frozen=True):
    quoteMaxSize: float
    quoteMinSize: float
    symbol: str  # our bs_mktid, kucoin's internal id
    feeCategory: int
    makerFeeCoefficient: float
    takerFeeCoefficient: float
    st: bool


class AccountTrade(Struct, frozen=True):

@@ -593,7 +597,7 @@ async def get_client() -> AsyncGenerator[Client, None]:
    '''
    async with (
        httpx.AsyncClient(
            base_url=f'https://api.kucoin.com/api',
            base_url='https://api.kucoin.com/api',
        ) as trio_client,
    ):
        client = Client(httpx_client=trio_client)

@@ -637,7 +641,7 @@ async def open_ping_task(
                await trio.sleep((ping_interval - 1000) / 1000)
                await ws.send_msg({'id': connect_id, 'type': 'ping'})

        log.info('Starting ping task for kucoin ws connection')
        log.warning('Starting ping task for kucoin ws connection')
        n.start_soon(ping_server)

        yield

@@ -649,9 +653,14 @@ async def open_ping_task(
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, KucoinMktPair]:
) -> tuple[
    MktPair,
    KucoinMktPair,
]:
    '''
    Query for and return a `MktPair` and `KucoinMktPair`.
    Query for and return both a `piker.accounting.MktPair` and
    `KucoinMktPair` from provided `fqme: str`
    (fully-qualified-market-endpoint).

    '''
    async with open_cached_client('kucoin') as client:

@@ -726,6 +735,8 @@ async def stream_quotes(

        log.info(f'Starting up quote stream(s) for {symbols}')
        for sym_str in symbols:
            mkt: MktPair
            pair: KucoinMktPair
            mkt, pair = await get_mkt_info(sym_str)
            init_msgs.append(
                FeedInit(mkt_info=mkt)

@@ -733,7 +744,11 @@ async def stream_quotes(

        ws: NoBsWs
        token, ping_interval = await client._get_ws_token()
        connect_id = str(uuid4())
        log.info(f'API reported ping_interval: {ping_interval}\n')

        connect_id: str = str(uuid4())
        typ: str
        quote: dict
        async with (
            open_autorecon_ws(
                (

@@ -747,20 +762,37 @@ async def stream_quotes(
                ),
            ) as ws,
            open_ping_task(ws, ping_interval, connect_id),
            aclosing(stream_messages(ws, sym_str)) as msg_gen,
            aclosing(
                iter_normed_quotes(
                    ws, sym_str
                )
            ) as iter_quotes,
        ):
            typ, quote = await anext(msg_gen)
            typ, quote = await anext(iter_quotes)

            while typ != 'trade':
                # take care to not unblock here until we get a real
                # trade quote
                typ, quote = await anext(msg_gen)
            # take care to not unblock here until we get a real
            # trade quote?
            # ^TODO, remove this right?
            # -[ ] what often blocks chart boot/new-feed switching
            #   since we're waiting for a live quote instead of just
            #   loading history afap..
            #  |_ XXX, not sure if we require a bit of rework to core
            #    feed init logic or if backends just gotta be
            #    changed up.. feel like there was some causality
            #    dilemma prolly only seen with IB too..
            # while typ != 'trade':
            #     typ, quote = await anext(iter_quotes)

            task_status.started((init_msgs, quote))
            feed_is_live.set()

            async for typ, msg in msg_gen:
                await send_chan.send({sym_str: msg})
            # XXX NOTE, DO NOT include the `.<backend>` suffix!
            # OW the sampling loop will not broadcast correctly..
            # since `bus._subscribers.setdefault(bs_fqme, set())`
            # is used inside `.data.open_feed_bus()` !!!
            topic: str = mkt.bs_fqme
            async for typ, quote in iter_quotes:
                await send_chan.send({topic: quote})
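
That `topic` note is load-bearing: the sampler looks up subscribers by the backend-side fqme that `open_feed_bus()` registered (see the `bus._subscribers.setdefault()` hunk near the end of this diff), so publishing under a `.<backend>`-suffixed key would find no one. A tiny self-contained sketch of the failure mode:

```python
# hedged sketch: subscriber sets are keyed by the bare bs_fqme..
bus_subscribers: dict[str, set] = {}
bus_subscribers.setdefault('btcusdt', set())  # open_feed_bus() side

# ..so a suffixed publish key silently matches nothing:
assert bus_subscribers.get('btcusdt') is not None
assert bus_subscribers.get('btcusdt.kucoin') is None  # quotes dropped
```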


@acm

@@ -815,7 +847,7 @@ async def subscribe(
            )


async def stream_messages(
async def iter_normed_quotes(
    ws: NoBsWs,
    sym: str,

@@ -846,6 +878,9 @@ async def stream_messages(

                yield 'trade', {
                    'symbol': sym,
                    # TODO, is 'last' even used elsewhere/a-good
                    # semantic? can't we just read the ticks with our
                    # `.data.ticktools.frame_ticks()`?
                    'last': trade_data.price,
                    'brokerd_ts': last_trade_ts,
                    'ticks': [

@@ -938,7 +973,7 @@ async def open_history_client(
            if end_dt is None:
                inow = round(time.time())

                print(
                log.debug(
                    f'difference in time between load and processing'
                    f'{inow - times[-1]}'
                )

---
@@ -653,7 +653,11 @@ class Router(Struct):
            flume = feed.flumes[fqme]
            first_quote: dict = flume.first_quote
            book: DarkBook = self.get_dark_book(broker)
            book.lasts[fqme]: float = float(first_quote['last'])

            if not (last := first_quote.get('last')):
                last: float = flume.rt_shm.array[-1]['close']

            book.lasts[fqme]: float = float(last)

            async with self.maybe_open_brokerd_dialog(
                brokermod=brokermod,

@@ -716,7 +720,7 @@ class Router(Struct):
            subs = self.subscribers[sub_key]

        sent_some: bool = False
        for client_stream in subs:
        for client_stream in subs.copy():
            try:
                await client_stream.send(msg)
                sent_some = True

@@ -1010,10 +1014,14 @@ async def translate_and_relay_brokerd_events(
                status_msg.brokerd_msg = msg
                status_msg.src = msg.broker_details['name']

                await router.client_broadcast(
                    status_msg.req.symbol,
                    status_msg,
                )
                if not status_msg.req:
                    # likely some order change state?
                    await tractor.pause()
                else:
                    await router.client_broadcast(
                        status_msg.req.symbol,
                        status_msg,
                    )

                if status == 'closed':
                    log.info(f'Execution for {oid} is complete!')

---

@@ -653,6 +653,7 @@ async def open_trade_dialog(
                # in) use manually constructed table from calling
                # the `.get_mkt_info()` provider EP above.
                _mktmap_table=mkt_by_fqme,
                only_require=list(mkt_by_fqme),
            )

            pp_msgs: list[BrokerdPosition] = []

---
@@ -30,6 +30,7 @@ import time
from typing import (
    Any,
    AsyncIterator,
    Callable,
    TYPE_CHECKING,
)

@@ -54,6 +55,9 @@ from ._util import (
    get_console_log,
)
from ..service import maybe_spawn_daemon
from piker.log import (
    mk_repr,
)

if TYPE_CHECKING:
    from ._sharedmem import (

@@ -575,7 +579,6 @@ async def open_sample_stream(


async def sample_and_broadcast(

    bus: _FeedsBus,  # noqa
    rt_shm: ShmArray,
    hist_shm: ShmArray,

@@ -596,11 +599,22 @@ async def sample_and_broadcast(

    overruns = Counter()

    # multiline nested `dict` formatter (since rn quote-msgs are
    # just that).
    pfmt: Callable[[str], str] = mk_repr()

    # iterate stream delivered by broker
    async for quotes in quote_stream:
        # print(quotes)

        # TODO: ``numba`` this!
        # XXX WARNING XXX only enable for debugging bc ow can cost
        # ALOT of perf with HF-feedz!!!
        #
        # log.info(
        #     'Rx live quotes:\n'
        #     f'{pfmt(quotes)}'
        # )

        # TODO: `numba` this!
        for broker_symbol, quote in quotes.items():
            # TODO: in theory you can send the IPC msg *before* writing
            # to the sharedmem array to decrease latency, however, that

@@ -673,6 +687,18 @@ async def sample_and_broadcast(
            sub_key: str = broker_symbol.lower()
            subs: set[Sub] = bus.get_subs(sub_key)

            if not subs:
                all_bs_fqmes: list[str] = list(
                    bus._subscribers.keys()
                )
                log.warning(
                    f'No subscribers for {brokername!r} live-quote ??\n'
                    f'broker_symbol: {broker_symbol}\n\n'

                    f'Maybe the backend-sys symbol does not match one of,\n'
                    f'{pfmt(all_bs_fqmes)}\n'
                )

            # NOTE: by default the broker backend doesn't append
            # its own "name" into the fqme schema (but maybe it
            # should?) so we have to manually generate the correct

---
@@ -273,7 +273,7 @@ async def _reconnect_forever(
                nobsws._connected.set()
                await trio.sleep_forever()
        except HandshakeError:
            log.exception(f'Retrying connection')
            log.exception('Retrying connection')

        # ws & nursery block ends

@@ -359,8 +359,8 @@ async def open_autorecon_ws


'''
JSONRPC response-request style machinery for transparent multiplexing of msgs
over a NoBsWs.
JSONRPC response-request style machinery for transparent multiplexing
of msgs over a `NoBsWs`.

'''

@@ -377,43 +377,77 @@ async def open_jsonrpc_session(
    url: str,
    start_id: int = 0,
    response_type: type = JSONRPCResult,
    request_type: Optional[type] = None,
    request_hook: Optional[Callable] = None,
    error_hook: Optional[Callable] = None,
    msg_recv_timeout: float = float('inf'),
    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
    # and options mkts are generally "slow moving"..
    #
    # FURTHER if we break the underlying ws connection then since we
    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
    # `_reconnect_forever()`, the jsonrpc "transport pipe" get's
    # broken and never restored with wtv init sequence is required to
    # re-establish a working req-resp session.

    # request_type: Optional[type] = None,
    # request_hook: Optional[Callable] = None,
    # error_hook: Optional[Callable] = None,
) -> Callable[[str, dict], dict]:

    # NOTE, store all request msgs so we can raise errors on the
    # caller side!
    req_msgs: dict[int, dict] = {}

    async with (
        trio.open_nursery() as n,
        open_autorecon_ws(url) as ws
        open_autorecon_ws(
            url=url,
            msg_recv_timeout=msg_recv_timeout,
        ) as ws
    ):
        rpc_id: Iterable = count(start_id)
        rpc_id: Iterable[int] = count(start_id)
        rpc_results: dict[int, dict] = {}

        async def json_rpc(method: str, params: dict) -> dict:
        async def json_rpc(
            method: str,
            params: dict,
        ) -> dict:
            '''
            Perform a JSON-RPC call and wait for the result; raise an
            exception if an error field is present in the response.

            '''
            nonlocal req_msgs

            req_id: int = next(rpc_id)
            msg = {
                'jsonrpc': '2.0',
                'id': next(rpc_id),
                'id': req_id,
                'method': method,
                'params': params
            }
            _id = msg['id']

            rpc_results[_id] = {
            result = rpc_results[_id] = {
                'result': None,
                'event': trio.Event()
                'error': None,
                'event': trio.Event(),  # signal caller resp arrived
            }
            req_msgs[_id] = msg

            await ws.send_msg(msg)

            # wait for response before unblocking requester code
            await rpc_results[_id]['event'].wait()

            ret = rpc_results[_id]['result']
            if (maybe_result := result['result']):
                ret = maybe_result
                del rpc_results[_id]

            del rpc_results[_id]
            else:
                err = result['error']
                raise Exception(
                    f'JSONRPC request failed\n'
                    f'req: {msg}\n'
                    f'resp: {err}\n'
                )

            if ret.error is not None:
                raise Exception(json.dumps(ret.error, indent=4))

@@ -428,6 +462,7 @@ async def open_jsonrpc_session(
            the server side.

            '''
            nonlocal req_msgs
            async for msg in ws:
                match msg:
                    case {

@@ -451,15 +486,29 @@ async def open_jsonrpc_session(
                        'params': _,
                    }:
                        log.debug(f'Received\n{msg}')
                        if request_hook:
                            await request_hook(request_type(**msg))
                        # if request_hook:
                        #     await request_hook(request_type(**msg))

                    case {
                        'error': error
                    }:
                        log.warning(f'Received\n{error}')
                        if error_hook:
                            await error_hook(response_type(**msg))
                        # if error_hook:
                        #     await error_hook(response_type(**msg))

                        # retrieve orig request msg, set error
                        # response in original "result" msg,
                        # THEN FINALLY set the event to signal caller
                        # to raise the error in the parent task.
                        req_id: int = msg['id']
                        req_msg: dict = req_msgs[req_id]
                        result: dict = rpc_results[req_id]
                        result['error'] = error
                        result['event'].set()
                        log.error(
                            f'JSONRPC request failed\n'
                            f'req: {req_msg}\n'
                            f'resp: {error}\n'
                        )

                    case _:
                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
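
With the hook kwargs commented out, a server-side error is now stored in the caller's `rpc_results` entry and raised from `json_rpc()` itself. A hedged sketch of the caller side, assuming the session is entered as an async context manager; the URL and method strings are illustrative of deribit, the only current user:

```python
# hedged sketch: errors now surface in the requesting task.
async with open_jsonrpc_session(
    'wss://test.deribit.com/ws/api/v2',  # illustrative endpoint
    msg_recv_timeout=60,  # finite, unlike the `inf` default
) as json_rpc:
    try:
        resp = await json_rpc('public/get_time', params={})
    except Exception as err:
        # carries both the original req msg and the error payload,
        # per the log/raise blocks above.
        print(f'rpc failed: {err}')
```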
---

@@ -540,7 +540,10 @@ async def open_feed_bus(
        # subscription since the backend isn't (yet) expected to
        # append its own name to the fqme, so we filter on keys
        # which *do not* include that name (e.g .ib) .
        bus._subscribers.setdefault(bs_fqme, set())
        bus._subscribers.setdefault(
            bs_fqme,
            set(),
        )

    # sync feed subscribers with flume handles
    await ctx.started(
---

piker/log.py (+28)

@@ -18,7 +18,11 @@
Log like a forester!
"""
import logging
import reprlib
import json
from typing import (
    Callable,
)

import tractor
from pygments import (

@@ -84,3 +88,27 @@ def colorize_json(
        # likeable styles: algol_nu, tango, monokai
        formatters.TerminalTrueColorFormatter(style=style)
    )


def mk_repr(
    **repr_kws,
) -> Callable[[str], str]:
    '''
    Allocate and deliver a `reprlib.Repr` instance with provided input
    settings using the std-lib's `reprlib` mod,
     * https://docs.python.org/3/library/reprlib.html

    ------ Ex. ------
    An up to 6-layer-nested `dict` as multi-line:
    - https://stackoverflow.com/a/79102479
    - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel

    '''
    def_kws: dict[str, int] = dict(
        indent=2,
        maxlevel=6,  # recursion levels
        maxstring=66,  # match editor line-len limit
    )
    def_kws |= repr_kws
    reprr = reprlib.Repr(**def_kws)
    return reprr.repr
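
`mk_repr()` backs the `pfmt` formatters added in the deribit feed and in `sample_and_broadcast()` above; a runnable usage sketch follows (note the constructor-kwarg form of `reprlib.Repr` needs Python 3.12+, which the nix shell above pins anyway):

```python
# hedged sketch mirroring the new call-sites:
pfmt = mk_repr(
    maxstring=100,  # widen string truncation for long option syms
)
quote: dict = {
    'symbol': 'btc-1sep24-55000-c',  # illustrative
    'ticks': [{'type': 'trade', 'price': 0.042, 'size': 1.0}],
}
# depth- and length-limited, indented multi-line repr for log msgs
print(pfmt(quote))
```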
---

@@ -161,7 +161,12 @@ class NativeStorageClient:

    def index_files(self):
        for path in self._datadir.iterdir():
            if path.name in {'borked', 'expired',}:
            if (
                path.name in {'borked', 'expired',}
                or
                '.parquet' not in str(path)
            ):
                # ignore all non-apache files (for now)
                continue

            key: str = path.name.rstrip('.parquet')

---
|  | @ -44,8 +44,10 @@ import trio | |||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from pendulum import ( | ||||
|     Interval, | ||||
|     DateTime, | ||||
|     Duration, | ||||
|     duration as mk_duration, | ||||
|     from_timestamp, | ||||
| ) | ||||
| import numpy as np | ||||
|  | @ -214,7 +216,8 @@ async def maybe_fill_null_segments( | |||
|         # pair, immediately stop backfilling? | ||||
|         if ( | ||||
|             start_dt | ||||
|             and end_dt < start_dt | ||||
|             and | ||||
|             end_dt < start_dt | ||||
|         ): | ||||
|             await tractor.pause() | ||||
|             break | ||||
|  | @ -262,6 +265,7 @@ async def maybe_fill_null_segments( | |||
|         except tractor.ContextCancelled: | ||||
|             # log.exception | ||||
|             await tractor.pause() | ||||
|             raise | ||||
| 
 | ||||
|     null_segs_detected.set() | ||||
|     # RECHECK for more null-gaps | ||||
|  | @ -349,7 +353,7 @@ async def maybe_fill_null_segments( | |||
| 
 | ||||
| async def start_backfill( | ||||
|     get_hist, | ||||
|     frame_types: dict[str, Duration] | None, | ||||
|     def_frame_duration: Duration, | ||||
|     mod: ModuleType, | ||||
|     mkt: MktPair, | ||||
|     shm: ShmArray, | ||||
|  | @ -379,22 +383,23 @@ async def start_backfill( | |||
|         update_start_on_prepend: bool = False | ||||
|         if backfill_until_dt is None: | ||||
| 
 | ||||
|             # TODO: drop this right and just expose the backfill | ||||
|             # limits inside a [storage] section in conf.toml? | ||||
|             # when no tsdb "last datum" is provided, we just load | ||||
|             # some near-term history. | ||||
|             # periods = { | ||||
|             #     1: {'days': 1}, | ||||
|             #     60: {'days': 14}, | ||||
|             # } | ||||
| 
 | ||||
|             # do a decently sized backfill and load it into storage. | ||||
|             # TODO: per-provider default history-durations? | ||||
|             # -[ ] inside the `open_history_client()` config allow | ||||
|             #    declaring the history duration limits instead of | ||||
|             #    guessing and/or applying the same limits to all? | ||||
|             # | ||||
|             # -[ ] allow declaring (default) per-provider backfill | ||||
|             #     limits inside a [storage] sub-section in conf.toml? | ||||
|             # | ||||
|             # NOTE, when no tsdb "last datum" is provided, we just | ||||
|             # load some near-term history: a "decently large" range | ||||
|             # for 60s bars and a much shorter one for 1s. | ||||
|             periods = { | ||||
|                 1: {'days': 2}, | ||||
|                 60: {'years': 6}, | ||||
|             } | ||||
|             period_duration: dict[str, int] = periods[timeframe] | ||||
|             update_start_on_prepend = True | ||||
|             update_start_on_prepend: bool = True | ||||
| 
 | ||||
|             # NOTE: manually set the "latest" datetime which we intend to | ||||
|             # backfill history "until" so as to adhere to the history | ||||
|  | @ -416,7 +421,6 @@ async def start_backfill( | |||
|                 f'backfill_until_dt: {backfill_until_dt}\n' | ||||
|                 f'last_start_dt: {last_start_dt}\n' | ||||
|             ) | ||||
| 
 | ||||
|             try: | ||||
|                 ( | ||||
|                     array, | ||||
|  | @ -426,37 +430,58 @@ async def start_backfill( | |||
|                     timeframe, | ||||
|                     end_dt=last_start_dt, | ||||
|                 ) | ||||
| 
 | ||||
|             except NoData as _daterr: | ||||
|                 # 3 cases: | ||||
|                 # - frame in the middle of a legit venue gap | ||||
|                 # - history actually began at the `last_start_dt` | ||||
|                 # - some other unknown error (ib blocking the | ||||
|                 #   history bc they don't want you seeing how they | ||||
|                 #   cucked all the tinas..) | ||||
|                 if dur := frame_types.get(timeframe): | ||||
|                     # decrement by a frame's worth of duration and | ||||
|                     # retry a few times. | ||||
|                     last_start_dt.subtract( | ||||
|                         seconds=dur.total_seconds() | ||||
|                 orig_last_start_dt: datetime = last_start_dt | ||||
|                 gap_report: str = ( | ||||
|                     f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n' | ||||
|                     f'{mod.name} -> tf@fqme: {timeframe}@{mkt.fqme}\n' | ||||
|                     f'last_start_dt: {orig_last_start_dt}\n\n' | ||||
|                     f'bf_until: {backfill_until_dt}\n' | ||||
|                 ) | ||||
|                 # EMPTY FRAME signal with 3 (likely) causes: | ||||
|                 # | ||||
|                 # 1. range contains legit gap in venue history | ||||
|                 # 2. history actually (edge case) **began** at the | ||||
|                 #    value `last_start_dt` | ||||
|                 # 3. some other unknown error (ib blocking the | ||||
|                 #    history-query bc they don't want you seeing how | ||||
|                 #    they cucked all the tinas.. like with options | ||||
|                 #    hist) | ||||
|                 # | ||||
|                 if def_frame_duration: | ||||
|                     # decrement by one frame's worth of duration (as | ||||
|                     # possibly declared by the backend) to see if any | ||||
|                     # older data exists prior to this possible | ||||
|                     # "history gap". | ||||
|                     last_start_dt: datetime = last_start_dt.subtract( | ||||
|                         seconds=def_frame_duration.total_seconds() | ||||
|                     ) | ||||
|                     log.warning( | ||||
|                         f'{mod.name} -> EMPTY FRAME for end_dt?\n' | ||||
|                         f'tf@fqme: {timeframe}@{mkt.fqme}\n' | ||||
|                         'bf_until <- last_start_dt:\n' | ||||
|                         f'{backfill_until_dt} <- {last_start_dt}\n' | ||||
|                         f'Decrementing `end_dt` by {dur} and retry..\n' | ||||
|                     gap_report += ( | ||||
|                         f'Decrementing `end_dt` and retrying with,\n' | ||||
|                         f'def_frame_duration: {def_frame_duration}\n' | ||||
|                         f'(new) last_start_dt: {last_start_dt}\n' | ||||
|                     ) | ||||
|                     log.warning(gap_report) | ||||
|                     # skip writing to shm/tsdb and try the next | ||||
|                     # duration's worth of prior history. | ||||
|                     continue | ||||
| 
 | ||||
|                 else: | ||||
|                     # await tractor.pause() | ||||
|                     raise DataUnavailable(gap_report) | ||||
| 
 | ||||
|             # broker says there never was or is no more history to pull | ||||
|             except DataUnavailable: | ||||
|             except DataUnavailable as due: | ||||
|                 message: str = due.args[0] | ||||
|                 log.warning( | ||||
|                     f'NO-MORE-DATA in range?\n' | ||||
|                     f'`{mod.name}` halted history:\n' | ||||
|                     f'tf@fqme: {timeframe}@{mkt.fqme}\n' | ||||
|                     'bf_until <- last_start_dt:\n' | ||||
|                     f'{backfill_until_dt} <- {last_start_dt}\n' | ||||
|                     f'Provider {mod.name!r} halted backfill due to,\n\n' | ||||
| 
 | ||||
|                     f'{message}\n' | ||||
| 
 | ||||
|                     f'fqme: {mkt.fqme}\n' | ||||
|                     f'timeframe: {timeframe}\n' | ||||
|                     f'last_start_dt: {last_start_dt}\n' | ||||
|                     f'bf_until: {backfill_until_dt}\n' | ||||
|                 ) | ||||
|                 # UGH: what's a better way? | ||||
|                 # TODO: backends are responsible for being correct on | ||||
|  | @ -465,34 +490,54 @@ async def start_backfill( | |||
|                 #     to halt the request loop until the condition is | ||||
|                 #     resolved or should the backend be entirely in | ||||
|                 #     charge of solving such faults? yes, right? | ||||
|                 # if timeframe > 1: | ||||
|                 #     await tractor.pause() | ||||
|                 return | ||||
| 
 | ||||
|             time: np.ndarray = array['time'] | ||||
|             assert ( | ||||
|                 array['time'][0] | ||||
|                 time[0] | ||||
|                 == | ||||
|                 next_start_dt.timestamp() | ||||
|             ) | ||||
| 
 | ||||
|             diff = last_start_dt - next_start_dt | ||||
|             frame_time_diff_s = diff.seconds | ||||
|             assert time[-1] == next_end_dt.timestamp() | ||||
| 
 | ||||
|             expected_dur: Interval = last_start_dt - next_start_dt | ||||
| 
 | ||||
|             # frame's worth of sample-period-steps, in seconds | ||||
|             frame_size_s: float = len(array) * timeframe | ||||
|             expected_frame_size_s: float = frame_size_s + timeframe | ||||
|             if frame_time_diff_s > expected_frame_size_s: | ||||
| 
 | ||||
|             recv_frame_dur: Duration = ( | ||||
|                 from_timestamp(array[-1]['time']) | ||||
|                 - | ||||
|                 from_timestamp(array[0]['time']) | ||||
|             ) | ||||
|             if ( | ||||
|                 (lt_frame := (recv_frame_dur < expected_dur)) | ||||
|                 or | ||||
|                 (null_frame := (frame_size_s == 0)) | ||||
|                 # ^XXX, should NEVER hit now! | ||||
|             ): | ||||
|                 # XXX: query result includes a start point prior to our | ||||
|                 # expected "frame size" and thus is likely some kind of | ||||
|                 # history gap (eg. market closed period, outage, etc.) | ||||
|                 # so just report it to console for now. | ||||
|                 if lt_frame: | ||||
|                     reason = 'Possible GAP (or first-datum)' | ||||
|                 else: | ||||
|                     assert null_frame | ||||
|                     reason = 'NULL-FRAME' | ||||
| 
 | ||||
|                 missing_dur: Interval = expected_dur.end - recv_frame_dur.end | ||||
|                 log.warning( | ||||
|                     'GAP DETECTED:\n' | ||||
|                     f'last_start_dt: {last_start_dt}\n' | ||||
|                     f'diff: {diff}\n' | ||||
|                     f'frame_time_diff_s: {frame_time_diff_s}\n' | ||||
|                     f'{timeframe}s-series {reason} detected!\n' | ||||
|                     f'fqme: {mkt.fqme}\n' | ||||
|                     f'last_start_dt: {last_start_dt}\n\n' | ||||
|                     f'recv interval: {recv_frame_dur}\n' | ||||
|                     f'expected interval: {expected_dur}\n\n' | ||||
| 
 | ||||
|                     f'Missing {missing_dur.in_words()!r} of history!\n' | ||||
|                     f'{missing_dur}\n' | ||||
|                 ) | ||||
|                 # await tractor.pause() | ||||
| 
 | ||||
|             to_push = diff_history( | ||||
|                 array, | ||||
|  | @ -567,7 +612,8 @@ async def start_backfill( | |||
|             # long-term storage. | ||||
|             if ( | ||||
|                 storage is not None | ||||
|                 and write_tsdb | ||||
|                 and | ||||
|                 write_tsdb | ||||
|             ): | ||||
|                 log.info( | ||||
|                     f'Writing {ln} frame to storage:\n' | ||||
|  | @ -688,7 +734,7 @@ async def back_load_from_tsdb( | |||
|         last_tsdb_dt | ||||
|         and latest_start_dt | ||||
|     ): | ||||
|         backfilled_size_s = ( | ||||
|         backfilled_size_s: int = ( | ||||
|             latest_start_dt - last_tsdb_dt | ||||
|         ).seconds | ||||
|         # if the shm buffer len is not large enough to contain | ||||
|  | @ -911,6 +957,8 @@ async def tsdb_backfill( | |||
|             f'{pformat(config)}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # concurrently load the provider's most-recent-frame AND any | ||||
|         # pre-existing tsdb history already saved in `piker` storage. | ||||
|         dt_eps: list[DateTime, DateTime] = [] | ||||
|         async with trio.open_nursery() as tn: | ||||
|             tn.start_soon( | ||||
|  | @ -921,7 +969,6 @@ async def tsdb_backfill( | |||
|                 timeframe, | ||||
|                 config, | ||||
|             ) | ||||
| 
 | ||||
|             tsdb_entry: tuple = await load_tsdb_hist( | ||||
|                 storage, | ||||
|                 mkt, | ||||
|  | @ -950,6 +997,25 @@ async def tsdb_backfill( | |||
|                 mr_end_dt, | ||||
|             ) = dt_eps | ||||
| 
 | ||||
|             first_frame_dur_s: int = (mr_end_dt - mr_start_dt).seconds | ||||
|             calced_frame_size: Duration = mk_duration( | ||||
|                 seconds=first_frame_dur_s, | ||||
|             ) | ||||
|             # NOTE, attempt to use the backend declared default frame | ||||
|             # sizing (as allowed by their time-series query APIs) and | ||||
|             # if not provided try to construct a default from the | ||||
|             # first frame received above. | ||||
|             def_frame_durs: dict[ | ||||
|                 int, | ||||
|                 Duration, | ||||
|             ]|None = config.get('frame_types', None) | ||||
|             if def_frame_durs: | ||||
|                 def_frame_size: Duration = def_frame_durs[timeframe] | ||||
|                 assert def_frame_size == calced_frame_size | ||||
|             else: | ||||
|                 # use what we calced from first frame above. | ||||
|                 def_frame_size = calced_frame_size | ||||
| 
 | ||||
|             # NOTE: when there's no offline data, there's 2 cases: | ||||
|             # - data backend doesn't support timeframe/sample | ||||
|             #   period (in which case `dt_eps` should be `None` and | ||||
|  | @ -980,7 +1046,7 @@ async def tsdb_backfill( | |||
|                     partial( | ||||
|                         start_backfill, | ||||
|                         get_hist=get_hist, | ||||
|                         frame_types=config.get('frame_types', None), | ||||
|                         def_frame_duration=def_frame_size, | ||||
|                         mod=mod, | ||||
|                         mkt=mkt, | ||||
|                         shm=shm, | ||||
|  |  | |||
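To condense the control flow added above, a standalone sketch (with hypothetical helper names; only the `frame_types`/`def_frame_duration` semantics come from the patch) of how a default frame duration is derived and then used to step the query window back past an empty frame:

    from pendulum import (
        DateTime,
        Duration,
        duration as mk_duration,
    )

    def derive_def_frame_duration(
        config: dict,
        timeframe: int,
        mr_start_dt: DateTime,
        mr_end_dt: DateTime,
    ) -> Duration:
        # prefer a backend-declared `frame_types` limit, else fall
        # back to the span of the first (most recent) received frame.
        def_frame_durs: dict[int, Duration]|None = config.get(
            'frame_types',
            None,
        )
        if def_frame_durs:
            return def_frame_durs[timeframe]
        return mk_duration(
            seconds=(mr_end_dt - mr_start_dt).total_seconds(),
        )

    def step_back(
        last_start_dt: DateTime,
        def_frame_duration: Duration,
    ) -> DateTime:
        # on an empty (`NoData`) frame, decrement `end_dt` by one
        # frame's worth and retry the history query.
        return last_start_dt.subtract(
            seconds=def_frame_duration.total_seconds(),
        )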
(File diff suppressed because it is too large.)
							|  | @ -25,11 +25,11 @@ build-backend = "poetry.core.masonry.api" | |||
| ignore = [] | ||||
| 
 | ||||
| # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores | ||||
| "piker/ui/qt.py" = [ | ||||
|   "E402", | ||||
|   'F401',  # unused imports (without __all__ or blah as blah) | ||||
|   # "F841", # unused variable rules | ||||
| ] | ||||
| # "piker/ui/qt.py" = [ | ||||
| #   "E402", | ||||
| #   'F401',  # unused imports (without __all__ or blah as blah) | ||||
| #   # "F841", # unused variable rules | ||||
| # ] | ||||
| # ignore-init-module-imports = false | ||||
| 
 | ||||
| # ------ - ------ | ||||
|  | @ -50,10 +50,8 @@ attrs = "^23.1.0" | |||
| bidict = "^0.22.1" | ||||
| colorama = "^0.4.6" | ||||
| colorlog = "^6.7.0" | ||||
| cython = "^3.0.0" | ||||
| greenback = "^1.1.1" | ||||
| ib-insync = "^0.9.86" | ||||
| msgspec = "^0.18.0" | ||||
| msgspec = "^0.18.6" | ||||
| numba = "^0.59.0" | ||||
| numpy = "^1.25" | ||||
| polars = "^0.18.13" | ||||
|  | @ -71,13 +69,11 @@ pdbp = "^1.5.0" | |||
| trio = "^0.24" | ||||
| pendulum = "^3.0.0" | ||||
| httpx = "^0.27.0" | ||||
| cryptofeed = "^2.4.0" | ||||
| pyarrow = "^17.0.0" | ||||
| 
 | ||||
| [tool.poetry.dependencies.tractor] | ||||
| develop = true | ||||
| git = 'https://github.com/goodboy/tractor.git' | ||||
| branch = 'asyncio_debugger_support' | ||||
| # path = "../tractor" | ||||
| 
 | ||||
| tractor = {path = "../tractor", develop = true} | ||||
| websockets = "12.0" | ||||
| [tool.poetry.dependencies.asyncvnc] | ||||
| git = 'https://github.com/pikers/asyncvnc.git' | ||||
| branch = 'main' | ||||
|  | @ -109,6 +105,8 @@ pytest = "^6.0.0" | |||
| elasticsearch = "^8.9.0" | ||||
| xonsh = "^0.14.2" | ||||
| prompt-toolkit = "3.0.40" | ||||
| cython = "^3.0.0" | ||||
| greenback = "^1.1.1" | ||||
| 
 | ||||
| # console enhancements and eventually remote debugging | ||||
| # extras/helpers. | ||||
|  |  | |||