Compare commits

74 commits

gitea_feat ... macos_fixe
| SHA1 |
|---|
| cd15f2ae76 |
| ed3a8d81b1 |
| 61edb5cb19 |
| db77d7ab29 |
| 8c274efd18 |
| 0b123c9af9 |
| d17160519e |
| 5bc7e4c9b6 |
| d35e1e5c67 |
| d4c10b2b0f |
| 46285a601e |
| f9610c9e26 |
| 9d5e405903 |
| e19a724037 |
| 390a57c96d |
| 69eac7bb15 |
| a45de0b710 |
| 9df1988aa6 |
| f7caa75228 |
| e9613e46f6 |
| 6637ca9e4f |
| 7e139e6a8e |
| c2d9283db4 |
| 28ba1392bb |
| f50202a6af |
| baff466ee0 |
| b01edcf65a |
| 2545def7bb |
| 1b74417688 |
| 4d4f5d0af5 |
| 7e82bf0729 |
| f1b4550483 |
| bdaf74a19a |
| b87ca76700 |
| 94caa248e7 |
| da953b6b0c |
| fb8375f608 |
| d5faf4f59d |
| df5e72f7ae |
| bf33cb93b1 |
| d655e81290 |
| bc72e3d206 |
| 35cb538a69 |
| 8a768af5bb |
| 8b0fac3b6c |
| 36cc0cf750 |
| 3ff0a86741 |
| 705f0e86ac |
| 2a24d1d50c |
| 84ad34f51e |
| cbbf674737 |
| ec71dc2018 |
| 17aebf44a9 |
| 5f347c9f6a |
| cdb41e4881 |
| 289b63bb2a |
| 8f1e082c91 |
| b9321dbb49 |
| 21d051b05f |
| 3118d0f140 |
| 4278d8e2f1 |
| b209512eb6 |
| 8a9d21468a |
| 75ddba09f7 |
| dae17bb043 |
| 8bd0a182cf |
| 04421e5ad2 |
| 1e0c3da32d |
| 5b87b3c2a6 |
| 438e69e42c |
| ec6dd7cafc |
| f1436c93db |
| 1061103f76 |
| 3aea296caa |
.gitignore

@@ -103,3 +103,4 @@ ENV/
 # mypy
 .mypy_cache/
 .vscode/settings.json
+**/.DS_Store
README.rst
@@ -88,7 +88,23 @@ a sane install with `uv`
************************
bc why install with `python` when you can go faster with `rust` ::

    uv lock
    uv sync

    # ^ astral's docs,
    # https://docs.astral.sh/uv/concepts/projects/sync/

include all GUIs ::

    uv sync --extra uis

AND with all our hacking tools::

    uv sync --dev --extra uis


Ensure you can run the root-daemon::

    uv run pikerd [-l info --pdb]


hacky install on nixos

@@ -103,7 +119,18 @@ start a chart
 *************
 run a realtime OHLCV chart stand-alone::

-    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+    [uv run] piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+
+    # ^^^ iff you haven't activated the py-env,
+    # - https://docs.astral.sh/uv/concepts/projects/run/
+    #
+    # in order to create an explicit virt-env see,
+    # - https://docs.astral.sh/uv/concepts/projects/layout/#the-project-environment
+    # - https://docs.astral.sh/uv/pip/environments/
+    #
+    # use $UV_PROJECT_ENVIRONMENT to select any non-`.venv/`
+    # dir as the venv subdir in the repo's root.
+    # - https://docs.astral.sh/uv/reference/environment/#uv_project_environment

 this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
 overlaid on the same graph. Use of `piker` without first starting
@@ -4,9 +4,11 @@ tsdb.host = 'localhost'
 tsdb.grpc_port = 5995

 [ui]
-# set custom font + size which will scale entire UI
+# set custom font + size which will scale entire UI~
 # font_size = 16
+# font_size = 32
 # font_name = 'Monospaced'

 # colorscheme = 'default'  # UNUSED
 # graphics.update_throttle = 60  # Hz  # TODO
+# graphics.update_throttle = 120  # Hz  # PENDING TODO
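For reference, a `[ui]` table like the one above parses with the stdlib `tomllib` module (Python 3.11+); a minimal sketch using the key names from this diff (the uncommented values are made up):

```python
import tomllib  # stdlib TOML parser, Python 3.11+

conf: dict = tomllib.loads('''
[ui]
font_size = 32
font_name = 'Monospaced'  # TOML "literal string" quoting, as in the config above
''')
assert conf['ui']['font_size'] == 32
assert conf['ui']['font_name'] == 'Monospaced'
```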
@@ -121,6 +121,7 @@ async def bot_main():
             # tick_throttle=10,
         ) as feed,

+        tractor.trionics.collapse_eg(),
         trio.open_nursery() as tn,
     ):
         assert accounts
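This `collapse_eg()` + nursery pairing recurs throughout the compare (see the `brokerd` and binance hunks below). A minimal standalone sketch of the pattern, assuming `tractor.trionics.collapse_eg` does what its name and usage here suggest (unwrapping a single-exception `ExceptionGroup` so the lone error propagates bare under trio's strict-EG semantics):

```python
import trio
import tractor

async def boom() -> None:
    raise RuntimeError('single failure inside the nursery')

async def main() -> None:
    # without `collapse_eg()` the RuntimeError would surface wrapped
    # in an ExceptionGroup; with it, a lone exception should
    # propagate unwrapped (assumption based on its usage in this PR).
    async with (
        tractor.trionics.collapse_eg(),
        trio.open_nursery() as tn,
    ):
        tn.start_soon(boom)

if __name__ == '__main__':
    try:
        trio.run(main)
    except RuntimeError as e:
        print(f'caught bare: {e!r}')
```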
examples/max_pain.py (new file)

@@ -0,0 +1,338 @@
#!/usr/bin/env python
from decimal import (
    Decimal,
)
from pathlib import Path

import numpy as np
# import polars as pl
import trio
import tractor
from datetime import datetime
# from pprint import pformat
from piker.brokers.deribit.api import (
    get_client,
    maybe_open_oi_feed,
)
from piker.storage import open_storage_client, StorageClient
from piker.log import get_logger
import sys
import pyqtgraph as pg
from PyQt6 import QtCore
from pyqtgraph import ScatterPlotItem, InfiniteLine
from PyQt6.QtWidgets import QApplication
from cryptofeed.symbols import Symbol


log = get_logger(__name__)
# XXX, use 2 newlines between top level LOC (even between these
# imports and the next function line ;)


def check_if_complete(
    oi: dict[str, dict[str, Decimal | None]],
) -> bool:
    return all(
        oi[strike]['C'] is not None
        and
        oi[strike]['P'] is not None
        for strike in oi
    )


async def max_pain_daemon(
) -> None:
    oi_by_strikes: dict[str, dict[str, Decimal | None]]
    instruments: list[Symbol] = []
    expiry_dates: list[str]
    expiry_date: str
    currency: str = 'btc'
    kind: str = 'option'

    async with get_client(
    ) as client:
        expiry_dates: list[str] = await client.get_expiration_dates(
            currency=currency,
            kind=kind
        )

        log.info(
            f'Available expiries for {currency!r}-{kind}:\n'
            f'{expiry_dates}\n'
        )
        expiry_date: str = input(
            'Please enter a valid expiration date: '
        ).upper()
        print('Starting little daemon...')

        # maybe move this type annot down to the assignment line?
        oi_by_strikes: dict[str, dict[str, Decimal]]
        instruments = await client.get_instruments(
            expiry_date=expiry_date,
        )
        oi_by_strikes = client.get_strikes_dict(instruments)

    def get_total_intrinsic_values(
        oi_by_strikes: dict[str, dict[str, Decimal]]
    ) -> dict[str, dict[str, Decimal]]:
        call_cash: Decimal = Decimal(0)
        put_cash: Decimal = Decimal(0)
        intrinsic_values: dict[str, dict[str, Decimal]] = {}
        closes: list = sorted(Decimal(close) for close in oi_by_strikes)

        for strike, oi in oi_by_strikes.items():
            s = Decimal(strike)
            call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes)
            put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes)

            intrinsic_values[strike] = {
                'C': call_cash,
                'P': put_cash,
                'total': call_cash + put_cash,
            }

        return intrinsic_values

    def get_intrinsic_value_and_max_pain(
        intrinsic_values: dict[str, dict[str, Decimal]],
    ):
        # We need to find the lowest value, so we start at
        # infinity to ensure that, and the max_pain must be
        # an amount greater than zero.
        total_intrinsic_value: Decimal = Decimal('Infinity')
        max_pain: Decimal = Decimal(0)

        for strike, oi in oi_by_strikes.items():
            s = Decimal(strike)
            if intrinsic_values[strike]['total'] < total_intrinsic_value:
                total_intrinsic_value = intrinsic_values[strike]['total']
                max_pain = s

        return total_intrinsic_value, max_pain

    def plot_graph(
        oi_by_strikes: dict[str, dict[str, Decimal]],
        plot,
    ):
        """Update the bar graph with new open interest data."""
        plot.clear()

        intrinsic_values = get_total_intrinsic_values(oi_by_strikes)

        for strike_str in sorted(oi_by_strikes, key=lambda x: int(x)):
            strike = int(strike_str)
            calls_val = float(oi_by_strikes[strike_str]['C'])
            puts_val = float(oi_by_strikes[strike_str]['P'])

            bar_c = pg.BarGraphItem(
                x=[strike - 100],
                height=[calls_val],
                width=200,
                pen='w',
                brush=(0, 0, 255, 150)
            )
            plot.addItem(bar_c)

            bar_p = pg.BarGraphItem(
                x=[strike + 100],
                height=[puts_val],
                width=200,
                pen='w',
                brush=(255, 0, 0, 150)
            )
            plot.addItem(bar_p)

            total_val = float(intrinsic_values[strike_str]['total']) / 100000

            scatter_iv = ScatterPlotItem(
                x=[strike],
                y=[total_val],
                pen=pg.mkPen(color=(0, 255, 0), width=2),
                brush=pg.mkBrush(0, 255, 0, 150),
                size=3,
                symbol='o'
            )
            plot.addItem(scatter_iv)

        _, max_pain = get_intrinsic_value_and_max_pain(intrinsic_values)

        vertical_line = InfiniteLine(
            pos=max_pain,
            angle=90,
            pen=pg.mkPen(color='yellow', width=1, style=QtCore.Qt.PenStyle.DotLine),
            label=f'Max pain: {max_pain:,.0f}',
            labelOpts={
                'position': 0.85,
                'color': 'yellow',
                'movable': True
            }
        )
        plot.addItem(vertical_line)

    def update_oi_by_strikes(msg: tuple):
        nonlocal oi_by_strikes
        if 'oi' == msg[0]:
            strike_price = msg[1]['strike_price']
            option_type = msg[1]['option_type']
            open_interest = msg[1]['open_interest']
            oi_by_strikes.setdefault(
                strike_price, {}
            ).update(
                {option_type: open_interest}
            )

    # Define the structured dtype
    dtype = np.dtype([
        ('time', int),
        ('oi', float),
        ('oi_calc', float),
    ])

    async def write_open_interest_on_file(msg: tuple, client: StorageClient):
        if 'oi' == msg[0]:
            nonlocal expiry_date
            timestamp = msg[1]['timestamp']
            strike_price = msg[1]['strike_price']
            option_type = msg[1]['option_type'].lower()
            col_sym_key = f'btc-{expiry_date.lower()}-{strike_price}-{option_type}'

            # Create the numpy array with sample data
            data = np.array([
                (
                    int(timestamp),
                    float(msg[1]['open_interest']),
                    np.nan,
                ),
            ], dtype=dtype)

            path: Path = await client.write_oi(
                col_sym_key,
                data,
            )
            # TODO, use std logging like this throughout for status
            # emissions on console!
            log.info(f'Wrote OI history to {path}')

    def get_max_pain(
        oi_by_strikes: dict[str, dict[str, Decimal]]
    ) -> dict[str, str | Decimal]:
        '''
        Compute the max pain point from only the strike prices and
        the call/put open interest; the candidate settlement prices
        ("closes") are the strike prices themselves. For each strike
        sum the cash value of all the ITM calls and puts settled at
        that price; the strike with the lowest total intrinsic value
        is the max pain.

        '''
        nonlocal timestamp

        intrinsic_values = get_total_intrinsic_values(oi_by_strikes)

        total_intrinsic_value, max_pain = get_intrinsic_value_and_max_pain(intrinsic_values)

        return {
            'timestamp': timestamp,
            'expiry_date': expiry_date,
            'total_intrinsic_value': total_intrinsic_value,
            'max_pain': max_pain,
        }

    async with (
        open_storage_client() as (_, storage),

        maybe_open_oi_feed(
            instruments,
        ) as oi_feed,
    ):
        # Initialize QApplication
        app = QApplication(sys.argv)

        win = pg.GraphicsLayoutWidget(show=True)
        win.setWindowTitle('Calls (blue) vs Puts (red)')

        plot = win.addPlot(title='OI by Strikes')
        plot.showGrid(x=True, y=True)
        print('Plot initialized...')

        async for msg in oi_feed:

            # In-memory oi_by_strikes dict; all messages are filtered
            # here and the dict is updated with the open interest data
            update_oi_by_strikes(msg)

            # Write to file using the storage client
            await write_open_interest_on_file(msg, storage)

            # Max pain calcs; before we start we must gather the open
            # interest for all the strike prices and option types
            # available for an expiration date
            if check_if_complete(oi_by_strikes):
                if 'oi' == msg[0]:
                    # Here we must read from the filesystem the latest
                    # open interest value for each instrument for this
                    # specific expiration date, i.e. look up the last
                    # update for the instrument's
                    # btc-{expiry_date}-*oi1s.parquet (1s because it is
                    # hardcoded to something, sorry.)
                    timestamp = msg[1]['timestamp']
                    max_pain = get_max_pain(oi_by_strikes)
                    # intrinsic_values = get_total_intrinsic_values(oi_by_strikes)

                    # graph here
                    plot_graph(oi_by_strikes, plot)

                    # TODO, use a single multiline string with `()`
                    # and drop the multiple `print()` calls (this
                    # should be done elsewhere in this file as well!)
                    #
                    # As per the docs,
                    # https://docs.python.org/3/reference/lexical_analysis.html#string-literal-concatenation
                    # you could instead do,
                    # print(
                    #   '-----------------------------------------------\n'
                    #   f'timestamp: {datetime.fromtimestamp(max_pain["timestamp"])}\n'
                    # )
                    # WHY?
                    # |_ less ctx-switches/calls to `print()`
                    # |_ the `str` can then be modified / passed
                    #   around as a variable more easily if needed in
                    #   the future ;)
                    #
                    # ALSO, i believe there already is a stdlib
                    # module to do "alignment" of text which you
                    # could try for doing the right-side alignment,
                    # https://docs.python.org/3/library/textwrap.html#textwrap.indent
                    #
                    print('-----------------------------------------------')
                    print(f'timestamp:             {datetime.fromtimestamp(max_pain["timestamp"])}')
                    print(f'expiry_date:           {max_pain["expiry_date"]}')
                    print(f'max_pain:              {max_pain["max_pain"]:,.0f}')
                    print(f'total intrinsic value: {max_pain["total_intrinsic_value"]:,.0f}')
                    print('-----------------------------------------------')

            # Process GUI events to keep the window responsive
            app.processEvents()


async def main():

    async with tractor.open_nursery(
        debug_mode=True,
        loglevel='info',
    ) as an:
        from tractor import log
        log.get_console_log(level='info')

        ptl: tractor.Portal = await an.start_actor(
            'max_pain_daemon',
            enable_modules=[__name__],
            infect_asyncio=True,
            # ^TODO, we can actually run this in the root-actor now
            # if needed as per 2nd "section" in,
            # https://pikers.dev/goodboy/tractor/pulls/2
            #
            # NOTE, will first require us porting to modern
            # `tractor:main` though ofc!
        )
        await ptl.run(max_pain_daemon)


if __name__ == '__main__':
    trio.run(main)
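A quick aside on the structured `np.dtype` that `write_open_interest_on_file()` above builds rows with; this standalone snippet (hypothetical values) shows how such rows construct and index by field name:

```python
import numpy as np

# same field layout as in the script above
dtype = np.dtype([
    ('time', int),
    ('oi', float),
    ('oi_calc', float),
])

# one row: (epoch-seconds, open interest, not-yet-computed calc column)
row = np.array([(1725177600, 250.0, np.nan)], dtype=dtype)
assert row['oi'][0] == 250.0
print(row['time'], row['oi'], row['oi_calc'])
```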
(new file)

@@ -0,0 +1,29 @@
## Max Pain Calculation for Deribit Options

This feature calculates the max pain point for options traded on the
Deribit exchange using the `cryptofeed` library.

- Functions in the api module for fetching options data from Deribit.
  [commit](https://pikers.dev/pikers/piker/commit/da55856dd2876291f55a06eb0561438a912d8241)

- Compute the max pain point based on open interest data using
  deribit's api.
  [commit](https://pikers.dev/pikers/piker/commit/0d9d6e15ba0edeb662ec97f7599dd66af3046b94)

### How to test it?

**Before you start:** in order to get this working with `uv`, you
**must** use my [`tractor` fork](https://pikers.dev/ntorres/tractor/src/branch/aio_abandons)
on the `aio_abandons` branch; the reason is that I cherry-picked the
`uv_migration` work that guille made and, for some reason I didn't dig
into, on my system `tractor` needs to use `uv` too. Quite hacky,
I guess.

1. `uv lock`

2. `uv run --no-dev python examples/max_pain.py`

3. A message should be displayed; enter one of the available
   expiration dates.

4. The script should be up and running.
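To make the calculation concrete, here is a minimal self-contained sketch of the max-pain idea using hypothetical open-interest numbers (the real script pulls these from the live Deribit OI feed):

```python
from decimal import Decimal

# hypothetical open-interest table: strike -> {'C': call OI, 'P': put OI}
oi_by_strikes: dict[str, dict[str, Decimal]] = {
    '50000': {'C': Decimal('100'), 'P': Decimal('300')},
    '55000': {'C': Decimal('200'), 'P': Decimal('150')},
    '60000': {'C': Decimal('400'), 'P': Decimal('50')},
}

def total_intrinsic(expiry_price: Decimal) -> Decimal:
    # cash value of all ITM contracts if the underlying settles
    # at `expiry_price`
    total = Decimal(0)
    for strike_str, oi in oi_by_strikes.items():
        k = Decimal(strike_str)
        total += max(Decimal(0), expiry_price - k) * oi['C']  # ITM calls
        total += max(Decimal(0), k - expiry_price) * oi['P']  # ITM puts
    return total

# max pain = the candidate settlement price (a strike) which
# minimizes the total intrinsic value paid out
max_pain = min(
    (Decimal(s) for s in oi_by_strikes),
    key=total_intrinsic,
)
print(max_pain)  # 55000 for the numbers above
```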
(new file)

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# macOS wrapper for piker to handle missing XDG_RUNTIME_DIR

# Set up runtime directory for macOS if not already set
if [ -z "$XDG_RUNTIME_DIR" ]; then
    # Use macOS standard temp directory with user-specific subdirectory
    export XDG_RUNTIME_DIR="/tmp/piker-runtime-$(id -u)"

    # Create the directory if it doesn't exist
    if [ ! -d "$XDG_RUNTIME_DIR" ]; then
        mkdir -p "$XDG_RUNTIME_DIR"
        # Set proper permissions (only user can access)
        chmod 700 "$XDG_RUNTIME_DIR"
    fi

    echo "Set XDG_RUNTIME_DIR to: $XDG_RUNTIME_DIR"
fi

# Run piker with all passed arguments
exec uv run piker "$@"
@@ -42,7 +42,6 @@ from ._mktinfo import (
     dec_digits,
     digits_to_dec,
     MktPair,
-    Symbol,
     unpack_fqme,
     _derivs as DerivTypes,
 )

@@ -60,7 +59,6 @@ __all__ = [
     'Asset',
     'MktPair',
     'Position',
-    'Symbol',
     'Transaction',
     'TransactionLedger',
     'dec_digits',
@@ -390,8 +390,8 @@ class MktPair(Struct, frozen=True):
         cls,
         fqme: str,

-        price_tick: float | str,
-        size_tick: float | str,
+        price_tick: float|str,
+        size_tick: float|str,
         bs_mktid: str,

         broker: str | None = None,
@@ -677,90 +677,3 @@ def unpack_fqme(
         # '.'.join([mkt_ep, venue]),
         suffix,
     )
-
-
-class Symbol(Struct):
-    '''
-    I guess this is some kinda container thing for dealing with
-    all the different meta-data formats from brokers?
-
-    '''
-    key: str
-
-    broker: str = ''
-    venue: str = ''
-
-    # precision descriptors for price and vlm
-    tick_size: Decimal = Decimal('0.01')
-    lot_tick_size: Decimal = Decimal('0.0')
-
-    suffix: str = ''
-    broker_info: dict[str, dict[str, Any]] = {}
-
-    @classmethod
-    def from_fqme(
-        cls,
-        fqsn: str,
-        info: dict[str, Any],
-
-    ) -> Symbol:
-        broker, mktep, venue, suffix = unpack_fqme(fqsn)
-        tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
-
-        return Symbol(
-            broker=broker,
-            key=mktep,
-            tick_size=tick_size,
-            lot_tick_size=lot_size,
-            venue=venue,
-            suffix=suffix,
-            broker_info={broker: info},
-        )
-
-    @property
-    def type_key(self) -> str:
-        return list(self.broker_info.values())[0]['asset_type']
-
-    @property
-    def tick_size_digits(self) -> int:
-        return float_digits(self.tick_size)
-
-    @property
-    def lot_size_digits(self) -> int:
-        return float_digits(self.lot_tick_size)
-
-    @property
-    def price_tick(self) -> Decimal:
-        return Decimal(str(self.tick_size))
-
-    @property
-    def size_tick(self) -> Decimal:
-        return Decimal(str(self.lot_tick_size))
-
-    @property
-    def broker(self) -> str:
-        return list(self.broker_info.keys())[0]
-
-    @property
-    def fqme(self) -> str:
-        return maybe_cons_tokens([
-            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
-            self.venue,
-            self.suffix,  # includes expiry and other con info
-            self.broker,
-        ])
-
-    def quantize(
-        self,
-        size: float,
-    ) -> Decimal:
-        digits = float_digits(self.lot_tick_size)
-        return Decimal(size).quantize(
-            Decimal(f'1.{"0".ljust(digits, "0")}'),
-            rounding=ROUND_HALF_EVEN
-        )
-
-    # NOTE: when cast to `str` return fqme
-    def __str__(self) -> str:
-        return self.fqme
@@ -362,7 +362,11 @@ class Position(Struct):
         # added: bool = False
         tid: str = t.tid
         if tid in self._events:
-            log.warning(f'{t} is already added?!')
+            log.debug(
+                f'Txn is already added?\n'
+                f'\n'
+                f'{t}\n'
+            )
             # return added

         # TODO: apparently this IS possible with a dict but not

@@ -696,7 +700,7 @@ class Account(Struct):
                     else:
                         # TODO: we reallly need a diff set of
                         # loglevels/colors per subsys.
-                        log.warning(
+                        log.debug(
                             f'Recent position for {fqme} was closed!'
                         )
@@ -22,7 +22,9 @@ you know when you're losing money (if possible) XD
 from __future__ import annotations
 from collections.abc import ValuesView
 from contextlib import contextmanager as cm
+from functools import partial
 from math import copysign
+from pprint import pformat
 from typing import (
     Any,
     Callable,

@@ -37,12 +39,16 @@ from pendulum import (
     parse,
 )

+from ..log import get_logger
+
 if TYPE_CHECKING:
     from ._ledger import (
         Transaction,
         TransactionLedger,
     )

+log = get_logger(__name__)
+

 def ppu(
     clears: Iterator[Transaction],

@@ -238,6 +244,9 @@ def iter_by_dt(

     def dyn_parse_to_dt(
         tx: tuple[str, dict[str, Any]] | Transaction,
+
+        debug: bool = False,
+        _invalid: list|None = None,
     ) -> DateTime:

         # handle `.items()` inputs

@@ -250,11 +259,16 @@ def iter_by_dt(
         # get best parser for this record..
         for k in parsers:
             if (
-                isdict and k in tx
-                 or getattr(tx, k, None)
+                (v := getattr(tx, k, None))
+                or
+                (
+                    isdict
+                    and
+                    (v := tx.get(k))
+                )
             ):
-                v = tx[k] if isdict else tx.dt
-                assert v is not None, f'No valid value for `{k}`!?'
+                # TODO? remove yah?
+                # v = tx[k] if isdict else tx.dt

                 # only call parser on the value if not None from
                 # the `parsers` table above (when NOT using

@@ -262,21 +276,54 @@ def iter_by_dt(
                 # sort on it directly
                 if (
                     not isinstance(v, DateTime)
-                    and (parser := parsers.get(k))
+                    and
+                    (parser := parsers.get(k))
                 ):
-                    return parser(v)
+                    ret = parser(v)
                 else:
-                    return v
+                    ret = v
+
+                return ret
+
             else:
                 continue

-        # XXX: should never get here..
         else:
-            breakpoint()
+            # XXX: should never get here..
+            if debug:
+                import tractor
+                with tractor.devx.maybe_open_crash_handler():
+                    raise ValueError(
+                        f'Invalid txn time ??\n'
+                        f'txn-id: {k!r}\n'
+                        f'{k!r}: {v!r}\n'
+                    )
+                        # assert v is not None, f'No valid value for `{k}`!?'
+
+            if _invalid is not None:
+                _invalid.append(tx)
+                return from_timestamp(0.)
+
+                # breakpoint()

-    entry: tuple[str, dict] | Transaction
+    entry: tuple[str, dict]|Transaction
+    invalid: list = []
     for entry in sorted(
         records,
-        key=key or dyn_parse_to_dt,
+        key=key or partial(
+            dyn_parse_to_dt,
+            _invalid=invalid,
+        ),
     ):
+        if entry in invalid:
+            log.warning(
+                f'Ignoring txn w invalid timestamp ??\n'
+                f'{pformat(entry)}\n'
+                # f'txn-id: {k!r}\n'
+                # f'{k!r}: {v!r}\n'
+            )
+            continue
+
         # NOTE the type sig above; either pairs or txns B)
         yield entry
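The reworked `dyn_parse_to_dt` above collects unparsable records into `_invalid` and returns an epoch-0 sentinel so `sorted()` still gets a comparable key; the caller then skips anything in that list. A standalone sketch of the same pattern (plain stdlib, hypothetical records):

```python
from datetime import datetime, timezone

records = [
    {'tid': 'a', 'time': '2024-01-02T00:00:00+00:00'},
    {'tid': 'b'},  # missing timestamp -> "invalid"
    {'tid': 'c', 'time': '2024-01-01T00:00:00+00:00'},
]

invalid: list = []

def parse_dt(rec: dict) -> datetime:
    if (v := rec.get('time')):
        return datetime.fromisoformat(v)
    # same trick as the diff: remember the bad record and return
    # epoch-0 so `sorted()` still gets a comparable key
    invalid.append(rec)
    return datetime.fromtimestamp(0., tz=timezone.utc)

for rec in sorted(records, key=parse_dt):
    if rec in invalid:
        continue  # skipped, like the `log.warning()` branch above
    print(rec['tid'])  # -> c, a
```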
@@ -51,6 +51,7 @@ __brokers__: list[str] = [
     'ib',
     'kraken',
     'kucoin',
+    'deribit',

     # broken but used to work
     # 'questrade',

@@ -61,7 +62,6 @@ __brokers__: list[str] = [
     # wstrade
     # iex

-    # deribit
     # bitso
 ]
|  |  | |||
|  | @ -96,7 +96,10 @@ async def _setup_persistent_brokerd( | |||
|     # - `open_symbol_search()` | ||||
|     # NOTE: see ep invocation details inside `.data.feed`. | ||||
|     try: | ||||
|         async with trio.open_nursery() as service_nursery: | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as service_nursery | ||||
|         ): | ||||
|             bus: _FeedsBus = feed.get_feed_bus( | ||||
|                 brokername, | ||||
|                 service_nursery, | ||||
|  |  | |||
@@ -374,9 +374,14 @@ class Client:
                 pair: Pair = pair_type(**item)
             except Exception as e:
                 e.add_note(
-                    "\nDon't panic, prolly stupid binance changed their symbology schema again..\n"
-                    'Check out their API docs here:\n\n'
-                    'https://binance-docs.github.io/apidocs/spot/en/#exchange-information'
+                    f'\n'
+                    f'New or removed field we need to codify!\n'
+                    f'pair-type: {pair_type!r}\n'
+                    f'\n'
+                    f"Don't panic, prolly stupid binance changed their symbology schema again..\n"
+                    f'Check out their API docs here:\n'
+                    f'\n'
+                    f'https://binance-docs.github.io/apidocs/spot/en/#exchange-information\n'
                 )
                 raise
             pair_table[pair.symbol.upper()] = pair
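The replacement above leans on `BaseException.add_note()` (Python 3.11+), which appends context to an in-flight exception without wrapping it in a new type; a tiny demo:

```python
try:
    try:
        raise ValueError('unknown field in payload')
    except Exception as e:
        # attach a hint for whoever catches it upstream; the
        # original exception type and traceback are preserved
        e.add_note('hint: the upstream schema may have changed')
        raise
except ValueError as e:
    print(e.__notes__)  # ['hint: the upstream schema may have changed']
```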
@@ -440,6 +440,7 @@ async def open_trade_dialog(
             #   - ledger: TransactionLedger

             async with (
+                tractor.trionics.collapse_eg(),
                 trio.open_nursery() as tn,
                 ctx.open_stream() as ems_stream,
             ):
@@ -448,7 +448,6 @@ async def subscribe(


 async def stream_quotes(
-
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,

@@ -460,6 +459,7 @@ async def stream_quotes(
 ) -> None:

     async with (
+        tractor.trionics.maybe_raise_from_masking_exc(),
         send_chan as send_chan,
         open_cached_client('binance') as client,
     ):
@@ -97,6 +97,13 @@ class Pair(Struct, frozen=True, kw_only=True):
     baseAsset: str
     baseAssetPrecision: int

+    permissionSets: list[list[str]]
+
+    # https://developers.binance.com/docs/binance-spot-api-docs#2025-08-26
+    # will become non-optional 2025-08-28?
+    # https://developers.binance.com/docs/binance-spot-api-docs#future-changes
+    pegInstructionsAllowed: bool|None = None
+
     filters: dict[
         str,
         str | int | float,

@@ -142,7 +149,11 @@ class SpotPair(Pair, frozen=True):
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
     permissions: list[str]
-    permissionSets: list[list[str]]

+    # can the paint botz create liq gaps even easier on this asset?
+    # Bp
+    # https://developers.binance.com/docs/binance-spot-api-docs/faqs/order_amend_keep_priority
+    amendAllowed: bool

     # NOTE: see `.data._symcache.SymbologyCache.load()` for why
     ns_path: str = 'piker.brokers.binance:SpotPair'
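Marking `pegInstructionsAllowed` with a default is what keeps decoding tolerant while binance phases the field in. A sketch of the mechanics, assuming `piker.types.Struct` behaves like the `msgspec.Struct` it appears to wrap:

```python
import msgspec

class Pair(msgspec.Struct, frozen=True, kw_only=True):
    symbol: str
    # optional until the venue makes it mandatory; decoding works
    # whether or not the field is present in the payload
    pegInstructionsAllowed: bool | None = None

old = msgspec.json.decode(b'{"symbol": "BTCUSDT"}', type=Pair)
new = msgspec.json.decode(
    b'{"symbol": "BTCUSDT", "pegInstructionsAllowed": true}',
    type=Pair,
)
assert old.pegInstructionsAllowed is None
assert new.pegInstructionsAllowed is True
```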
@@ -25,6 +25,7 @@ from .api import (
     get_client,
 )
 from .feed import (
+    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,

@@ -34,15 +35,20 @@ from .feed import (
     # open_trade_dialog,
     # norm_trade_records,
 # )
+from .venues import (
+    OptionPair,
+)

 log = get_logger(__name__)

 __all__ = [
     'get_client',
 #    'trades_dialogue',
+    'get_mkt_info',
     'open_history_client',
     'open_symbol_search',
     'stream_quotes',
+    'OptionPair',
 #    'norm_trade_records',
 ]
(File diff suppressed because it is too large.)
piker/brokers/deribit/feed.py

@@ -18,38 +18,59 @@
 Deribit backend.

 '''
+from __future__ import annotations
 from contextlib import asynccontextmanager as acm
 from datetime import datetime
-from typing import Any, Optional, Callable
+from typing import (
+    # Any,
+    # Optional,
+    Callable,
+)
+# from pprint import pformat
 import time

+import cryptofeed
 import trio
 from trio_typing import TaskStatus
-import pendulum
-from rapidfuzz import process as fuzzy
+from pendulum import (
+    from_timestamp,
+)
 import numpy as np
 import tractor

-from piker.brokers import open_cached_client
-from piker.log import get_logger, get_console_log
-from piker.data import ShmArray
-from piker.brokers._util import (
-    BrokerError,
-)
+from piker.accounting import (
+    Asset,
+    MktPair,
+    unpack_fqme,
+)
+from piker.brokers import (
+    open_cached_client,
+    NoData,
+    DataUnavailable,
+)
+from piker._cacheables import (
+    async_lifo_cache,
+)
-
-from cryptofeed import FeedHandler
-from cryptofeed.defines import (
-    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
-)
-from cryptofeed.symbols import Symbol
+from piker.log import (
+    get_logger,
+    mk_repr,
+)
+from piker.data.validate import FeedInit


 from .api import (
-    Client, Trade,
-    get_config,
-    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
+    Client,
+    # get_config,
+    piker_sym_to_cb_sym,
+    cb_sym_to_deribit_inst,
+    str_to_cb_sym,
     maybe_open_price_feed
 )
+from .venues import (
+    Pair,
+    OptionPair,
+    Trade,
+)

 _spawn_kwargs = {
     'infect_asyncio': True,

@@ -64,90 +64,215 @@ async def open_history_client(
     mkt: MktPair,
 ) -> tuple[Callable, int]:

-    instrument: str = mkt.bs_fqme
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('deribit') as client:

+        pair: OptionPair = client._pairs[mkt.dst.name]
+        # XXX NOTE, the cuckers use ms !!!
+        creation_time_s: int = pair.creation_timestamp/1000
+
         async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
+            timeframe: float,
+            end_dt: datetime | None = None,
+            start_dt: datetime | None = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
+            if timeframe != 60:
+                raise DataUnavailable('Only 1m bars are supported')
+
-            array = await client.bars(
-                instrument,
+            array: np.ndarray = await client.bars(
+                mkt,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
             if len(array) == 0:
-                raise DataUnavailable
+                if (
+                    end_dt is None
+                ):
+                    raise DataUnavailable(
+                        'No history seems to exist yet?\n\n'
+                        f'{mkt}'
+                    )
+                elif (
+                    end_dt
+                    and
+                    end_dt.timestamp() < creation_time_s
+                ):
+                    # the contract can't have history
+                    # before it was created.
+                    pair_type_str: str = type(pair).__name__
+                    create_dt: datetime = from_timestamp(creation_time_s)
+                    raise DataUnavailable(
+                        f'No history prior to\n'
+                        f'`{pair_type_str}.creation_timestamp: int = '
+                        f'{pair.creation_timestamp}\n\n'
+                        f'------ deribit sux ------\n'
+                        f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n'
+                        f'creation_time_s: {creation_time_s}\n'
+                        f'create_dt: {create_dt}\n'
+                    )
+                raise NoData(
+                    f'No frame for {start_dt} -> {end_dt}\n'
+                )

-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            start_dt = from_timestamp(array[0]['time'])
+            end_dt = from_timestamp(array[-1]['time'])
+
+            times = array['time']
+            if not times.any():
+                raise ValueError(
+                    'Bad frame with null-times?\n\n'
+                    f'{times}'
+                )
+
+            if end_dt is None:
+                inow: int = round(time.time())
+                if (inow - times[-1]) > 60:
+                    await tractor.pause()

             return array, start_dt, end_dt

-        yield get_ohlc, {'erlangs': 3, 'rate': 3}
+        yield (
+            get_ohlc,
+            {  # backfill config
+                'erlangs': 3,
+                'rate': 3,
+            }
+        )


+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair|OptionPair] | None:
+
+    # uppercase since kraken bs_mktid is always upper
+    if 'deribit' not in fqme.lower():
+        fqme += '.deribit'
+
+    mkt_mode: str = ''
+    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)
+
+    # NOTE: we always upper case all tokens to be consistent with
+    # binance's symbology style for pairs, like `BTCUSDT`, but in
+    # theory we could also just keep things lower case; as long as
+    # we're consistent and the symcache matches whatever this func
+    # returns, always!
+    expiry: str = expiry.upper()
+    venue: str = venue.upper()
+    # venue_lower: str = venue.lower()
+
+    mkt_mode: str = 'option'
+
+    async with open_cached_client(
+        'deribit',
+    ) as client:
+
+        assets: dict[str, Asset] = await client.get_assets()
+        pair_str: str = mkt_ep.lower()
+
+        pair: Pair = await client.exch_info(
+            sym=pair_str,
+        )
+        mkt_mode = pair.venue
+        client.mkt_mode = mkt_mode
+
+        dst: Asset | None = assets.get(pair.bs_dst_asset)
+        src: Asset | None = assets.get(pair.bs_src_asset)
+
+        mkt = MktPair(
+            dst=dst,
+            src=src,
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=pair.symbol,
+            venue=mkt_mode,
+            broker='deribit',
+            _atype=mkt_mode,
+            _fqme_without_src=True,
+
+            # expiry=pair.expiry,
+            # XXX TODO, currently we don't use it since it's
+            # already "described" in the `OptionPair.symbol: str`
+            # and if we slap in the ISO repr it's kinda hideous..
+            # -[ ] figure out the best either std
+        )
+        return mkt, pair


 async def stream_quotes(
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
     loglevel: str = None,

     # startup sync
     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    sym = symbols[0]
+    '''
+    Open a live quote stream for the market set defined by `symbols`.
+
+    Internally this starts a `cryptofeed.FeedHandler` inside an
+    `asyncio`-side task and relays L1 and `Trade` msgs here to our
+    `trio.Task`.
+
+    '''
+    sym = symbols[0].split('.')[0]
+    init_msgs: list[FeedInit] = []
+
+    # multiline nested `dict` formatter (since rn quote-msgs are
+    # just that).
+    pfmt: Callable[[str], str] = mk_repr(
+        # so we can see `deribit`'s delightfully mega-long bs fields..
+        maxstring=100,
+    )

     async with (
         open_cached_client('deribit') as client,
         send_chan as send_chan
     ):
+        mkt: MktPair
+        pair: Pair
+        mkt, pair = await get_mkt_info(sym)

-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            sym: {
-                'symbol_info': {
-                    'asset_type': 'option',
-                    'price_tick_size': 0.0005
-                },
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
-
-        nsym = piker_sym_to_cb_sym(sym)
-
-        async with maybe_open_price_feed(sym) as stream:
-
-            cache = await client.cache_symbols()
-
-            last_trades = (await client.last_trades(
-                cb_sym_to_deribit_inst(nsym), count=1)).trades
-
-            if len(last_trades) == 0:
-                last_trade = None
-                async for typ, quote in stream:
-                    if typ == 'trade':
-                        last_trade = Trade(**(quote['data']))
-                        break
-
-            else:
-                last_trade = Trade(**(last_trades[0]))
-
-            first_quote = {
+        # build out init msgs according to latest spec
+        init_msgs.append(
+            FeedInit(
+                mkt_info=mkt,
+            )
+        )
+        # build `cryptofeed` feed-handle
+        cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym)
+
+        from_cf: tractor.to_asyncio.LinkedTaskChannel
+        async with maybe_open_price_feed(sym) as from_cf:
+
+            # load the "last trades" summary
+            last_trades_res: cryptofeed.LastTradesResult = await client.last_trades(
+                cb_sym_to_deribit_inst(cf_sym),
+                count=1,
+            )
+            last_trades: list[Trade] = last_trades_res.trades
+
+            # TODO, do we even need this or will the above always
+            # work?
+            # if not last_trades:
+            #     await tractor.pause()
+            #     async for typ, quote in from_cf:
+            #         if typ == 'trade':
+            #             last_trade = Trade(**(quote['data']))
+            #             break
+
+            # else:
+            last_trade = Trade(
+                **(last_trades[0])
+            )
+
+            first_quote: dict = {
                 'symbol': sym,
                 'last': last_trade.price,
                 'brokerd_ts': last_trade.timestamp,

@@ -158,13 +304,84 @@ async def stream_quotes(
                     'broker_ts': last_trade.timestamp
                 }]
             }
-            task_status.started((init_msgs,  first_quote))
+            task_status.started((
+                init_msgs,
+                first_quote,
+            ))

             feed_is_live.set()

-            async for typ, quote in stream:
-                topic = quote['symbol']
-                await send_chan.send({topic: quote})
+            # NOTE XXX, static for now!
+            # => since this only handles ONE mkt feed at a time we
+            # don't need a lookup table to map interleaved quotes
+            # from multiple possible mkt-pairs
+            topic: str = mkt.bs_fqme
+
+            # deliver until cancelled
+            async for typ, ref in from_cf:
+                match typ:
+                    case 'trade':
+                        trade: cryptofeed.types.Trade = ref
+
+                        # TODO, re-impl this according to the ideal
+                        # fqme for opts that we choose!!
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(trade.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'last': trade.price,
+                            'broker_ts': time.time(),
+                            # ^TODO, name this `brokerd/datad_ts` and
+                            # use `time.time_ns()` ??
+                            'ticks': [{
+                                'type': 'trade',
+                                'price': float(trade.price),
+                                'size': float(trade.amount),
+                                'broker_ts': trade.timestamp,
+                            }],
+                        }
+                        log.info(
+                            f'deribit {typ!r} quote for {sym!r}\n\n'
+                            f'{trade}\n\n'
+                            f'{pfmt(piker_quote)}\n'
+                        )
+
+                    case 'l1':
+                        book: cryptofeed.types.L1Book = ref
+
+                        # TODO, so this is where we can possibly change things
+                        # and instead lever the `MktPair.bs_fqme: str` output?
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(book.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'ticks': [
+
+                                {'type': 'bid',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size)},
+
+                                {'type': 'bsize',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size),},
+
+                                {'type': 'ask',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),},
+
+                                {'type': 'asize',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),}
+                            ]
+                        }
+
+                await send_chan.send({
+                    topic: piker_quote,
+                })


 @tractor.context
@@ -174,12 +391,21 @@ async def open_symbol_search(
     async with open_cached_client('deribit') as client:

         # load all symbols locally for fast search
-        cache = await client.cache_symbols()
+        # cache = client._pairs
         await ctx.started()

         async with ctx.open_stream() as stream:
+            pattern: str
             async for pattern in stream:
-                # repack in dict form
-                await stream.send(
-                    await client.search_symbols(pattern))
+
+                # NOTE: pattern fuzzy-matching is done within
+                # the method impl.
+                pairs: dict[str, Pair] = await client.search_symbols(
+                    pattern,
+                )
+                # repack in fqme-keyed table
+                byfqme: dict[str, Pair] = {}
+                for pair in pairs.values():
+                    byfqme[pair.bs_fqme] = pair
+
+                await stream.send(byfqme)
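The relayed quote msgs are plain `dict`s keyed by fqme with a `ticks` list; a minimal consumer sketch (the tick `type` names are copied from the hunk above, the symbol and values are hypothetical):

```python
quote: dict = {
    'symbol': 'btc-hypothetical-option.deribit',
    'last': 0.042,
    'ticks': [
        {'type': 'trade', 'price': 0.042, 'size': 1.0, 'broker_ts': 1725177600.0},
        {'type': 'bid', 'price': 0.041, 'size': 10.0},
        {'type': 'ask', 'price': 0.043, 'size': 7.5},
    ],
}

# dispatch on tick type just like the `match typ:` relay loop above
for tick in quote['ticks']:
    match tick['type']:
        case 'trade':
            print(f'last trade: {tick["price"]} x {tick["size"]}')
        case 'bid' | 'ask':
            print(f'{tick["type"]}: {tick["price"]} x {tick["size"]}')
```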
|  | @ -0,0 +1,196 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| Per market data-type definitions and schemas types. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import pendulum | ||||
| from typing import ( | ||||
|     Literal, | ||||
|     Optional, | ||||
| ) | ||||
| from decimal import Decimal | ||||
| 
 | ||||
| from piker.types import Struct | ||||
| 
 | ||||
| 
 | ||||
| # API endpoint paths by venue / sub-API | ||||
| _domain: str = 'deribit.com' | ||||
| _url = f'https://www.{_domain}' | ||||
| 
 | ||||
| # WEBsocketz | ||||
| _ws_url: str = f'wss://www.{_domain}/ws/api/v2' | ||||
| 
 | ||||
| # test nets | ||||
| _testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2' | ||||
| 
 | ||||
| MarketType = Literal[ | ||||
|     'option' | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def get_api_eps(venue: MarketType) -> tuple[str, str]: | ||||
|     ''' | ||||
|     Return API ep root paths per venue. | ||||
| 
 | ||||
|     ''' | ||||
|     return { | ||||
|         'option': ( | ||||
|             _ws_url, | ||||
|         ), | ||||
|     }[venue] | ||||
| 
 | ||||
| 
 | ||||
| class Pair(Struct, frozen=True, kw_only=True): | ||||
| 
 | ||||
|     symbol: str | ||||
| 
 | ||||
|     # src | ||||
|     quote_currency: str # 'BTC' | ||||
| 
 | ||||
|     # dst | ||||
|     base_currency: str # "BTC", | ||||
| 
 | ||||
|     tick_size: float  # 0.0001 | ||||
|     # [{'above_price': 0.005, 'tick_size': 0.0005}] | ||||
|     tick_size_steps: list[dict[str, float]] | ||||
| 
 | ||||
|     @property | ||||
|     def price_tick(self) -> Decimal: | ||||
|         return Decimal(str(self.tick_size_steps[0]['above_price'])) | ||||
| 
 | ||||
|     @property | ||||
|     def size_tick(self) -> Decimal: | ||||
|         return Decimal(str(self.tick_size)) | ||||
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         return f'{self.symbol}' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_mktid(self) -> str: | ||||
|         return f'{self.symbol}.{self.venue}' | ||||
| 
 | ||||
| 
 | ||||
| class OptionPair(Pair, frozen=True): | ||||
| 
 | ||||
|     taker_commission: float # 0.0003 | ||||
|     strike: float # 5000.0 | ||||
|     settlement_period: str # 'day' | ||||
|     settlement_currency: str # "BTC", | ||||
|     rfq: bool # false | ||||
|     price_index: str # 'btc_usd' | ||||
|     option_type: str # 'call' | ||||
|     min_trade_amount: float # 0.1 | ||||
|     maker_commission: float # 0.0003 | ||||
|     kind: str # 'option' | ||||
|     is_active: bool # true | ||||
|     instrument_type: str # 'reversed' | ||||
|     instrument_name: str # 'BTC-1SEP24-55000-C' | ||||
|     instrument_id: int # 364671 | ||||
|     expiration_timestamp: int # 1725177600000 | ||||
|     creation_timestamp: int # 1724918461000 | ||||
|     counter_currency: str # 'USD'  | ||||
|     contract_size: float # '1.0' | ||||
|     block_trade_tick_size: float # '0.0001' | ||||
|     block_trade_min_trade_amount: int # '25' | ||||
|     block_trade_commission: float # '0.003' | ||||
| 
 | ||||
|     # NOTE: see `.data._symcache.SymbologyCache.load()` for why | ||||
|     ns_path: str = 'piker.brokers.deribit:OptionPair' | ||||
| 
 | ||||
|     # TODO, impl this without the MM:SS part of | ||||
|     # the `'THH:MM:SS..'` etc.. | ||||
|     @property | ||||
|     def expiry(self) -> str: | ||||
|         iso_date = pendulum.from_timestamp( | ||||
|             self.expiration_timestamp / 1000 | ||||
|         ).isoformat() | ||||
|         return iso_date  | ||||
| 
 | ||||
|     @property | ||||
|     def venue(self) -> str: | ||||
|         return f'{self.instrument_type}_option' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_fqme(self) -> str: | ||||
|         return f'{self.symbol}' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_src_asset(self) -> str: | ||||
|         return f'{self.quote_currency}' | ||||
| 
 | ||||
|     @property | ||||
|     def bs_dst_asset(self) -> str: | ||||
|         return f'{self.symbol}' | ||||
| 
 | ||||
| 
 | ||||
| PAIRTYPES: dict[MarketType, type[Pair]] = { | ||||
|     'option': OptionPair, | ||||
| } | ||||
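| 
 | ||||
| # e.g. a (hypothetical) decode sketch: pick the pair-struct type | ||||
| # for a venue and load an instrument msg from the API, | ||||
| # | ||||
| #   pair_type: type[Pair] = PAIRTYPES['option'] | ||||
| #   pair: Pair = pair_type(**instrument_msg)  # msg: a `dict` from deribit | ||||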
| 
 | ||||
| 
 | ||||
| class JSONRPCResult(Struct): | ||||
|     id: int | ||||
|     usIn: int | ||||
|     usOut: int | ||||
|     usDiff: int | ||||
|     testnet: bool | ||||
|     jsonrpc: str = '2.0' | ||||
|     error: Optional[dict] = None | ||||
|     result: Optional[list[dict]] = None | ||||
| 
 | ||||
| 
 | ||||
| class JSONRPCChannel(Struct): | ||||
|     method: str | ||||
|     params: dict | ||||
|     jsonrpc: str = '2.0' | ||||
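| 
 | ||||
| # e.g. a sketch (params illustrative, not verified against the | ||||
| # live API) of a deribit ws subscription msg packed with the | ||||
| # above struct, | ||||
| # | ||||
| #   sub = JSONRPCChannel( | ||||
| #       method='public/subscribe', | ||||
| #       params={'channels': ['trades.BTC-PERPETUAL.raw']}, | ||||
| #   ) | ||||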
| 
 | ||||
| 
 | ||||
| class KLinesResult(Struct): | ||||
|     low: list[float] | ||||
|     cost: list[float] | ||||
|     high: list[float] | ||||
|     open: list[float] | ||||
|     close: list[float] | ||||
|     ticks: list[int] | ||||
|     status: str | ||||
|     volume: list[float] | ||||
| 
 | ||||
| 
 | ||||
| class Trade(Struct): | ||||
|     iv: float | ||||
|     price: float | ||||
|     amount: float | ||||
|     trade_id: str | ||||
|     contracts: float | ||||
|     direction: str | ||||
|     trade_seq: int | ||||
|     timestamp: int | ||||
|     mark_price: float | ||||
|     index_price: float | ||||
|     tick_direction: int | ||||
|     instrument_name: str | ||||
|     # NOTE: no trailing commas here; they'd turn these | ||||
|     # defaults into 1-tuples.. | ||||
|     combo_id: Optional[str] = '' | ||||
|     combo_trade_id: Optional[int] = 0 | ||||
|     block_trade_id: Optional[str] = '' | ||||
|     block_trade_leg_count: Optional[int] = 0 | ||||
| 
 | ||||
| 
 | ||||
| class LastTradesResult(Struct): | ||||
|     trades: list[Trade] | ||||
|     has_more: bool | ||||
|  | @ -20,6 +20,11 @@ runnable script-programs. | |||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from datetime import (  # noqa | ||||
|     datetime, | ||||
|     date, | ||||
|     tzinfo as TzInfo, | ||||
| ) | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Literal, | ||||
|  | @ -34,6 +39,7 @@ from piker.brokers._util import get_logger | |||
| if TYPE_CHECKING: | ||||
|     from .api import Client | ||||
|     from ib_insync import IB | ||||
|     import i3ipc | ||||
| 
 | ||||
| log = get_logger('piker.brokers.ib') | ||||
| 
 | ||||
|  | @ -48,6 +54,37 @@ _reset_tech: Literal[ | |||
| ] = 'vnc' | ||||
| 
 | ||||
| 
 | ||||
| no_setup_msg: str = ( | ||||
|     'No data reset hack test setup for {vnc_sockaddr}!\n' | ||||
|     'See config setup tips @\n' | ||||
|     'https://github.com/pikers/piker/tree/master/piker/brokers/ib' | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| def try_xdo_manual( | ||||
|     vnc_sockaddr: str, | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Do the "manual" `xdo`-based screen switch + click | ||||
|     combo since apparently the `asyncvnc` client ain't workin.. | ||||
| 
 | ||||
|     Note this is only meant as a backup method for Xorg users, | ||||
|     ideally you can use a real vnc client and the `vnc_click_hack()` | ||||
|     impl! | ||||
| 
 | ||||
|     ''' | ||||
|     global _reset_tech | ||||
|     try: | ||||
|         i3ipc_xdotool_manual_click_hack() | ||||
|         _reset_tech = 'i3ipc_xdotool' | ||||
|         return True | ||||
|     except OSError: | ||||
|         log.exception( | ||||
|             no_setup_msg.format(vnc_sockaddr=vnc_sockaddr) | ||||
|         ) | ||||
|         return False | ||||
| 
 | ||||
| 
 | ||||
| async def data_reset_hack( | ||||
|     # vnc_host: str, | ||||
|     client: Client, | ||||
|  | @ -90,15 +127,9 @@ async def data_reset_hack( | |||
|     vnc_port: int | ||||
|     vnc_sockaddr: tuple[str] | None = client.conf.get('vnc_addrs') | ||||
| 
 | ||||
|     no_setup_msg:str = ( | ||||
|         f'No data reset hack test setup for {vnc_sockaddr}!\n' | ||||
|         'See config setup tips @\n' | ||||
|         'https://github.com/pikers/piker/tree/master/piker/brokers/ib' | ||||
|     ) | ||||
| 
 | ||||
|     if not vnc_sockaddr: | ||||
|         log.warning( | ||||
|             no_setup_msg | ||||
|             no_setup_msg.format(vnc_sockaddr=vnc_sockaddr) | ||||
|             + | ||||
|             'REQUIRES A `vnc_addrs: array` ENTRY' | ||||
|         ) | ||||
|  | @ -119,27 +150,38 @@ async def data_reset_hack( | |||
|                         port=vnc_port, | ||||
|                     ) | ||||
|                 ) | ||||
|             except OSError: | ||||
|                 if vnc_host != 'localhost': | ||||
|                     log.warning(no_setup_msg) | ||||
|                     return False | ||||
| 
 | ||||
|             except ( | ||||
|                 OSError,  # no VNC server avail.. | ||||
|                 PermissionError,  # asyncvnc pw fail.. | ||||
|             ): | ||||
|                 try: | ||||
|                     import i3ipc  # noqa  (since a deps dynamic check) | ||||
|                 except ModuleNotFoundError: | ||||
|                     log.warning(no_setup_msg) | ||||
|                     log.warning( | ||||
|                         no_setup_msg.format(vnc_sockaddr=vnc_sockaddr) | ||||
|                     ) | ||||
|                     return False | ||||
| 
 | ||||
|                 try: | ||||
|                     i3ipc_xdotool_manual_click_hack() | ||||
|                     _reset_tech = 'i3ipc_xdotool' | ||||
|                     return True | ||||
|                 except OSError: | ||||
|                     log.exception(no_setup_msg) | ||||
|                     return False | ||||
|                 if vnc_host not in { | ||||
|                     'localhost', | ||||
|                     '127.0.0.1', | ||||
|                 }: | ||||
|                     focussed, matches = i3ipc_fin_wins_titled() | ||||
|                     if not matches: | ||||
|                         log.warning( | ||||
|                             no_setup_msg.format(vnc_sockaddr=vnc_sockaddr) | ||||
|                         ) | ||||
|                         return False | ||||
|                     else: | ||||
|                         try_xdo_manual(vnc_sockaddr) | ||||
| 
 | ||||
|                 # localhost but no vnc-client or it borked.. | ||||
|                 else: | ||||
|                     try_xdo_manual(vnc_sockaddr) | ||||
| 
 | ||||
|         case 'i3ipc_xdotool': | ||||
|             i3ipc_xdotool_manual_click_hack() | ||||
|             try_xdo_manual(vnc_sockaddr) | ||||
|             # i3ipc_xdotool_manual_click_hack() | ||||
| 
 | ||||
|         case _ as tech: | ||||
|             raise RuntimeError(f'{tech} is not supported for reset tech!?') | ||||
|  | @ -178,9 +220,9 @@ async def vnc_click_hack( | |||
|         host, | ||||
|         port=port, | ||||
| 
 | ||||
|         # TODO: doesn't work see: | ||||
|         # https://github.com/barneygale/asyncvnc/issues/7 | ||||
|         # password='ibcansmbz', | ||||
|         # TODO: doesn't work? | ||||
|         # see, https://github.com/barneygale/asyncvnc/issues/7 | ||||
|         password='doggy', | ||||
| 
 | ||||
|     ) as client: | ||||
| 
 | ||||
|  | @ -194,70 +236,103 @@ async def vnc_click_hack( | |||
|         client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked | ||||
| 
 | ||||
| 
 | ||||
| def i3ipc_fin_wins_titled( | ||||
|     titles: list[str] = [ | ||||
|         'Interactive Brokers',  # tws running in i3 | ||||
|         'IB Gateway',  # gw running in i3 | ||||
|         # 'IB',  # gw running in i3 (newer version?) | ||||
| 
 | ||||
|         # !TODO, remote vnc instance | ||||
|         # -[ ] something in title (or other Con-props) that indicates | ||||
|         #   this is explicitly for ibrk sw? | ||||
|         #  |_[ ]  !can use modden spawn eventually! | ||||
|         'TigerVNC', | ||||
|         # 'vncviewer',  # the terminal.. | ||||
|     ], | ||||
| ) -> tuple[ | ||||
|     i3ipc.Con,  # orig focussed win | ||||
|     list[tuple[str, i3ipc.Con]],  # matching wins by title | ||||
| ]: | ||||
|     ''' | ||||
|     Attempt to find a local-DE window titled with an entry in | ||||
|     `titles`. | ||||
| 
 | ||||
|     If found, deliver the currently focussed window and all | ||||
|     matching `(title, i3ipc.Con)` pairs in a list. | ||||
| 
 | ||||
|     ''' | ||||
|     import i3ipc | ||||
|     ipc = i3ipc.Connection() | ||||
| 
 | ||||
|     # TODO: might be worth offering some kinda api for grabbing | ||||
|     # the window id from the pid? | ||||
|     # https://stackoverflow.com/a/2250879 | ||||
|     tree = ipc.get_tree() | ||||
|     focussed: i3ipc.Con = tree.find_focused() | ||||
| 
 | ||||
|     matches: list[i3ipc.Con] = [] | ||||
|     for name in titles: | ||||
|         results = tree.find_titled(name) | ||||
|         log.debug(f'results for {name}: {results}') | ||||
|         if results: | ||||
|             con = results[0] | ||||
|             matches.append(( | ||||
|                 name, | ||||
|                 con, | ||||
|             )) | ||||
| 
 | ||||
|     return ( | ||||
|         focussed, | ||||
|         matches, | ||||
|     ) | ||||
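| 
 | ||||
| # e.g. a minimal usage sketch: check which candidate windows | ||||
| # are up before attempting the click hack, | ||||
| # | ||||
| #   focussed, matches = i3ipc_fin_wins_titled() | ||||
| #   for name, con in matches: | ||||
| #       log.info(f'found {name!r} with window-id {con.window}') | ||||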
| 
 | ||||
| 
 | ||||
| def i3ipc_xdotool_manual_click_hack() -> None: | ||||
|     ''' | ||||
|     Do the data reset hack but expecting a local X-window using `xdotool`. | ||||
| 
 | ||||
|     ''' | ||||
|     import i3ipc | ||||
|     i3 = i3ipc.Connection() | ||||
| 
 | ||||
|     # TODO: might be worth offering some kinda api for grabbing | ||||
|     # the window id from the pid? | ||||
|     # https://stackoverflow.com/a/2250879 | ||||
|     t = i3.get_tree() | ||||
| 
 | ||||
|     orig_win_id = t.find_focused().window | ||||
| 
 | ||||
|     # for tws | ||||
|     win_names: list[str] = [ | ||||
|         'Interactive Brokers',  # tws running in i3 | ||||
|         'IB Gateway',  # gw running in i3 | ||||
|         # 'IB',  # gw running in i3 (newer version?) | ||||
|     ] | ||||
| 
 | ||||
|     focussed, matches = i3ipc_fin_wins_titled() | ||||
|     orig_win_id = focussed.window | ||||
|     try: | ||||
|         for name in win_names: | ||||
|             results = t.find_titled(name) | ||||
|             print(f'results for {name}: {results}') | ||||
|             if results: | ||||
|                 con = results[0] | ||||
|                 print(f'Resetting data feed for {name}') | ||||
|                 win_id = str(con.window) | ||||
|                 w, h = con.rect.width, con.rect.height | ||||
|         for name, con in matches: | ||||
|             print(f'Resetting data feed for {name}') | ||||
|             win_id = str(con.window) | ||||
|             w, h = con.rect.width, con.rect.height | ||||
| 
 | ||||
|                 # TODO: seems to be a few libs for python but not sure | ||||
|                 # if they support all the sub commands we need, order of | ||||
|                 # most recent commit history: | ||||
|                 # https://github.com/rr-/pyxdotool | ||||
|                 # https://github.com/ShaneHutter/pyxdotool | ||||
|                 # https://github.com/cphyc/pyxdotool | ||||
|             # TODO: seems to be a few libs for python but not sure | ||||
|             # if they support all the sub commands we need, order of | ||||
|             # most recent commit history: | ||||
|             # https://github.com/rr-/pyxdotool | ||||
|             # https://github.com/ShaneHutter/pyxdotool | ||||
|             # https://github.com/cphyc/pyxdotool | ||||
| 
 | ||||
|                 # TODO: only run the reconnect (2nd) kc on a detected | ||||
|                 # disconnect? | ||||
|                 for key_combo, timeout in [ | ||||
|                     # only required if we need a connection reset. | ||||
|                     # ('ctrl+alt+r', 12), | ||||
|                     # data feed reset. | ||||
|                     ('ctrl+alt+f', 6) | ||||
|                 ]: | ||||
|                     subprocess.call([ | ||||
|                         'xdotool', | ||||
|                         'windowactivate', '--sync', win_id, | ||||
|             # TODO: only run the reconnect (2nd) kc on a detected | ||||
|             # disconnect? | ||||
|             for key_combo, timeout in [ | ||||
|                 # only required if we need a connection reset. | ||||
|                 # ('ctrl+alt+r', 12), | ||||
|                 # data feed reset. | ||||
|                 ('ctrl+alt+f', 6) | ||||
|             ]: | ||||
|                 subprocess.call([ | ||||
|                     'xdotool', | ||||
|                     'windowactivate', '--sync', win_id, | ||||
| 
 | ||||
|                         # move mouse to bottom left of window (where | ||||
|                         # there should be nothing to click). | ||||
|                         'mousemove_relative', '--sync', str(w-4), str(h-4), | ||||
|                     # move mouse to bottom left of window (where | ||||
|                     # there should be nothing to click). | ||||
|                     'mousemove_relative', '--sync', str(w-4), str(h-4), | ||||
| 
 | ||||
|                         # NOTE: we may need to stick a `--retry 3` in here.. | ||||
|                         'click', '--window', win_id, | ||||
|                         '--repeat', '3', '1', | ||||
|                     # NOTE: we may need to stick a `--retry 3` in here.. | ||||
|                     'click', '--window', win_id, | ||||
|                     '--repeat', '3', '1', | ||||
| 
 | ||||
|                         # hackzorzes | ||||
|                         'key', key_combo, | ||||
|                         ], | ||||
|                         timeout=timeout, | ||||
|                     ) | ||||
|                     # hackzorzes | ||||
|                     'key', key_combo, | ||||
|                     ], | ||||
|                     timeout=timeout, | ||||
|                 ) | ||||
| 
 | ||||
|         # re-activate and focus original window | ||||
|         subprocess.call([ | ||||
|  | @ -267,3 +342,99 @@ def i3ipc_xdotool_manual_click_hack() -> None: | |||
|         ]) | ||||
|     except subprocess.TimeoutExpired: | ||||
|         log.exception('xdotool timed out?') | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def is_current_time_in_range( | ||||
|     start_dt: datetime, | ||||
|     end_dt: datetime, | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Check if current time is within the datetime range. | ||||
| 
 | ||||
|     Compare using the same timezone as provided by the range's | ||||
|     `start_dt.tzinfo` value. | ||||
| 
 | ||||
|     ''' | ||||
|     now: datetime = datetime.now(start_dt.tzinfo) | ||||
|     return start_dt <= now <= end_dt | ||||
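| 
 | ||||
| # e.g. a quick sketch (tz/values hypothetical) for a NY-ish | ||||
| # cash sesh, | ||||
| # | ||||
| #   from datetime import timezone | ||||
| #   sesh_open = datetime(2024, 1, 2, 9, 30, tzinfo=timezone.utc) | ||||
| #   sesh_close = datetime(2024, 1, 2, 16, 0, tzinfo=timezone.utc) | ||||
| #   is_current_time_in_range(sesh_open, sesh_close)  # -> bool | ||||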
| 
 | ||||
| 
 | ||||
| # TODO, put this into `._util` and call it from here! | ||||
| # | ||||
| # NOTE, this was generated by @guille from a gpt5 prompt | ||||
| # and was originally thought to be needed before learning about | ||||
| # `ib_insync.contract.ContractDetails._parseSessions()` and | ||||
| # its downstream meths.. | ||||
| # | ||||
| # This is still likely useful to keep for now to parse the | ||||
| # `.tradingHours: str` value manually if we ever decide | ||||
| # to move off `ib_async` and implement our own `trio`/`anyio` | ||||
| # based version Bp | ||||
| # | ||||
| # >attempt to parse the gnarly ib "time stampy thing" they | ||||
| # >do for "venue hours" with this.. written by | ||||
| # >gpt5-"thinking", | ||||
| # | ||||
| 
 | ||||
| 
 | ||||
| def parse_trading_hours( | ||||
|     spec: str, | ||||
|     tz: TzInfo|None = None | ||||
| ) -> dict[ | ||||
|     date, | ||||
|     tuple[datetime, datetime]|None, | ||||
| ]: | ||||
|     ''' | ||||
|     Parse venue hours like: | ||||
|     'YYYYMMDD:HHMM-YYYYMMDD:HHMM;YYYYMMDD:CLOSED;...' | ||||
| 
 | ||||
|     Returns `dict[date] = (open_dt, close_dt)` or `None` if | ||||
|     closed. | ||||
| 
 | ||||
|     ''' | ||||
|     if ( | ||||
|         not isinstance(spec, str) | ||||
|         or | ||||
|         not spec | ||||
|     ): | ||||
|         raise ValueError('spec must be a non-empty string') | ||||
| 
 | ||||
|     out: dict[ | ||||
|         date, | ||||
|         tuple[datetime, datetime]|None, | ||||
|     ] = {} | ||||
| 
 | ||||
|     for part in (p.strip() for p in spec.split(';') if p.strip()): | ||||
|         if part.endswith(':CLOSED'): | ||||
|             day_s, _ = part.split(':', 1) | ||||
|             d = datetime.strptime(day_s, '%Y%m%d').date() | ||||
|             out[d] = None | ||||
|             continue | ||||
| 
 | ||||
|         try: | ||||
|             start_s, end_s = part.split('-', 1) | ||||
|             start_dt = datetime.strptime(start_s, '%Y%m%d:%H%M') | ||||
|             end_dt = datetime.strptime(end_s, '%Y%m%d:%H%M') | ||||
|         except ValueError as exc: | ||||
|             raise ValueError(f'invalid segment: {part}') from exc | ||||
| 
 | ||||
|         if tz is not None: | ||||
|             start_dt = start_dt.replace(tzinfo=tz) | ||||
|             end_dt = end_dt.replace(tzinfo=tz) | ||||
| 
 | ||||
|         out[start_dt.date()] = (start_dt, end_dt) | ||||
| 
 | ||||
|     return out | ||||
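| 
 | ||||
| # e.g. the expected mapping for a (made up) 2-segment spec, | ||||
| # | ||||
| #   parse_trading_hours('20240102:0930-20240102:1600;20240103:CLOSED') | ||||
| #   # => { | ||||
| #   #    date(2024, 1, 2): (datetime(2024, 1, 2, 9, 30), | ||||
| #   #                       datetime(2024, 1, 2, 16, 0)), | ||||
| #   #    date(2024, 1, 3): None, | ||||
| #   # } | ||||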
| 
 | ||||
| 
 | ||||
| # ORIG desired usage, | ||||
| # | ||||
| # TODO, for non-drunk tomorrow, | ||||
| # - call above fn and check that `output[today] is not None` | ||||
| # trading_hrs: dict = parse_trading_hours( | ||||
| #     details.tradingHours | ||||
| # ) | ||||
| # liq_hrs: dict = parse_trading_hours( | ||||
| #     details.liquidHours | ||||
|     # ) | ||||
|  |  | |||
|  | @ -48,6 +48,7 @@ from bidict import bidict | |||
| import trio | ||||
| import tractor | ||||
| from tractor import to_asyncio | ||||
| from tractor import trionics | ||||
| from pendulum import ( | ||||
|     from_timestamp, | ||||
|     DateTime, | ||||
|  | @ -96,6 +97,10 @@ from ._util import ( | |||
|     get_logger, | ||||
| ) | ||||
| 
 | ||||
| # ?TODO? this can now be removed since it was originally to extend | ||||
| # with a `bar_vwap` field that we removed from the default ohlcv | ||||
| # dtype since it's better calculated in an FSP func | ||||
| # | ||||
| _bar_load_dtype: list[tuple[str, type]] = [ | ||||
|     # NOTE XXX: only part that's diff | ||||
|     # from our default fields where | ||||
|  | @ -333,15 +338,15 @@ class Client: | |||
|         fqme: str, | ||||
| 
 | ||||
|         # EST in ISO 8601 format is required... below is EPOCH | ||||
|         start_dt: datetime | str = "1970-01-01T00:00:00.000000-05:00", | ||||
|         end_dt: datetime | str = "", | ||||
|         start_dt: datetime|str = "1970-01-01T00:00:00.000000-05:00", | ||||
|         end_dt: datetime|str = "", | ||||
| 
 | ||||
|         # ohlc sample period in seconds | ||||
|         sample_period_s: int = 1, | ||||
| 
 | ||||
|         # optional "duration of time" equal to the | ||||
|         # length of the returned history frame. | ||||
|         duration: str | None = None, | ||||
|         duration: str|None = None, | ||||
| 
 | ||||
|         **kwargs, | ||||
| 
 | ||||
|  | @ -715,8 +720,8 @@ class Client: | |||
| 
 | ||||
|     async def find_contracts( | ||||
|         self, | ||||
|         pattern: str | None = None, | ||||
|         contract: Contract | None = None, | ||||
|         pattern: str|None = None, | ||||
|         contract: Contract|None = None, | ||||
|         qualify: bool = True, | ||||
|         err_on_qualify: bool = True, | ||||
| 
 | ||||
|  | @ -861,7 +866,7 @@ class Client: | |||
|         self, | ||||
|         fqme: str, | ||||
| 
 | ||||
|     ) -> datetime | None: | ||||
|     ) -> datetime|None: | ||||
|         ''' | ||||
|         Return the first datetime stamp for `fqme` or `None` | ||||
|         on request failure. | ||||
|  | @ -917,7 +922,7 @@ class Client: | |||
|         tries: int = 100, | ||||
|         raise_on_timeout: bool = False, | ||||
| 
 | ||||
|     ) -> Ticker | None: | ||||
|     ) -> Ticker|None: | ||||
|         ''' | ||||
|         Return a single (snap) quote for symbol. | ||||
| 
 | ||||
|  | @ -929,7 +934,7 @@ class Client: | |||
|         ready: ticker.TickerUpdateEvent = ticker.updateEvent | ||||
| 
 | ||||
|         # ensure a last price gets filled in before we deliver quote | ||||
|         timeouterr: Exception | None = None | ||||
|         timeouterr: Exception|None = None | ||||
|         warnset: bool = False | ||||
|         for _ in range(tries): | ||||
| 
 | ||||
|  | @ -1369,8 +1374,8 @@ async def load_clients_for_trio( | |||
|     ''' | ||||
|     Pure async mngr proxy to ``load_aio_clients()``. | ||||
| 
 | ||||
|     This is a bootstrap entrypoing to call from | ||||
|     a ``tractor.to_asyncio.open_channel_from()``. | ||||
|     This is a bootstrap entrypoint to call from | ||||
|     a `tractor.to_asyncio.open_channel_from()`. | ||||
| 
 | ||||
|     ''' | ||||
|     async with load_aio_clients( | ||||
|  | @ -1391,7 +1396,10 @@ async def open_client_proxies() -> tuple[ | |||
|     async with ( | ||||
|         tractor.trionics.maybe_open_context( | ||||
|             acm_func=tractor.to_asyncio.open_channel_from, | ||||
|             kwargs={'target': load_clients_for_trio}, | ||||
|             kwargs={ | ||||
|                 'target': load_clients_for_trio, | ||||
|                 # ^XXX, kwarg to `open_channel_from()` | ||||
|             }, | ||||
| 
 | ||||
|             # lock around current actor task access | ||||
|             # TODO: maybe this should be the default in tractor? | ||||
|  | @ -1501,7 +1509,7 @@ class MethodProxy: | |||
|         self, | ||||
|         pattern: str, | ||||
| 
 | ||||
|     ) -> dict[str, Any] | trio.Event: | ||||
|     ) -> dict[str, Any]|trio.Event: | ||||
| 
 | ||||
|         ev = self.event_table.get(pattern) | ||||
| 
 | ||||
|  | @ -1538,7 +1546,7 @@ async def open_aio_client_method_relay( | |||
|     # relay all method requests to ``asyncio``-side client and deliver | ||||
|     # back results | ||||
|     while not to_trio._closed: | ||||
|         msg: tuple[str, dict] | dict | None = await from_trio.get() | ||||
|         msg: tuple[str, dict]|dict|None = await from_trio.get() | ||||
|         match msg: | ||||
|             case None:  # termination sentinel | ||||
|                 log.info('asyncio `Client` method-proxy SHUTDOWN!') | ||||
|  | @ -1584,7 +1592,8 @@ async def open_client_proxy( | |||
|             event_consumers=event_table, | ||||
|         ) as (first, chan), | ||||
| 
 | ||||
|         trio.open_nursery() as relay_n, | ||||
|         trionics.collapse_eg(),  # loose-ify | ||||
|         trio.open_nursery() as relay_tn, | ||||
|     ): | ||||
| 
 | ||||
|         assert isinstance(first, Client) | ||||
|  | @ -1624,7 +1633,7 @@ async def open_client_proxy( | |||
| 
 | ||||
|                     continue | ||||
| 
 | ||||
|         relay_n.start_soon(relay_events) | ||||
|         relay_tn.start_soon(relay_events) | ||||
| 
 | ||||
|         yield proxy | ||||
| 
 | ||||
|  |  | |||
|  | @ -34,6 +34,7 @@ import trio | |||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from tractor.to_asyncio import LinkedTaskChannel | ||||
| from tractor import trionics | ||||
| from ib_insync.contract import ( | ||||
|     Contract, | ||||
| ) | ||||
|  | @ -407,7 +408,7 @@ async def update_and_audit_pos_msg( | |||
| 
 | ||||
|     # TODO: make this a "propaganda" log level? | ||||
|     if ibpos.avgCost != msg.avg_price: | ||||
|         log.warning( | ||||
|         log.debug( | ||||
|             f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n' | ||||
|             f'ib: {ibfmtmsg}\n' | ||||
|             '---------------------------\n' | ||||
|  | @ -546,7 +547,10 @@ async def open_trade_dialog( | |||
|         ), | ||||
| 
 | ||||
|         # TODO: do this as part of `open_account()`!? | ||||
|         open_symcache('ib', only_from_memcache=True) as symcache, | ||||
|         open_symcache( | ||||
|             'ib', | ||||
|             only_from_memcache=True, | ||||
|         ) as symcache, | ||||
|     ): | ||||
|         # Open a trade ledgers stack for appending trade records over | ||||
|         # multiple accounts. | ||||
|  | @ -555,7 +559,9 @@ async def open_trade_dialog( | |||
|         tables: dict[str, Account] = {} | ||||
|         order_msgs: list[Status] = [] | ||||
|         conf = get_config() | ||||
|         accounts_def_inv: bidict[str, str] = bidict(conf['accounts']).inverse | ||||
|         accounts_def_inv: bidict[str, str] = bidict( | ||||
|             conf['accounts'] | ||||
|         ).inverse | ||||
| 
 | ||||
|         with ( | ||||
|             ExitStack() as lstack, | ||||
|  | @ -705,7 +711,11 @@ async def open_trade_dialog( | |||
|             # client-account and build out position msgs to deliver to | ||||
|             # EMS. | ||||
|             for acctid, acnt in tables.items(): | ||||
|                 active_pps, closed_pps = acnt.dump_active() | ||||
|                 active_pps: dict[str, Position] | ||||
|                 ( | ||||
|                     active_pps, | ||||
|                     closed_pps, | ||||
|                 ) = acnt.dump_active() | ||||
| 
 | ||||
|                 for pps in [active_pps, closed_pps]: | ||||
|                     piker_pps: list[Position] = list(pps.values()) | ||||
|  | @ -721,6 +731,7 @@ async def open_trade_dialog( | |||
|                         ) | ||||
|                         if ibpos: | ||||
|                             bs_mktid: str = str(ibpos.contract.conId) | ||||
| 
 | ||||
|                             msg = await update_and_audit_pos_msg( | ||||
|                                 acctid, | ||||
|                                 pikerpos, | ||||
|  | @ -738,7 +749,7 @@ async def open_trade_dialog( | |||
|                                 f'UNEXPECTED POSITION says IB => {msg.symbol}\n' | ||||
|                                 'Maybe they LIQUIDATED YOU or your ledger is wrong?\n' | ||||
|                             ) | ||||
|                             log.error(logmsg) | ||||
|                             log.debug(logmsg) | ||||
| 
 | ||||
|             await ctx.started(( | ||||
|                 all_positions, | ||||
|  | @ -747,21 +758,22 @@ async def open_trade_dialog( | |||
| 
 | ||||
|             async with ( | ||||
|                 ctx.open_stream() as ems_stream, | ||||
|                 trio.open_nursery() as n, | ||||
|                 trionics.collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 # relay existing open orders to ems | ||||
|                 for msg in order_msgs: | ||||
|                     await ems_stream.send(msg) | ||||
| 
 | ||||
|                 for client in set(aioclients.values()): | ||||
|                     trade_event_stream: LinkedTaskChannel = await n.start( | ||||
|                     trade_event_stream: LinkedTaskChannel = await tn.start( | ||||
|                         open_trade_event_stream, | ||||
|                         client, | ||||
|                     ) | ||||
| 
 | ||||
|                     # start order request handler **before** local trades | ||||
|                     # event loop | ||||
|                     n.start_soon( | ||||
|                     tn.start_soon( | ||||
|                         handle_order_requests, | ||||
|                         ems_stream, | ||||
|                         accounts_def, | ||||
|  | @ -769,7 +781,7 @@ async def open_trade_dialog( | |||
|                     ) | ||||
| 
 | ||||
|                     # allocate event relay tasks for each client connection | ||||
|                     n.start_soon( | ||||
|                     tn.start_soon( | ||||
|                         deliver_trade_events, | ||||
| 
 | ||||
|                         trade_event_stream, | ||||
|  | @ -1241,32 +1253,47 @@ async def deliver_trade_events( | |||
|                 # never relay errors for non-broker related issues | ||||
|                 # https://interactivebrokers.github.io/tws-api/message_codes.html | ||||
|                 code: int = err['error_code'] | ||||
|                 if code in { | ||||
|                     200,  # uhh | ||||
|                 reason: str = err['reason'] | ||||
|                 reqid: str = str(err['reqid']) | ||||
| 
 | ||||
|                 # "Warning:" msg codes, | ||||
|                 # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes | ||||
|                 # - 2109: 'Outside Regular Trading Hours' | ||||
|                 if 'Warning:' in reason: | ||||
|                     log.warning( | ||||
|                         f'Order-API-warning: {code!r}\n' | ||||
|                         f'reqid: {reqid!r}\n' | ||||
|                         f'\n' | ||||
|                         f'{pformat(err)}\n' | ||||
|                         # ^TODO? should we just print the `reason` | ||||
|                         # not the full `err`-dict? | ||||
|                     ) | ||||
|                     continue | ||||
| 
 | ||||
|                 # XXX known special (ignore) cases | ||||
|                 elif code in { | ||||
|                     200,  # uhh.. no idea | ||||
| 
 | ||||
|                     # hist pacing / connectivity | ||||
|                     162, | ||||
|                     165, | ||||
| 
 | ||||
|                     # WARNING codes: | ||||
|                     # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes | ||||
|                     # Attribute 'Outside Regular Trading Hours' is | ||||
|                     # " 'ignored based on the order type and | ||||
|                     # destination. PlaceOrder is now ' 'being | ||||
|                     # processed.', | ||||
|                     2109, | ||||
| 
 | ||||
|                     # XXX: lol this isn't even documented.. | ||||
|                     # 'No market data during competing live session' | ||||
|                     1669, | ||||
|                 }: | ||||
|                     log.error( | ||||
|                         f'Order-API-error which is non-cancel-causing ?!\n' | ||||
|                         f'\n' | ||||
|                         f'{pformat(err)}\n' | ||||
|                     ) | ||||
|                     continue | ||||
| 
 | ||||
|                 reqid: str = str(err['reqid']) | ||||
|                 reason: str = err['reason'] | ||||
| 
 | ||||
|                 if err['reqid'] == -1: | ||||
|                     log.error(f'TWS external order error:\n{pformat(err)}') | ||||
|                     log.error( | ||||
|                         f'TWS external order error ??\n' | ||||
|                         f'{pformat(err)}\n' | ||||
|                     ) | ||||
| 
 | ||||
|                 flow: dict = dict( | ||||
|                     flows.get(reqid) | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| # Copyright (C) 2018-forever Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -13,10 +13,12 @@ | |||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| """ | ||||
| Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``. | ||||
| 
 | ||||
| """ | ||||
| ''' | ||||
| Data feed endpoints pre-wrapped and ready for use with `tractor`/`trio` | ||||
| via "infected-asyncio-mode". | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| import asyncio | ||||
| from contextlib import ( | ||||
|  | @ -26,7 +28,6 @@ from dataclasses import asdict | |||
| from datetime import datetime | ||||
| from functools import partial | ||||
| from pprint import pformat | ||||
| from math import isnan | ||||
| import time | ||||
| from typing import ( | ||||
|     Any, | ||||
|  | @ -40,7 +41,6 @@ import numpy as np | |||
| from pendulum import ( | ||||
|     now, | ||||
|     from_timestamp, | ||||
|     # DateTime, | ||||
|     Duration, | ||||
|     duration as mk_duration, | ||||
| ) | ||||
|  | @ -69,7 +69,10 @@ from .api import ( | |||
|     Contract, | ||||
|     RequestError, | ||||
| ) | ||||
| from ._util import data_reset_hack | ||||
| from ._util import ( | ||||
|     data_reset_hack, | ||||
|     is_current_time_in_range, | ||||
| ) | ||||
| from .symbols import get_mkt_info | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|  | @ -184,7 +187,8 @@ async def open_history_client( | |||
| 
 | ||||
|             if ( | ||||
|                 start_dt | ||||
|                 and start_dt.timestamp() == 0 | ||||
|                 and | ||||
|                 start_dt.timestamp() == 0 | ||||
|             ): | ||||
|                 await tractor.pause() | ||||
| 
 | ||||
|  | @ -203,7 +207,7 @@ async def open_history_client( | |||
|             ): | ||||
|                 count += 1 | ||||
|                 mean += latency / count | ||||
|                 print( | ||||
|                 log.debug( | ||||
|                     f'HISTORY FRAME QUERY LATENCY: {latency}\n' | ||||
|                     f'mean: {mean}' | ||||
|                 ) | ||||
|  | @ -285,8 +289,9 @@ _pacing: str = ( | |||
| 
 | ||||
| async def wait_on_data_reset( | ||||
|     proxy: MethodProxy, | ||||
| 
 | ||||
|     reset_type: str = 'data', | ||||
|     timeout: float = 16, # float('inf'), | ||||
|     timeout: float = 16, | ||||
| 
 | ||||
|     task_status: TaskStatus[ | ||||
|         tuple[ | ||||
|  | @ -295,29 +300,47 @@ async def wait_on_data_reset( | |||
|         ] | ||||
|     ] = trio.TASK_STATUS_IGNORED, | ||||
| ) -> bool: | ||||
|     ''' | ||||
|     Wait on a (global-ish) "data-farm" event to be emitted | ||||
|     by the IB api server. | ||||
| 
 | ||||
|     # TODO: we might have to put a task lock around this | ||||
|     # method.. | ||||
|     hist_ev = proxy.status_event( | ||||
|     Allows syncing to reconnect event-messages emitted on the API | ||||
|     console, such as: | ||||
| 
 | ||||
|      - 'HMDS data farm connection is OK:ushmds' | ||||
|      - 'Market data farm is connecting:usfuture' | ||||
|      - 'Market data farm connection is OK:usfuture' | ||||
| 
 | ||||
|     Deliver a `(cs, done: Event)` pair to the caller to support | ||||
|     waiting on, or cancelling, the associated "data-reset-request"; | ||||
|     normally a manual data-reset-req is expected to be the cause of, | ||||
|     and thus trigger, such events (e.g. our click-hack-magic from | ||||
|     `.ib._util`). | ||||
| 
 | ||||
|     ''' | ||||
|     # ?TODO, do we need a task-lock around this method? | ||||
|     # | ||||
|     # register for an API "status event" wrapped for `trio`-sync. | ||||
|     hist_ev: trio.Event = proxy.status_event( | ||||
|         'HMDS data farm connection is OK:ushmds' | ||||
|     ) | ||||
| 
 | ||||
|     # TODO: other event messages we might want to try and | ||||
|     # wait for but i wasn't able to get any of this | ||||
|     # reliable.. | ||||
|     # | ||||
|     # ^TODO: other event-messages we might want to support waiting-for | ||||
|     # but i wasn't able to get reliable.. | ||||
|     # | ||||
|     # reconnect_start = proxy.status_event( | ||||
|     #     'Market data farm is connecting:usfuture' | ||||
|     # ) | ||||
|     # live_ev = proxy.status_event( | ||||
|     #     'Market data farm connection is OK:usfuture' | ||||
|     # ) | ||||
| 
 | ||||
|     # try to wait on the reset event(s) to arrive, a timeout | ||||
|     # will trigger a retry up to 6 times (for now). | ||||
|     client: Client = proxy._aio_ns | ||||
| 
 | ||||
|     done = trio.Event() | ||||
|     with trio.move_on_after(timeout) as cs: | ||||
| 
 | ||||
|         task_status.started((cs, done)) | ||||
| 
 | ||||
|         log.warning( | ||||
|  | @ -396,8 +419,9 @@ async def get_bars( | |||
|     bool,  # timed out hint | ||||
| ]: | ||||
|     ''' | ||||
|     Retrieve historical data from a ``trio``-side task using | ||||
|     a ``MethoProxy``. | ||||
|     Request-n-retrieve historical data frames from a `trio.Task` | ||||
|     using a `MethodProxy` to query the `asyncio`-side's | ||||
|     `.ib.api.Client` methods. | ||||
| 
 | ||||
|     ''' | ||||
|     global _data_resetter_task, _failed_resets | ||||
|  | @ -607,7 +631,10 @@ async def get_bars( | |||
|     # such that simultaneous symbol queries don't try data resetting | ||||
|     # too fast.. | ||||
|     unset_resetter: bool = False | ||||
|     async with trio.open_nursery() as nurse: | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as nurse | ||||
|     ): | ||||
| 
 | ||||
|         # start history request that we allow | ||||
|         # to run indefinitely until a result is acquired | ||||
|  | @ -653,14 +680,14 @@ async def get_bars( | |||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| # per-actor cache of inter-eventloop-chans | ||||
| _quote_streams: dict[str, trio.abc.ReceiveStream] = {} | ||||
| 
 | ||||
| 
 | ||||
| # TODO! update to the new style sig with, | ||||
| # `chan: to_asyncio.LinkedTaskChannel,` | ||||
| async def _setup_quote_stream( | ||||
| 
 | ||||
|     from_trio: asyncio.Queue, | ||||
|     to_trio: trio.abc.SendChannel, | ||||
| 
 | ||||
|     chan: tractor.to_asyncio.LinkedTaskChannel, | ||||
|     symbol: str, | ||||
|     opts: tuple[int] = ( | ||||
|         '375',  # RT trade volume (excludes utrades) | ||||
|  | @ -678,10 +705,13 @@ async def _setup_quote_stream( | |||
| 
 | ||||
| ) -> trio.abc.ReceiveChannel: | ||||
|     ''' | ||||
|     Stream a ticker using the std L1 api. | ||||
|     Stream L1 quotes via the `Ticker.updateEvent.connect(push)` | ||||
|     callback API by registering a `push` callback which simply | ||||
|     `chan.send_nowait()`s quote msgs back to the calling | ||||
|     parent-`trio.Task`-side. | ||||
| 
 | ||||
|     This task is ``asyncio``-side and must be called from | ||||
|     ``tractor.to_asyncio.open_channel_from()``. | ||||
|     NOTE, that this task-fn is run on the `asyncio.Task`-side ONLY | ||||
|     and is thus run via `tractor.to_asyncio.open_channel_from()`. | ||||
| 
 | ||||
|     ''' | ||||
|     global _quote_streams | ||||
|  | @ -689,37 +719,79 @@ async def _setup_quote_stream( | |||
|     async with load_aio_clients( | ||||
|         disconnect_on_exit=False, | ||||
|     ) as accts2clients: | ||||
|         caccount_name, client = get_preferred_data_client(accts2clients) | ||||
|         contract = contract or (await client.find_contract(symbol)) | ||||
|         to_trio.send_nowait(contract)  # cuz why not | ||||
|         ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts)) | ||||
| 
 | ||||
|         # NOTE: it's batch-wise and slow af but I guess could | ||||
|         # be good for backchecking? Seems to be every 5s maybe? | ||||
|         # XXX since this is an `asyncio.Task`, we must use | ||||
|         # tractor.pause_from_sync() | ||||
| 
 | ||||
|         caccount_name, client = get_preferred_data_client(accts2clients) | ||||
|         contract = ( | ||||
|             contract | ||||
|             or | ||||
|             (await client.find_contract(symbol)) | ||||
|         ) | ||||
|         chan.started_nowait(contract)  # cuz why not | ||||
|         ticker: Ticker = client.ib.reqMktData( | ||||
|             contract, | ||||
|             ','.join(opts), | ||||
|         ) | ||||
|         maybe_exc: BaseException|None = None | ||||
|         handler_tries: int = 0 | ||||
|         aio_task: asyncio.Task = asyncio.current_task() | ||||
| 
 | ||||
|         # ?TODO? this API is batch-wise and quite slow-af but, | ||||
|         # - seems to be 5s updates? | ||||
|         # - maybe we could use it for backchecking? | ||||
|         # | ||||
|         # ticker: Ticker = client.ib.reqTickByTickData( | ||||
|         #     contract, 'Last', | ||||
|         # ) | ||||
| 
 | ||||
|         # # define a simple queue push routine that streams quote packets | ||||
|         # # to trio over the ``to_trio`` memory channel. | ||||
|         # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore | ||||
|         # define a very naive queue-pushing callback that relays | ||||
|         # quote-packets directly the calling (parent) `trio.Task`. | ||||
|         # Ensure on teardown we cancel the feed via their cancel API. | ||||
|         # | ||||
|         def teardown(): | ||||
|             ''' | ||||
|             Disconnect our `push`-er callback and cancel the data-feed | ||||
|             for `contract`. | ||||
| 
 | ||||
|             ''' | ||||
|             nonlocal maybe_exc | ||||
|             ticker.updateEvent.disconnect(push) | ||||
|             log.error(f"Disconnected stream for `{symbol}`") | ||||
|             report: str = f'Disconnected mkt-data for {symbol!r} due to ' | ||||
|             if maybe_exc is not None: | ||||
|                 report += ( | ||||
|                     'error,\n' | ||||
|                     f'{maybe_exc!r}\n' | ||||
|                 ) | ||||
|                 log.error(report) | ||||
|             else: | ||||
|                 report += ( | ||||
|                     'cancellation.\n' | ||||
|                 ) | ||||
|                 log.cancel(report) | ||||
| 
 | ||||
|             client.ib.cancelMktData(contract) | ||||
| 
 | ||||
|             # decouple broadcast mem chan | ||||
|             _quote_streams.pop(symbol, None) | ||||
| 
 | ||||
|         def push(t: Ticker) -> None: | ||||
|             """ | ||||
|             Push quotes to trio task. | ||||
|         def push( | ||||
|             t: Ticker, | ||||
|             tries_before_raise: int = 6, | ||||
|         ) -> None: | ||||
|             ''' | ||||
|             Push quotes verbatim to parent-side `trio.Task`. | ||||
| 
 | ||||
|             """ | ||||
|             # log.debug(t) | ||||
|             ''' | ||||
|             nonlocal maybe_exc, handler_tries | ||||
|             # log.debug(f'new IB quote: {t}\n') | ||||
|             try: | ||||
|                 to_trio.send_nowait(t) | ||||
|                 chan.send_nowait(t) | ||||
| 
 | ||||
|                 # XXX TODO XXX replicate in `tractor` tests | ||||
|                 # as per `CancelledError`-handler notes below! | ||||
|                 # assert 0 | ||||
|             except ( | ||||
|                 trio.BrokenResourceError, | ||||
| 
 | ||||
|  | @ -734,35 +806,104 @@ async def _setup_quote_stream( | |||
|                 # resulting in tracebacks spammed to console.. | ||||
|                 # Manually do the dereg ourselves. | ||||
|                 teardown() | ||||
|             except trio.WouldBlock: | ||||
|                 # log.warning( | ||||
|                 #     f'channel is blocking symbol feed for {symbol}?' | ||||
|                 #     f'\n{to_trio.statistics}' | ||||
|                 # ) | ||||
|                 pass | ||||
| 
 | ||||
|             # except trio.WouldBlock: | ||||
|             #     # for slow debugging purposes to avoid clobbering prompt | ||||
|             #     # with log msgs | ||||
|             #     pass | ||||
|             # the inter-loop chan's buffer is full; report it | ||||
|             # loudly (instead of silently passing). | ||||
|             except trio.WouldBlock: | ||||
|                 log.exception( | ||||
|                     f'Asyncio->Trio `chan.send_nowait()` blocked !?\n' | ||||
|                     f'\n' | ||||
|                     f'{chan._to_trio.statistics()}\n' | ||||
|                 ) | ||||
| 
 | ||||
|             # ?TODO, handle re-connection attempts? | ||||
|             except BaseException as _berr: | ||||
|                 berr = _berr | ||||
|                 if handler_tries >= tries_before_raise: | ||||
|                     # breakpoint() | ||||
|                     maybe_exc = _berr | ||||
|                     # task.set_exception(berr) | ||||
|                     aio_task.cancel(msg=berr.args) | ||||
|                     raise berr | ||||
|                 else: | ||||
|                     handler_tries += 1 | ||||
| 
 | ||||
|                 log.exception( | ||||
|                     f'Failed to push ticker quote !?\n' | ||||
|                     f'handler_tries={handler_tries!r}\n' | ||||
|                     f'ticker: {t!r}\n' | ||||
|                     f'\n' | ||||
|                     f'{chan._to_trio.statistics()}\n' | ||||
|                     f'\n' | ||||
|                     f'CAUSE: {berr}\n' | ||||
|                 ) | ||||
| 
 | ||||
| 
 | ||||
|         ticker.updateEvent.connect(push) | ||||
|         try: | ||||
|             await asyncio.sleep(float('inf')) | ||||
|         finally: | ||||
|             teardown() | ||||
| 
 | ||||
|         # return from_aio | ||||
|             # XXX, for debug.. TODO? can we rm again? | ||||
|             # | ||||
|             # tractor.pause_from_sync() | ||||
|             # while True: | ||||
|             #     await asyncio.sleep(1.6) | ||||
|             #     if ticker.ticks: | ||||
|             #         log.debug( | ||||
|             #             f'ticker.ticks = \n' | ||||
|             #             f'{ticker.ticks}\n' | ||||
|             #         ) | ||||
|             #     else: | ||||
|             #         log.warning( | ||||
|             #             'UHH no ticker.ticks ??' | ||||
|             #         ) | ||||
| 
 | ||||
|         # XXX TODO XXX !?!? | ||||
|         # apparently **without this handler** and the subsequent | ||||
|         # re-raising of `maybe_exc from _taskc` cancelling the | ||||
|         # `aio_task` from the `push()`-callback will cause a very | ||||
|         # strange chain of exc raising that breaks all sorts of | ||||
|         # downstream callers, tasks and remote-actor tasks!? | ||||
|         # | ||||
|         # -[ ] we need some low-level reproduction tests to replicate | ||||
|         #      those worst-case scenarios in `tractor` core!! | ||||
|         # -[ ] likely we should factor-out the `tractor.to_asyncio` | ||||
|         #      attempts at workarounds in `.translate_aio_errors()` | ||||
|         #      for failed `asyncio.Task.set_exception()` to either | ||||
|         #      call `aio_task.cancel()` and/or | ||||
|         #      `aio_task._fut_waiter.set_exception()` to a re-useable | ||||
|         #      toolset in something like a `.to_asyncio._utils`?? | ||||
|         # | ||||
|         except asyncio.CancelledError as _taskc: | ||||
|             if maybe_exc is not None: | ||||
|                 raise maybe_exc from _taskc | ||||
| 
 | ||||
|             raise _taskc | ||||
| 
 | ||||
|         except BaseException as _berr: | ||||
|             # stash any crash cause for reporting in `teardown()` | ||||
|             maybe_exc = _berr | ||||
|             raise _berr | ||||
| 
 | ||||
|         finally: | ||||
|             # always disconnect our `push()` and cancel the | ||||
|             # ib-"mkt-data-feed". | ||||
|             teardown() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_aio_quote_stream( | ||||
| 
 | ||||
|     symbol: str, | ||||
|     contract: Contract | None = None, | ||||
|     contract: Contract|None = None, | ||||
| 
 | ||||
| ) -> trio.abc.ReceiveStream: | ||||
|     ''' | ||||
|     Open a real-time `Ticker` quote stream from an `asyncio.Task` | ||||
|     spawned via `tractor.to_asyncio.open_channel_from()`, deliver the | ||||
|     inter-event-loop channel to the `trio.Task` caller and cache it | ||||
|     globally for re-use. | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor.trionics import broadcast_receiver | ||||
|     global _quote_streams | ||||
| 
 | ||||
|  | @ -787,6 +928,10 @@ async def open_aio_quote_stream( | |||
| 
 | ||||
|         assert contract | ||||
| 
 | ||||
|         # TODO? de-reg on teardown of last consumer task? | ||||
|         # -> why aren't we using `.trionics.maybe_open_context()` | ||||
|         # here again?? (we are in `open_client_proxies()` tho?) | ||||
|         # | ||||
|         # cache feed for later consumers | ||||
|         _quote_streams[symbol] = from_aio | ||||
| 
 | ||||
|  | @ -801,7 +946,12 @@ def normalize( | |||
|     calc_price: bool = False | ||||
| 
 | ||||
| ) -> dict: | ||||
|     ''' | ||||
|     Translate `ib_async`'s `Ticker.ticks` values to a `piker` | ||||
|     normalized `dict` form for transmit to downstream `.data` layer | ||||
|     consumers. | ||||
| 
 | ||||
|     ''' | ||||
|     # check for special contract types | ||||
|     con = ticker.contract | ||||
|     fqme, calc_price = con2fqme(con) | ||||
|  | @ -820,7 +970,7 @@ def normalize( | |||
| 
 | ||||
|             tbt = ticker.tickByTicks | ||||
|             if tbt: | ||||
|                 print(f'tickbyticks:\n {ticker.tickByTicks}') | ||||
|                 log.info(f'tickbyticks:\n {ticker.tickByTicks}') | ||||
| 
 | ||||
|     ticker.ticks = new_ticks | ||||
| 
 | ||||
|  | @ -856,27 +1006,39 @@ def normalize( | |||
|     return data | ||||
| 
 | ||||
| 
 | ||||
| # ?TODO? feels like this task-fn could be factored to reduce some | ||||
| # indentation levels? | ||||
| # -[ ] the reconnect while loop on ib-gw "data farm connection.."s | ||||
| # -[ ] everything embedded under the `async with aclosing(stream):` | ||||
| #     as the "meat" of the quote delivery once the connection is | ||||
| #     stable. | ||||
| # | ||||
| async def stream_quotes( | ||||
| 
 | ||||
|     send_chan: trio.abc.SendChannel, | ||||
|     symbols: list[str], | ||||
|     feed_is_live: trio.Event, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
|     # TODO? we need to hook into the `ib_async` logger like | ||||
|     # we can with i3ipc from modden! | ||||
|     # loglevel: str|None = None, | ||||
| 
 | ||||
|     # startup sync | ||||
|     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Stream symbol quotes. | ||||
|     Stream `symbols[0]` quotes back via `send_chan`. | ||||
| 
 | ||||
|     This is a ``trio`` callable routine meant to be invoked | ||||
|     once the brokerd is up. | ||||
|     The `feed_is_live: Event` is set to signal the caller that it can | ||||
|     begin processing msgs from the mem-chan. | ||||
| 
 | ||||
|     ''' | ||||
|     # TODO: support multiple subscriptions | ||||
|     sym = symbols[0] | ||||
|     log.info(f'request for real-time quotes: {sym}') | ||||
|     sym: str = symbols[0] | ||||
|     log.info( | ||||
|         f'request for real-time quotes\n' | ||||
|         f'sym: {sym!r}\n' | ||||
|     ) | ||||
| 
 | ||||
|     init_msgs: list[FeedInit] = [] | ||||
| 
 | ||||
|  | @ -885,34 +1047,49 @@ async def stream_quotes( | |||
|     details: ibis.ContractDetails | ||||
|     async with ( | ||||
|         open_data_client() as proxy, | ||||
|         # trio.open_nursery() as tn, | ||||
|     ): | ||||
|         mkt, details = await get_mkt_info( | ||||
|             sym, | ||||
|             proxy=proxy,  # passed to avoid implicit client load | ||||
|         ) | ||||
| 
 | ||||
|         # is venue active rn? | ||||
|         venue_is_open: bool = any( | ||||
|             is_current_time_in_range( | ||||
|                 start_dt=sesh.start, | ||||
|                 end_dt=sesh.end, | ||||
|             ) | ||||
|             for sesh in details.tradingSessions() | ||||
|         ) | ||||
| 
 | ||||
|         init_msg = FeedInit(mkt_info=mkt) | ||||
| 
 | ||||
|         # NOTE, tell sampler (via config) to skip vlm summing for dst | ||||
|         # assets which provide no vlm data.. | ||||
|         if mkt.dst.atype in { | ||||
|             'fiat', | ||||
|             'index', | ||||
|             'commodity', | ||||
|         }: | ||||
|             # tell sampler config that it shouldn't do vlm summing. | ||||
|             init_msg.shm_write_opts['sum_tick_vlm'] = False | ||||
|             init_msg.shm_write_opts['has_vlm'] = False | ||||
| 
 | ||||
|         init_msgs.append(init_msg) | ||||
| 
 | ||||
|         con: Contract = details.contract | ||||
|         first_ticker: Ticker | None = None | ||||
|         with trio.move_on_after(1): | ||||
|         first_ticker: Ticker|None = None | ||||
| 
 | ||||
|         with trio.move_on_after(1.6) as quote_cs: | ||||
|             first_ticker: Ticker = await proxy.get_quote( | ||||
|                 contract=con, | ||||
|                 raise_on_timeout=False, | ||||
|             ) | ||||
| 
 | ||||
|         # XXX should never happen with this ep right? | ||||
|         # but if so then, more than likely mkt is closed? | ||||
|         if quote_cs.cancelled_caught: | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|         if first_ticker: | ||||
|             first_quote: dict = normalize(first_ticker) | ||||
| 
 | ||||
|  | @ -924,28 +1101,27 @@ async def stream_quotes( | |||
|                 f'{pformat(first_quote)}\n' | ||||
|             ) | ||||
| 
 | ||||
|         # NOTE: it might be outside regular trading hours for | ||||
|         # assets with "standard venue operating hours" so we | ||||
|         # only "pretend the feed is live" when the dst asset | ||||
|         # type is NOT within the NON-NORMAL-venue set: aka not | ||||
|         # commodities, forex or crypto currencies which CAN | ||||
|         # always return a NaN on a snap quote request during | ||||
|         # normal venue hours. In the case of a closed venue | ||||
|         # (equitiies, futes, bonds etc.) we at least try to | ||||
|         # grab the OHLC history. | ||||
|         if ( | ||||
|             first_ticker | ||||
|             and | ||||
|             isnan(first_ticker.last) | ||||
|             # SO, if the last quote price value is NaN we ONLY | ||||
|             # "pretend to do" `feed_is_live.set()` if it's a known | ||||
|             # dst asset venue with a lot of closed operating hours. | ||||
|             and mkt.dst.atype not in { | ||||
|                 'commodity', | ||||
|                 'fiat', | ||||
|                 'crypto', | ||||
|             } | ||||
|         ): | ||||
|         # XXX NOTE: whenever we're "outside regular trading hours" | ||||
|         # (only relevant for assets coming from the "legacy markets" | ||||
|         # space) we basically (from an API/runtime-operational | ||||
|         # perspective) "pretend the feed is live" even if it's | ||||
|         # actually closed. | ||||
|         # | ||||
|         # IOW, we signal to the effective caller (task) that the live | ||||
|         # feed is "already up" but really we're just indicating that | ||||
|         # the OHLCV history can start being loaded immediately by the | ||||
|         # `piker.data`/`.tsp` layers. | ||||
|         # | ||||
|         # XXX, deats: the "pretend we're live" is just done by | ||||
|         # a `feed_is_live.set()` even though nothing is actually live | ||||
|         # Bp | ||||
|         if not venue_is_open: | ||||
|             log.warning( | ||||
|                 f'Venue is closed, unable to establish real-time feed.\n' | ||||
|                 f'mkt: {mkt!r}\n' | ||||
|                 f'\n' | ||||
|                 f'first_ticker: {first_ticker}\n' | ||||
|             ) | ||||
|             task_status.started(( | ||||
|                 init_msgs, | ||||
|                 first_quote, | ||||
|  | @ -956,10 +1132,12 @@ async def stream_quotes( | |||
|             feed_is_live.set() | ||||
| 
 | ||||
|             # block and let data history backfill code run. | ||||
|             # XXX obvi given the venue is closed, we never expect feed | ||||
|             # to come up; a taskc should be the only way to | ||||
|             # terminate this task. | ||||
|             await trio.sleep_forever() | ||||
|             return  # we never expect feed to come up? | ||||
| 
 | ||||
|         # TODO: we should instead spawn a task that waits on a feed | ||||
|         # ?TODO, we could instead spawn a task that waits on a feed | ||||
|         # to start and let it wait indefinitely..instead of this | ||||
|         # hard coded stuff. | ||||
|         # async def wait_for_first_quote(): | ||||
|  | @ -981,23 +1159,26 @@ async def stream_quotes( | |||
|             'Rxed init quote:\n' | ||||
|             f'{pformat(first_quote)}' | ||||
|         ) | ||||
|         cs: trio.CancelScope | None = None | ||||
|         cs: trio.CancelScope|None = None | ||||
|         startup: bool = True | ||||
|         while ( | ||||
|             startup | ||||
|             or cs.cancel_called | ||||
|             or | ||||
|             cs.cancel_called | ||||
|         ): | ||||
|             with trio.CancelScope() as cs: | ||||
|                 async with ( | ||||
|                     tractor.trionics.collapse_eg(), | ||||
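|                     # ^ NOTE, collapses any raised eg which | ||||
|                     # contains just a lone sub-exception into that | ||||
|                     # bare error; keeps tbs readable w/ strict-egs. | ||||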
|                     trio.open_nursery() as nurse, | ||||
|                     open_aio_quote_stream( | ||||
|                         symbol=sym, | ||||
|                         contract=con, | ||||
|                     ) as stream, | ||||
|                 ): | ||||
|                     # ?TODO? can we rm this - particularly for `ib_async`? | ||||
|                     # ugh, clear ticks since we've consumed them | ||||
|                     # (ahem, ib_insync is stateful trash) | ||||
|                     first_ticker.ticks = [] | ||||
|                     # first_ticker.ticks = [] | ||||
| 
 | ||||
|                     # only on first entry at feed boot up | ||||
|                     if startup: | ||||
|  | @ -1011,8 +1192,8 @@ async def stream_quotes( | |||
|                     # data feed event. | ||||
|                     async def reset_on_feed(): | ||||
| 
 | ||||
|                         # TODO: this seems to be suppressed from the | ||||
|                         # traceback in ``tractor``? | ||||
|                         # ??TODO? this seems to be suppressed from the | ||||
|                         # traceback in `tractor`? | ||||
|                         # assert 0 | ||||
| 
 | ||||
|                         rt_ev = proxy.status_event( | ||||
|  | @ -1056,7 +1237,7 @@ async def stream_quotes( | |||
|                                     # ugh, clear ticks since we've | ||||
|                                     # consumed them (ahem, ib_insync is | ||||
|                                     # truly stateful trash) | ||||
|                                     ticker.ticks = [] | ||||
|                                     # ticker.ticks = [] | ||||
| 
 | ||||
|                                     # XXX: this works because we don't use | ||||
|                                     # ``aclosing()`` above? | ||||
|  | @ -1073,8 +1254,12 @@ async def stream_quotes( | |||
|                         async for ticker in stream: | ||||
|                             quote = normalize(ticker) | ||||
|                             fqme = quote['fqme'] | ||||
|                             log.debug( | ||||
|                                 f'Sending quote\n' | ||||
|                                 f'{quote}' | ||||
|                             ) | ||||
|                             await send_chan.send({fqme: quote}) | ||||
| 
 | ||||
|                             # ugh, clear ticks since we've consumed them | ||||
|                             ticker.ticks = [] | ||||
|                             # ticker.ticks = [] | ||||
|                             # last = time.time() | ||||
|  |  | |||
|  | @ -34,6 +34,7 @@ import urllib.parse | |||
| import hashlib | ||||
| import hmac | ||||
| import base64 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from piker import config | ||||
|  | @ -372,8 +373,7 @@ class Client: | |||
|         #     1658347714, 'status': 'Success'}]} | ||||
| 
 | ||||
|         if xfers: | ||||
|             import tractor | ||||
|             await tractor.pp() | ||||
|             await tractor.pause() | ||||
| 
 | ||||
|         trans: dict[str, Transaction] = {} | ||||
|         for entry in xfers: | ||||
|  | @ -501,7 +501,8 @@ class Client: | |||
|             for xkey, data in resp['result'].items(): | ||||
| 
 | ||||
|                 # NOTE: always cache in pairs tables for faster lookup | ||||
|                 pair = Pair(xname=xkey, **data) | ||||
|                 with tractor.devx.maybe_open_crash_handler(): # as bxerr: | ||||
|                     pair = Pair(xname=xkey, **data) | ||||
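|                 # ^ XXX, will drop into a crash-handler REPL (when | ||||
|                 # debug-mode is enabled) on any field-schema | ||||
|                 # mismatch in the `Pair` msg-struct. | ||||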
| 
 | ||||
|                 # register the above `Pair` structs for all | ||||
|                 # key-sets/monikers: a set of 4 (frickin) tables | ||||
|  |  | |||
|  | @ -175,9 +175,8 @@ async def handle_order_requests( | |||
| 
 | ||||
|             case { | ||||
|                 'account': 'kraken.spot' as account, | ||||
|                 'action': action, | ||||
|             } if action in {'buy', 'sell'}: | ||||
| 
 | ||||
|                 'action': 'buy'|'sell', | ||||
|             }: | ||||
|                 # validate | ||||
|                 order = BrokerdOrder(**msg) | ||||
| 
 | ||||
|  | @ -262,6 +261,12 @@ async def handle_order_requests( | |||
|                 } | extra | ||||
| 
 | ||||
|                 log.info(f'Submitting WS order request:\n{pformat(req)}') | ||||
| 
 | ||||
|                 # NOTE HOWTO, debug order requests | ||||
|                 # | ||||
|                 # if 'XRP' in pair: | ||||
|                 #     await tractor.pause() | ||||
| 
 | ||||
|                 await ws.send_msg(req) | ||||
| 
 | ||||
|                 # placehold for sanity checking in relay loop | ||||
|  | @ -1085,6 +1090,8 @@ async def handle_order_updates( | |||
|                         f'Failed to {action} order {reqid}:\n' | ||||
|                         f'{errmsg}' | ||||
|                     ) | ||||
|                     # if tractor._state.debug_mode(): | ||||
|                     #     await tractor.pause() | ||||
| 
 | ||||
|                     symbol: str = 'N/A' | ||||
|                     if chain := apiflows.get(reqid): | ||||
|  |  | |||
|  | @ -21,7 +21,6 @@ Symbology defs and search. | |||
| from decimal import Decimal | ||||
| 
 | ||||
| import tractor | ||||
| from rapidfuzz import process as fuzzy | ||||
| 
 | ||||
| from piker._cacheables import ( | ||||
|     async_lifo_cache, | ||||
|  | @ -41,8 +40,13 @@ from piker.accounting._mktinfo import ( | |||
| ) | ||||
| 
 | ||||
| 
 | ||||
| # https://www.kraken.com/features/api#get-tradable-pairs | ||||
| class Pair(Struct): | ||||
|     ''' | ||||
|     A tradable asset pair as schema-defined by, | ||||
| 
 | ||||
|     https://docs.kraken.com/api/docs/rest-api/get-tradable-asset-pairs | ||||
| 
 | ||||
|     ''' | ||||
|     xname: str  # idiotic bs_mktid equiv i guess? | ||||
|     altname: str  # alternate pair name | ||||
|     wsname: str  # WebSocket pair name (if available) | ||||
|  | @ -53,7 +57,6 @@ class Pair(Struct): | |||
|     lot: str  # volume lot size | ||||
| 
 | ||||
|     cost_decimals: int | ||||
|     costmin: float | ||||
|     pair_decimals: int  # scaling decimal places for pair | ||||
|     lot_decimals: int  # scaling decimal places for volume | ||||
| 
 | ||||
|  | @ -79,6 +82,7 @@ class Pair(Struct): | |||
|     tick_size: float  # min price step size | ||||
|     status: str | ||||
| 
 | ||||
|     costmin: str|None = None  # XXX, only some mktpairs? | ||||
|     short_position_limit: float = 0 | ||||
|     long_position_limit: float = float('inf') | ||||
| 
 | ||||
|  |  | |||
|  | @ -25,7 +25,10 @@ from typing import TYPE_CHECKING | |||
| 
 | ||||
| import trio | ||||
| import tractor | ||||
| from tractor.trionics import broadcast_receiver | ||||
| from tractor.trionics import ( | ||||
|     broadcast_receiver, | ||||
|     collapse_eg, | ||||
| ) | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
|  | @ -281,8 +284,11 @@ async def open_ems( | |||
|             client._ems_stream = trades_stream | ||||
| 
 | ||||
|             # start sync code order msg delivery task | ||||
|             async with trio.open_nursery() as n: | ||||
|                 n.start_soon( | ||||
|             async with ( | ||||
|                 collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 tn.start_soon( | ||||
|                     relay_orders_from_sync_code, | ||||
|                     client, | ||||
|                     fqme, | ||||
|  | @ -298,4 +304,4 @@ async def open_ems( | |||
|                 ) | ||||
| 
 | ||||
|                 # stop the sync-msg-relay task on exit. | ||||
|                 n.cancel_scope.cancel() | ||||
|                 tn.cancel_scope.cancel() | ||||
|  |  | |||
|  | @ -42,6 +42,7 @@ from bidict import bidict | |||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| import tractor | ||||
| from tractor import trionics | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
|  | @ -76,7 +77,6 @@ if TYPE_CHECKING: | |||
| 
 | ||||
| # TODO: numba all of this | ||||
| def mk_check( | ||||
| 
 | ||||
|     trigger_price: float, | ||||
|     known_last: float, | ||||
|     action: str, | ||||
|  | @ -162,7 +162,7 @@ async def clear_dark_triggers( | |||
| 
 | ||||
|     router: Router, | ||||
|     brokerd_orders_stream: tractor.MsgStream, | ||||
|     quote_stream: tractor.ReceiveMsgStream,  # noqa | ||||
|     quote_stream: tractor.MsgStream, | ||||
|     broker: str, | ||||
|     fqme: str, | ||||
| 
 | ||||
|  | @ -178,6 +178,7 @@ async def clear_dark_triggers( | |||
|     ''' | ||||
|     # XXX: optimize this for speed! | ||||
|     # TODO: | ||||
|     # - port to the new ringbuf stuff in `tractor.ipc`! | ||||
|     # - numba all this! | ||||
|     # - this stream may eventually contain multiple symbols | ||||
|     quote_stream._raise_on_lag = False | ||||
|  | @ -500,7 +501,7 @@ class Router(Struct): | |||
| 
 | ||||
|     ''' | ||||
|     # setup at actor spawn time | ||||
|     nursery: trio.Nursery | ||||
|     _tn: trio.Nursery | ||||
| 
 | ||||
|     # broker to book map | ||||
|     books: dict[str, DarkBook] = {} | ||||
|  | @ -666,7 +667,7 @@ class Router(Struct): | |||
|                 # dark book clearing loop, also lives with parent | ||||
|                 # daemon to allow dark order clearing while no | ||||
|                 # client is connected. | ||||
|                 self.nursery.start_soon( | ||||
|                 self._tn.start_soon( | ||||
|                     clear_dark_triggers, | ||||
|                     self, | ||||
|                     relay.brokerd_stream, | ||||
|  | @ -689,7 +690,7 @@ class Router(Struct): | |||
| 
 | ||||
|                 # spawn a ``brokerd`` order control dialog stream | ||||
|                 # that syncs lifetime with the parent `emsd` daemon. | ||||
|                 self.nursery.start_soon( | ||||
|                 self._tn.start_soon( | ||||
|                     translate_and_relay_brokerd_events, | ||||
|                     broker, | ||||
|                     relay.brokerd_stream, | ||||
|  | @ -763,10 +764,12 @@ async def _setup_persistent_emsd( | |||
| 
 | ||||
|     global _router | ||||
| 
 | ||||
|     # open a root "service nursery" for the ``emsd`` actor | ||||
|     async with trio.open_nursery() as service_nursery: | ||||
| 
 | ||||
|         _router = Router(nursery=service_nursery) | ||||
|     # open a root "service task-nursery" for the `emsd`-actor | ||||
|     async with ( | ||||
|         trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         _router = Router(_tn=tn) | ||||
| 
 | ||||
|         # TODO: send back the full set of persistent | ||||
|         # orders/execs? | ||||
|  | @ -1182,12 +1185,16 @@ async def process_client_order_cmds( | |||
|     submitting live orders immediately if requested by the client. | ||||
| 
 | ||||
|     ''' | ||||
|     # cmd: dict | ||||
|     # TODO, only allow `msgspec.Struct` form! | ||||
|     cmd: dict | ||||
|     async for cmd in client_order_stream: | ||||
|         log.info(f'Received order cmd:\n{pformat(cmd)}') | ||||
|         log.info( | ||||
|             f'Received order cmd:\n' | ||||
|             f'{pformat(cmd)}\n' | ||||
|         ) | ||||
| 
 | ||||
|         # CAWT DAMN we need struct support! | ||||
|         oid = str(cmd['oid']) | ||||
|         oid: str = str(cmd['oid']) | ||||
| 
 | ||||
|         # register this stream as an active order dialog (msg flow) for | ||||
|         # this order id such that translated message from the brokerd | ||||
|  | @ -1293,7 +1300,7 @@ async def process_client_order_cmds( | |||
|             case { | ||||
|                 'oid': oid, | ||||
|                 'symbol': fqme, | ||||
|                 'price': trigger_price, | ||||
|                 'price': price, | ||||
|                 'size': size, | ||||
|                 'action': ('buy' | 'sell') as action, | ||||
|                 'exec_mode': ('live' | 'paper'), | ||||
|  | @ -1325,7 +1332,7 @@ async def process_client_order_cmds( | |||
| 
 | ||||
|                     symbol=sym, | ||||
|                     action=action, | ||||
|                     price=trigger_price, | ||||
|                     price=price, | ||||
|                     size=size, | ||||
|                     account=req.account, | ||||
|                 ) | ||||
|  | @ -1347,7 +1354,11 @@ async def process_client_order_cmds( | |||
|                 # (``translate_and_relay_brokerd_events()`` above) will | ||||
|                 # handle relaying the ems side responses back to | ||||
|                 # the client/cmd sender from this request | ||||
|                 log.info(f'Sending live order to {broker}:\n{pformat(msg)}') | ||||
|                 log.info( | ||||
|                     f'Sending live order to {broker}:\n' | ||||
|                     f'{pformat(msg)}' | ||||
|                 ) | ||||
| 
 | ||||
|                 await brokerd_order_stream.send(msg) | ||||
| 
 | ||||
|                 # an immediate response should be ``BrokerdOrderAck`` | ||||
|  | @ -1363,7 +1374,7 @@ async def process_client_order_cmds( | |||
|             case { | ||||
|                 'oid': oid, | ||||
|                 'symbol': fqme, | ||||
|                 'price': trigger_price, | ||||
|                 'price': price, | ||||
|                 'size': size, | ||||
|                 'exec_mode': exec_mode, | ||||
|                 'action': action, | ||||
|  | @ -1391,7 +1402,12 @@ async def process_client_order_cmds( | |||
|                 if isnan(last): | ||||
|                     last = flume.rt_shm.array[-1]['close'] | ||||
| 
 | ||||
|                 pred = mk_check(trigger_price, last, action) | ||||
|                 trigger_price: float = float(price) | ||||
|                 pred = mk_check( | ||||
|                     trigger_price, | ||||
|                     last, | ||||
|                     action, | ||||
|                 ) | ||||
| 
 | ||||
|                 # NOTE: for dark orders currently we submit | ||||
|                 # the triggered live order at a price 5 ticks | ||||
|  | @ -1498,7 +1514,7 @@ async def maybe_open_trade_relays( | |||
|         loglevel: str = 'info', | ||||
|     ): | ||||
| 
 | ||||
|         fqme, relay, feed, client_ready = await _router.nursery.start( | ||||
|         fqme, relay, feed, client_ready = await _router._tn.start( | ||||
|             _router.open_trade_relays, | ||||
|             fqme, | ||||
|             exec_mode, | ||||
|  | @ -1531,7 +1547,7 @@ async def _emsd_main( | |||
|     ctx: tractor.Context, | ||||
|     fqme: str, | ||||
|     exec_mode: str,  # ('paper', 'live') | ||||
|     loglevel: str | None = None, | ||||
|     loglevel: str|None = None, | ||||
| 
 | ||||
| ) -> tuple[ | ||||
|     dict[ | ||||
|  |  | |||
|  | @ -19,6 +19,7 @@ Clearing sub-system message and protocols. | |||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from decimal import Decimal | ||||
| from typing import ( | ||||
|     Literal, | ||||
| ) | ||||
|  | @ -71,7 +72,15 @@ class Order(Struct): | |||
|     symbol: str  # | MktPair | ||||
|     account: str  # should we set a default as '' ? | ||||
| 
 | ||||
|     price: float | ||||
|     # https://docs.python.org/3/library/decimal.html#decimal-objects | ||||
|     # | ||||
|     # ?TODO? decimal usage throughout? | ||||
|     # -[ ] possibly leverage the `Encoder(decimal_format='number')` | ||||
|     #  bit? | ||||
|     # |_https://jcristharif.com/msgspec/supported-types.html#decimal | ||||
|     # -[ ] should we also use it for .size? | ||||
|     # | ||||
|     price: Decimal | ||||
|     size: float  # -ve is "sell", +ve is "buy" | ||||
| 
 | ||||
|     brokers: list[str] = [] | ||||
|  | @ -178,7 +187,7 @@ class BrokerdOrder(Struct): | |||
|     time_ns: int | ||||
| 
 | ||||
|     symbol: str  # fqme | ||||
|     price: float | ||||
|     price: Decimal | ||||
|     size: float | ||||
| 
 | ||||
|     # TODO: if we instead rely on a +ve/-ve size to determine | ||||
|  |  | |||
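| On the `?TODO?` note above: `msgspec` can already encode | ||||
| `Decimal` as a plain JSON number via its `decimal_format` | ||||
| option; a minimal sketch (assuming `msgspec >= 0.18`):: | ||||
| 
|     import msgspec | ||||
|     from decimal import Decimal | ||||
| 
|     class Order(msgspec.Struct): | ||||
|         price: Decimal | ||||
|         size: float | ||||
| 
|     enc = msgspec.json.Encoder(decimal_format='number') | ||||
|     print(enc.encode(Order(price=Decimal('1.23'), size=4.0))) | ||||
|     # -> b'{"price":1.23,"size":4.0}' | ||||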
|  | @ -508,7 +508,7 @@ async def handle_order_requests( | |||
|                 reqid = await client.submit_limit( | ||||
|                     oid=order.oid, | ||||
|                     symbol=f'{order.symbol}.{client.broker}', | ||||
|                     price=order.price, | ||||
|                     price=float(order.price), | ||||
|                     action=order.action, | ||||
|                     size=order.size, | ||||
|                     # XXX: by default 0 tells ``ib_insync`` methods that | ||||
|  |  | |||
|  | @ -134,86 +134,65 @@ def pikerd( | |||
|     Spawn the piker broker-daemon. | ||||
| 
 | ||||
|     ''' | ||||
|     from tractor.devx import maybe_open_crash_handler | ||||
|     with maybe_open_crash_handler(pdb=pdb): | ||||
|         log = get_console_log(loglevel, name='cli') | ||||
|     # from tractor.devx import maybe_open_crash_handler | ||||
|     # with maybe_open_crash_handler(pdb=False): | ||||
|     log = get_console_log(loglevel, name='cli') | ||||
| 
 | ||||
|         if pdb: | ||||
|             log.warning(( | ||||
|                 "\n" | ||||
|                 "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n" | ||||
|                 "When a `piker` daemon crashes it will block the " | ||||
|                 "task-thread until resumed from console!\n" | ||||
|                 "\n" | ||||
|     if pdb: | ||||
|         log.warning(( | ||||
|             "\n" | ||||
|             "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n" | ||||
|             "When a `piker` daemon crashes it will block the " | ||||
|             "task-thread until resumed from console!\n" | ||||
|             "\n" | ||||
|         )) | ||||
| 
 | ||||
|     # service-actor registry endpoint socket-address set | ||||
|     regaddrs: list[tuple[str, int]] = [] | ||||
| 
 | ||||
|     conf, _ = config.load( | ||||
|         conf_name='conf', | ||||
|     ) | ||||
|     network: dict = conf.get('network') | ||||
|     if ( | ||||
|         network is None | ||||
|         and not maddr | ||||
|     ): | ||||
|         regaddrs = [( | ||||
|             _default_registry_host, | ||||
|             _default_registry_port, | ||||
|         )] | ||||
| 
 | ||||
|     else: | ||||
|         eps: dict = load_trans_eps( | ||||
|             network, | ||||
|             maddr, | ||||
|         ) | ||||
|         for layers in eps['pikerd']: | ||||
|             regaddrs.append(( | ||||
|                 layers['ipv4']['addr'], | ||||
|                 layers['tcp']['port'], | ||||
|             )) | ||||
| 
 | ||||
|         # service-actor registry endpoint socket-address set | ||||
|         regaddrs: list[tuple[str, int]] = [] | ||||
|     from .. import service | ||||
| 
 | ||||
|         conf, _ = config.load( | ||||
|             conf_name='conf', | ||||
|         ) | ||||
|         network: dict = conf.get('network') | ||||
|         if ( | ||||
|             network is None | ||||
|             and not maddr | ||||
|     async def main(): | ||||
|         service_mngr: service.Services | ||||
|         async with ( | ||||
|             service.open_pikerd( | ||||
|                 registry_addrs=regaddrs, | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=pdb, | ||||
|                 # enable_transports=['uds'], | ||||
|                 enable_transports=['tcp'], | ||||
|             ) as service_mngr, | ||||
|         ): | ||||
|             regaddrs = [( | ||||
|                 _default_registry_host, | ||||
|                 _default_registry_port, | ||||
|             )] | ||||
|             assert service_mngr | ||||
|             # ?TODO? spawn all other sub-actor daemons according to | ||||
|             # multiaddress endpoint spec defined by user config | ||||
|             await trio.sleep_forever() | ||||
| 
 | ||||
|         else: | ||||
|             eps: dict = load_trans_eps( | ||||
|                 network, | ||||
|                 maddr, | ||||
|             ) | ||||
|             for layers in eps['pikerd']: | ||||
|                 regaddrs.append(( | ||||
|                     layers['ipv4']['addr'], | ||||
|                     layers['tcp']['port'], | ||||
|                 )) | ||||
| 
 | ||||
|         from .. import service | ||||
| 
 | ||||
|         async def main(): | ||||
|             service_mngr: service.Services | ||||
| 
 | ||||
|             async with ( | ||||
|                 service.open_pikerd( | ||||
|                     registry_addrs=regaddrs, | ||||
|                     loglevel=loglevel, | ||||
|                     debug_mode=pdb, | ||||
| 
 | ||||
|                 ) as service_mngr,  # normally delivers a ``Services`` handle | ||||
| 
 | ||||
|                 # AsyncExitStack() as stack, | ||||
|             ): | ||||
|                 # TODO: spawn all other sub-actor daemons according to | ||||
|                 # multiaddress endpoint spec defined by user config | ||||
|                 assert service_mngr | ||||
| 
 | ||||
|                 # if tsdb: | ||||
|                 #     dname, conf = await stack.enter_async_context( | ||||
|                 #         service.marketstore.start_ahab_daemon( | ||||
|                 #             service_mngr, | ||||
|                 #             loglevel=loglevel, | ||||
|                 #         ) | ||||
|                 #     ) | ||||
|                 #     log.info(f'TSDB `{dname}` up with conf:\n{conf}') | ||||
| 
 | ||||
|                 # if es: | ||||
|                 #     dname, conf = await stack.enter_async_context( | ||||
|                 #         service.elastic.start_ahab_daemon( | ||||
|                 #             service_mngr, | ||||
|                 #             loglevel=loglevel, | ||||
|                 #         ) | ||||
|                 #     ) | ||||
|                 #     log.info(f'DB `{dname}` up with conf:\n{conf}') | ||||
| 
 | ||||
|                 await trio.sleep_forever() | ||||
| 
 | ||||
|         trio.run(main) | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @click.group(context_settings=config._context_defaults) | ||||
|  | @ -328,6 +307,10 @@ def services(config, tl, ports): | |||
|     if not ports: | ||||
|         ports = [_default_registry_port] | ||||
| 
 | ||||
|     addr = tractor._addr.wrap_address( | ||||
|         addr=(host, ports[0]) | ||||
|     ) | ||||
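|     # ^ NOTE, wraps the raw `(host, port)` pair in a `tractor` | ||||
|     # address-type accepted by `get_registry()` below. | ||||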
| 
 | ||||
|     async def list_services(): | ||||
|         nonlocal host | ||||
|         async with ( | ||||
|  | @ -335,16 +318,18 @@ def services(config, tl, ports): | |||
|                 name='service_query', | ||||
|                 loglevel=config['loglevel'] if tl else None, | ||||
|             ), | ||||
|             tractor.get_arbiter( | ||||
|                 host=host, | ||||
|                 port=ports[0] | ||||
|             tractor.get_registry( | ||||
|                 addr=addr, | ||||
|             ) as portal | ||||
|         ): | ||||
|             registry = await portal.run_from_ns('self', 'get_registry') | ||||
|             registry = await portal.run_from_ns( | ||||
|                 'self', | ||||
|                 'get_registry', | ||||
|             ) | ||||
|             json_d = {} | ||||
|             for key, socket in registry.items(): | ||||
|                 host, port = socket | ||||
|                 json_d[key] = f'{host}:{port}' | ||||
|                 json_d[key] = f'{socket}' | ||||
| 
 | ||||
|             click.echo(f"{colorize_json(json_d)}") | ||||
| 
 | ||||
|     trio.run(list_services) | ||||
|  |  | |||
|  | @ -284,7 +284,8 @@ class Sampler: | |||
| 
 | ||||
|                     except ( | ||||
|                         trio.BrokenResourceError, | ||||
|                         trio.ClosedResourceError | ||||
|                         trio.ClosedResourceError, | ||||
|                         trio.EndOfChannel, | ||||
|                     ): | ||||
|                         log.error( | ||||
|                             f'{stream._ctx.chan.uid} dropped connection' | ||||
|  | @ -697,7 +698,7 @@ async def sample_and_broadcast( | |||
| 
 | ||||
|                                 log.warning( | ||||
|                                     f'Feed OVERRUN {sub_key}' | ||||
|                                     '@{bus.brokername} -> \n' | ||||
|                                     f'@{bus.brokername} -> \n' | ||||
|                                     f'feed @ {chan.uid}\n' | ||||
|                                     f'throttle = {throttle} Hz' | ||||
|                                 ) | ||||
|  | @ -876,6 +877,7 @@ async def uniform_rate_send( | |||
|         except tractor.RemoteActorError as rme: | ||||
|             if rme.type is not tractor._exceptions.StreamOverrun: | ||||
|                 raise | ||||
| 
 | ||||
|             ctx = stream._ctx | ||||
|             chan = ctx.chan | ||||
|             log.warning( | ||||
|  | @ -892,6 +894,7 @@ async def uniform_rate_send( | |||
|             trio.ClosedResourceError, | ||||
|             trio.BrokenResourceError, | ||||
|             ConnectionResetError, | ||||
|             trio.EndOfChannel, | ||||
|         ): | ||||
|             # if the feed consumer goes down then drop | ||||
|             # out of this rate limiter | ||||
|  |  | |||
|  | @ -92,6 +92,15 @@ class SymbologyCache(Struct): | |||
| 
 | ||||
|     def write_config(self) -> None: | ||||
| 
 | ||||
|         def clean_dict_for_toml(d): | ||||
|             ''' | ||||
|             Recursively drop `None` values from dicts/lists for | ||||
|             TOML serialization. | ||||
| 
|             ''' | ||||
|             if isinstance(d, dict): | ||||
|                 return { | ||||
|                     k: clean_dict_for_toml(v) | ||||
|                     for k, v in d.items() if v is not None | ||||
|                 } | ||||
|             elif isinstance(d, list): | ||||
|                 return [clean_dict_for_toml(i) for i in d if i is not None] | ||||
|             else: | ||||
|                 return d | ||||
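|         # ^ XXX, needed since TOML has no `null`-type; the | ||||
|         # serializer would otherwise error on any `None` field. | ||||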
| 
 | ||||
|         # put the backend's pair-struct type ref at the top | ||||
|         # of file if possible. | ||||
|         cachedict: dict[str, Any] = { | ||||
|  | @ -112,7 +121,9 @@ class SymbologyCache(Struct): | |||
| 
 | ||||
|             dct = cachedict[key] = {} | ||||
|             for key, struct in table.items(): | ||||
|                 dct[key] = struct.to_dict(include_non_members=False) | ||||
|                 raw_dict = struct.to_dict(include_non_members=False) | ||||
|                 # Clean None values for TOML compatibility | ||||
|                 dct[key] = clean_dict_for_toml(raw_dict) | ||||
| 
 | ||||
|         try: | ||||
|             with self.fp.open(mode='wb') as fp: | ||||
|  |  | |||
|  | @ -27,7 +27,6 @@ from functools import partial | |||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Callable, | ||||
|     AsyncContextManager, | ||||
|     AsyncGenerator, | ||||
|  | @ -35,6 +34,7 @@ from typing import ( | |||
| ) | ||||
| import json | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| from trio_websocket import ( | ||||
|  | @ -167,7 +167,7 @@ async def _reconnect_forever( | |||
| 
 | ||||
|     async def proxy_msgs( | ||||
|         ws: WebSocketConnection, | ||||
|         pcs: trio.CancelScope,  # parent cancel scope | ||||
|         rent_cs: trio.CancelScope,  # parent cancel scope | ||||
|     ): | ||||
|         ''' | ||||
|         Receive (under `timeout` deadline) all msgs from the underlying | ||||
|  | @ -192,7 +192,7 @@ async def _reconnect_forever( | |||
|                         f'{url} connection bail with:' | ||||
|                     ) | ||||
|                     await trio.sleep(0.5) | ||||
|                     pcs.cancel() | ||||
|                     rent_cs.cancel() | ||||
| 
 | ||||
|                     # go back to reconnect loop in parent task | ||||
|                     return | ||||
|  | @ -204,7 +204,7 @@ async def _reconnect_forever( | |||
|                         f'{src_mod}\n' | ||||
|                         'WS feed seems down and slow af.. reconnecting\n' | ||||
|                     ) | ||||
|                     pcs.cancel() | ||||
|                     rent_cs.cancel() | ||||
| 
 | ||||
|                     # go back to reconnect loop in parent task | ||||
|                     return | ||||
|  | @ -228,7 +228,12 @@ async def _reconnect_forever( | |||
|     nobsws._connected = trio.Event() | ||||
|     task_status.started() | ||||
| 
 | ||||
|     while not snd._closed: | ||||
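|     # NOTE, use the mem-chan's (private) state to keep | ||||
|     # (re)connecting only while both channel ends remain open. | ||||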
|     mc_state: trio._channel.MemoryChannelState = snd._state | ||||
|     while ( | ||||
|         mc_state.open_receive_channels > 0 | ||||
|         and | ||||
|         mc_state.open_send_channels > 0 | ||||
|     ): | ||||
|         log.info( | ||||
|             f'{src_mod}\n' | ||||
|             f'{url} trying (RE)CONNECT' | ||||
|  | @ -237,10 +242,11 @@ async def _reconnect_forever( | |||
|         ws: WebSocketConnection | ||||
|         try: | ||||
|             async with ( | ||||
|                 trio.open_nursery() as n, | ||||
|                 open_websocket_url(url) as ws, | ||||
|                 tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery() as tn, | ||||
|             ): | ||||
|                 cs = nobsws._cs = n.cancel_scope | ||||
|                 cs = nobsws._cs = tn.cancel_scope | ||||
|                 nobsws._ws = ws | ||||
|                 log.info( | ||||
|                     f'{src_mod}\n' | ||||
|  | @ -248,7 +254,7 @@ async def _reconnect_forever( | |||
|                 ) | ||||
| 
 | ||||
|                 # begin relay loop to forward msgs | ||||
|                 n.start_soon( | ||||
|                 tn.start_soon( | ||||
|                     proxy_msgs, | ||||
|                     ws, | ||||
|                     cs, | ||||
|  | @ -262,7 +268,7 @@ async def _reconnect_forever( | |||
| 
 | ||||
|                     # TODO: should we return an explicit sub-cs | ||||
|                     # from this fixture task? | ||||
|                     await n.start( | ||||
|                     await tn.start( | ||||
|                         open_fixture, | ||||
|                         fixture, | ||||
|                         nobsws, | ||||
|  | @ -272,11 +278,23 @@ async def _reconnect_forever( | |||
|                 # to let tasks run **inside** the ws open block above. | ||||
|                 nobsws._connected.set() | ||||
|                 await trio.sleep_forever() | ||||
|         except HandshakeError: | ||||
| 
 | ||||
|         except ( | ||||
|             HandshakeError, | ||||
|             ConnectionRejected, | ||||
|         ): | ||||
|             log.exception('Retrying connection') | ||||
|             await trio.sleep(0.5)  # throttle | ||||
| 
 | ||||
|         # ws & nursery block ends | ||||
|         except BaseException as _berr: | ||||
|             berr = _berr | ||||
|             log.exception( | ||||
|                 'Reconnect-attempt failed ??\n' | ||||
|             ) | ||||
|             await trio.sleep(0.2)  # throttle | ||||
|             raise berr | ||||
| 
 | ||||
|         #|_ws & nursery block ends | ||||
|         nobsws._connected = trio.Event() | ||||
|         if cs.cancelled_caught: | ||||
|             log.cancel( | ||||
|  | @ -324,21 +342,25 @@ async def open_autorecon_ws( | |||
|     connectivity errors, or some user-defined recv timeout. | ||||
| 
 | ||||
|     You can provide a ``fixture`` async-context-manager which will be | ||||
|     entered/exited around each connection reset; eg. for (re)requesting | ||||
|     subscriptions without requiring streaming setup code to rerun. | ||||
|     entered/exited around each connection reset; eg. for | ||||
|     (re)requesting subscriptions without requiring streaming setup | ||||
|     code to rerun. | ||||
| 
 | ||||
|     ''' | ||||
|     snd: trio.MemorySendChannel | ||||
|     rcv: trio.MemoryReceiveChannel | ||||
|     snd, rcv = trio.open_memory_channel(616) | ||||
| 
 | ||||
|     async with trio.open_nursery() as n: | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         nobsws = NoBsWs( | ||||
|             url, | ||||
|             rcv, | ||||
|             msg_recv_timeout=msg_recv_timeout, | ||||
|         ) | ||||
|         await n.start( | ||||
|         await tn.start( | ||||
|             partial( | ||||
|                 _reconnect_forever, | ||||
|                 url, | ||||
|  | @ -351,11 +373,10 @@ async def open_autorecon_ws( | |||
|         await nobsws._connected.wait() | ||||
|         assert nobsws._cs | ||||
|         assert nobsws.connected() | ||||
| 
 | ||||
|         try: | ||||
|             yield nobsws | ||||
|         finally: | ||||
|             n.cancel_scope.cancel() | ||||
|             tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| ''' | ||||
|  | @ -368,8 +389,8 @@ of msgs over a `NoBsWs`. | |||
| class JSONRPCResult(Struct): | ||||
|     id: int | ||||
|     jsonrpc: str = '2.0' | ||||
|     result: Optional[dict] = None | ||||
|     error: Optional[dict] = None | ||||
|     result: dict|None = None | ||||
|     error: dict|None = None | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
|  |  | |||
|  | @ -39,6 +39,7 @@ from typing import ( | |||
|     AsyncContextManager, | ||||
|     Awaitable, | ||||
|     Sequence, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import trio | ||||
|  | @ -75,6 +76,10 @@ from ._sampling import ( | |||
|     uniform_rate_send, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from tractor._addr import Address | ||||
|     from tractor.msg.types import Aid | ||||
| 
 | ||||
| 
 | ||||
| class Sub(Struct, frozen=True): | ||||
|     ''' | ||||
|  | @ -723,7 +728,10 @@ class Feed(Struct): | |||
|                 async for msg in stream: | ||||
|                     await tx.send(msg) | ||||
| 
 | ||||
|         async with trio.open_nursery() as nurse: | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as nurse | ||||
|         ): | ||||
|             # spawn a relay task for each stream so that they all | ||||
|             # multiplex to a common channel. | ||||
|             for brokername in mods: | ||||
|  | @ -899,19 +907,19 @@ async def open_feed( | |||
|             feed.portals[brokermod] = portal | ||||
| 
 | ||||
|             # fill out "status info" that the UI can show | ||||
|             host, port = portal.channel.raddr | ||||
|             if host == '127.0.0.1': | ||||
|                 host = 'localhost' | ||||
| 
 | ||||
|             chan: tractor.Channel = portal.chan | ||||
|             raddr: Address = chan.raddr | ||||
|             aid: Aid = chan.aid | ||||
|             # TAG_feed_status_update | ||||
|             feed.status.update({ | ||||
|                 'actor_name': portal.channel.uid[0], | ||||
|                 'host': host, | ||||
|                 'port': port, | ||||
|                 'actor_id': aid, | ||||
|                 'actor_short_id': f'{aid.name}@{aid.pid}', | ||||
|                 'ipc': chan.raddr.proto_key, | ||||
|                 'ipc_addr': raddr, | ||||
|                 'hist_shm': 'NA', | ||||
|                 'rt_shm': 'NA', | ||||
|                 'throttle_rate': tick_throttle, | ||||
|                 'throttle_hz': tick_throttle, | ||||
|             }) | ||||
|             # feed.status.update(init_msg.pop('status', {})) | ||||
| 
 | ||||
|             # (allocate and) connect to any feed bus for this broker | ||||
|             bus_ctxs.append( | ||||
|  |  | |||
|  | @ -200,9 +200,13 @@ def maybe_mk_fsp_shm( | |||
|     ) | ||||
| 
 | ||||
|     # (attempt to) uniquely key the fsp shm buffers | ||||
|     # Use hash for macOS compatibility (31 char limit) | ||||
|     import hashlib | ||||
|     actor_name, uuid = tractor.current_actor().uid | ||||
|     uuid_snip: str = uuid[:16] | ||||
|     key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}' | ||||
|     # Create short hash of sym and target name | ||||
|     content = f'{sym}.{target.name}' | ||||
|     content_hash = hashlib.md5(content.encode()).hexdigest()[:8] | ||||
|     key: str = f'{uuid[:8]}_{content_hash}.fsp' | ||||
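|     # ^ e.g. `0cf35c3a_9d4b2e1f.fsp` (21 chars) stays well under | ||||
|     # macOS's 31-char `shm_open()` name limit (`PSHMNAMLEN`). | ||||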
| 
 | ||||
|     shm, opened = maybe_open_shm_array( | ||||
|         key, | ||||
|  |  | |||
|  | @ -498,6 +498,7 @@ async def cascade( | |||
| 
 | ||||
|         func_name: str = func.__name__ | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(),  # avoid multi-taskc tb in console | ||||
|             trio.open_nursery() as tn, | ||||
|         ): | ||||
|             # TODO: might be better to just make a "restart" method where | ||||
|  |  | |||
piker/log.py | 28
							|  | @ -18,7 +18,11 @@ | |||
| Log like a forester! | ||||
| """ | ||||
| import logging | ||||
| import reprlib | ||||
| import json | ||||
| from typing import ( | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import tractor | ||||
| from pygments import ( | ||||
|  | @ -84,3 +88,27 @@ def colorize_json( | |||
|         # likeable styles: algol_nu, tango, monokai | ||||
|         formatters.TerminalTrueColorFormatter(style=style) | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_repr( | ||||
|     **repr_kws, | ||||
| ) -> Callable[[str], str]: | ||||
|     ''' | ||||
|     Allocate and deliver a `reprlib.Repr` instance with provided | ||||
|     input settings using the std-lib's `reprlib` mod, | ||||
|      * https://docs.python.org/3/library/reprlib.html | ||||
| 
 | ||||
|     ------ Ex. ------ | ||||
|     An up to 6-layer-nested `dict` as multi-line: | ||||
|     - https://stackoverflow.com/a/79102479 | ||||
|     - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel | ||||
| 
 | ||||
|     ''' | ||||
|     def_kws: dict[str, int] = dict( | ||||
|         indent=2, | ||||
|         maxlevel=6,  # recursion levels | ||||
|         maxstring=66,  # match editor line-len limit | ||||
|     ) | ||||
|     def_kws |= repr_kws | ||||
|     reprr = reprlib.Repr(**def_kws) | ||||
|     return reprr.repr | ||||
|  |  | |||
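| A quick usage sketch for the new `mk_repr()` helper (assuming | ||||
| Python 3.12+ where `reprlib.Repr` accepts these constructor | ||||
| kwargs):: | ||||
| 
|     from piker.log import mk_repr | ||||
| 
|     repr_fn = mk_repr(maxlevel=3)  # override a default setting | ||||
|     print(repr_fn({'a': {'b': {'c': [1, 2, 3]}}})) | ||||
|     # -> a depth-truncated (and indented) repr-str | ||||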
|  | @ -107,17 +107,22 @@ async def open_piker_runtime( | |||
|         async with ( | ||||
|             tractor.open_root_actor( | ||||
| 
 | ||||
|                 # passed through to ``open_root_actor`` | ||||
|                 # passed through to `open_root_actor` | ||||
|                 registry_addrs=registry_addrs, | ||||
|                 name=name, | ||||
|                 start_method=start_method, | ||||
|                 loglevel=loglevel, | ||||
|                 debug_mode=debug_mode, | ||||
|                 start_method=start_method, | ||||
| 
 | ||||
|                 # XXX NOTE MEMBER DAT der's a perf hit yo!! | ||||
|                 # https://greenback.readthedocs.io/en/latest/principle.html#performance | ||||
|                 maybe_enable_greenback=True, | ||||
| 
 | ||||
|                 # TODO: eventually we should be able to avoid | ||||
|                 # having the root have more then permissions to | ||||
|                 # spawn other specialized daemons I think? | ||||
|                 enable_modules=enable_modules, | ||||
|                 hide_tb=False, | ||||
| 
 | ||||
|                 **tractor_kwargs, | ||||
|             ) as actor, | ||||
|  | @ -200,7 +205,8 @@ async def open_pikerd( | |||
|             reg_addrs, | ||||
|         ), | ||||
|         tractor.open_nursery() as actor_nursery, | ||||
|         trio.open_nursery() as service_nursery, | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as service_tn, | ||||
|     ): | ||||
|         for addr in reg_addrs: | ||||
|             if addr not in root_actor.accept_addrs: | ||||
|  | @ -211,7 +217,7 @@ async def open_pikerd( | |||
| 
 | ||||
|         # assign globally for future daemon/task creation | ||||
|         Services.actor_n = actor_nursery | ||||
|         Services.service_n = service_nursery | ||||
|         Services.service_n = service_tn | ||||
|         Services.debug_mode = debug_mode | ||||
| 
 | ||||
|         try: | ||||
|  | @ -221,7 +227,7 @@ async def open_pikerd( | |||
|             # TODO: is this more clever/efficient? | ||||
|             # if 'samplerd' in Services.service_tasks: | ||||
|             #     await Services.cancel_service('samplerd') | ||||
|             service_nursery.cancel_scope.cancel() | ||||
|             service_tn.cancel_scope.cancel() | ||||
| 
 | ||||
| 
 | ||||
| # TODO: do we even need this? | ||||
|  | @ -256,7 +262,10 @@ async def maybe_open_pikerd( | |||
|     loglevel: str | None = None, | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> tractor._portal.Portal | ClassVar[Services]: | ||||
| ) -> ( | ||||
|     tractor._portal.Portal | ||||
|     |ClassVar[Services] | ||||
| ): | ||||
|     ''' | ||||
|     If no ``pikerd`` daemon-root-actor can be found start it and | ||||
|     yield up (we should probably figure out returning a portal to self | ||||
|  | @ -281,10 +290,11 @@ async def maybe_open_pikerd( | |||
| 
 | ||||
|     registry_addrs: list[tuple[str, int]] = ( | ||||
|         registry_addrs | ||||
|         or [_default_reg_addr] | ||||
|         or | ||||
|         [_default_reg_addr] | ||||
|     ) | ||||
| 
 | ||||
|     pikerd_portal: tractor.Portal | None | ||||
|     pikerd_portal: tractor.Portal|None | ||||
|     async with ( | ||||
|         open_piker_runtime( | ||||
|             name=query_name, | ||||
|  |  | |||
|  | @ -28,6 +28,7 @@ from contextlib import ( | |||
| ) | ||||
| 
 | ||||
| import tractor | ||||
| from trio.lowlevel import current_task | ||||
| 
 | ||||
| from ._util import ( | ||||
|     log,  # sub-sys logger | ||||
|  | @ -70,69 +71,84 @@ async def maybe_spawn_daemon( | |||
|     lock = Services.locks[service_name] | ||||
|     await lock.acquire() | ||||
| 
 | ||||
|     async with find_service( | ||||
|         service_name, | ||||
|         registry_addrs=[('127.0.0.1', 6116)], | ||||
|     ) as portal: | ||||
|         if portal is not None: | ||||
|             lock.release() | ||||
|             yield portal | ||||
|             return | ||||
|     try: | ||||
|         async with find_service( | ||||
|             service_name, | ||||
|             registry_addrs=[('127.0.0.1', 6116)], | ||||
|         ) as portal: | ||||
|             if portal is not None: | ||||
|                 lock.release() | ||||
|                 yield portal | ||||
|                 return | ||||
| 
 | ||||
|     log.warning( | ||||
|         f"Couldn't find any existing {service_name}\n" | ||||
|         'Attempting to spawn new daemon-service..' | ||||
|     ) | ||||
|         log.warning( | ||||
|             f"Couldn't find any existing {service_name}\n" | ||||
|             'Attempting to spawn new daemon-service..' | ||||
|         ) | ||||
| 
 | ||||
|     # ask root ``pikerd`` daemon to spawn the daemon we need if | ||||
|     # pikerd is not live we now become the root of the | ||||
|     # process tree | ||||
|     async with maybe_open_pikerd( | ||||
|         loglevel=loglevel, | ||||
|         **pikerd_kwargs, | ||||
|         # ask root ``pikerd`` daemon to spawn the daemon we need if | ||||
|         # pikerd is not live we now become the root of the | ||||
|         # process tree | ||||
|         async with maybe_open_pikerd( | ||||
|             loglevel=loglevel, | ||||
|             **pikerd_kwargs, | ||||
| 
 | ||||
|     ) as pikerd_portal: | ||||
|         ) as pikerd_portal: | ||||
| 
 | ||||
|         # we are the root and thus are `pikerd` | ||||
|         # so spawn the target service directly by calling | ||||
|         # the provided target routine. | ||||
|         # XXX: this assumes that the target is well formed and will | ||||
|         # do the right things to setup both a sub-actor **and** call | ||||
|         # the ``_Services`` api from above to start the top level | ||||
|         # service task for that actor. | ||||
|         started: bool | ||||
|         if pikerd_portal is None: | ||||
|             started = await service_task_target( | ||||
|                 loglevel=loglevel, | ||||
|                 **spawn_args, | ||||
|             # we are the root and thus are `pikerd` | ||||
|             # so spawn the target service directly by calling | ||||
|             # the provided target routine. | ||||
|             # XXX: this assumes that the target is well formed and will | ||||
|             # do the right things to setup both a sub-actor **and** call | ||||
|             # the ``_Services`` api from above to start the top level | ||||
|             # service task for that actor. | ||||
|             started: bool | ||||
|             if pikerd_portal is None: | ||||
|                 started = await service_task_target( | ||||
|                     loglevel=loglevel, | ||||
|                     **spawn_args, | ||||
|                 ) | ||||
| 
 | ||||
|             else: | ||||
|                 # request a remote `pikerd` (service manager) to start the | ||||
|                 # target daemon-task, the target can't return | ||||
|                 # a non-serializable value since it is expected that service | ||||
|                 # starting is non-blocking and the target task will persist | ||||
|                 # running "under" or "within" the `pikerd` actor tree after | ||||
|                 # the requesting client disconnects. in other words this | ||||
|                 # spawns a persistent daemon actor that continues to live | ||||
|                 # for the lifespan of whatever the service manager inside | ||||
|                 # `pikerd` says it should. | ||||
|                 started = await pikerd_portal.run( | ||||
|                     service_task_target, | ||||
|                     loglevel=loglevel, | ||||
|                     **spawn_args, | ||||
|                 ) | ||||
| 
 | ||||
|             if started: | ||||
|                 log.info(f'Service {service_name} started!') | ||||
| 
 | ||||
|             # block until we can discover (by IPC connection) the newly | ||||
|             # spawned daemon-actor and then deliver the portal to the | ||||
|             # caller. | ||||
|             async with tractor.wait_for_actor(service_name) as portal: | ||||
|                 lock.release() | ||||
|                 yield portal | ||||
|                 await portal.cancel_actor() | ||||
| 
 | ||||
|     except BaseException as _err: | ||||
|         err = _err | ||||
|         if ( | ||||
|             lock.locked() | ||||
|             and | ||||
|             lock.statistics().owner is current_task() | ||||
|         ): | ||||
|             log.exception( | ||||
|                 f'Releasing stale lock after crash..?\n' | ||||
|                 f'{err!r}\n' | ||||
|             ) | ||||
| 
 | ||||
|         else: | ||||
|             # request a remote `pikerd` (service manager) to start the | ||||
|             # target daemon-task, the target can't return | ||||
|             # a non-serializable value since it is expected that service | ||||
|             # starting is non-blocking and the target task will persist | ||||
|             # running "under" or "within" the `pikerd` actor tree after | ||||
|             # the requesting client disconnects. in other words this | ||||
|             # spawns a persistent daemon actor that continues to live | ||||
|             # for the lifespan of whatever the service manager inside | ||||
|             # `pikerd` says it should. | ||||
|             started = await pikerd_portal.run( | ||||
|                 service_task_target, | ||||
|                 loglevel=loglevel, | ||||
|                 **spawn_args, | ||||
|             ) | ||||
| 
 | ||||
|         if started: | ||||
|             log.info(f'Service {service_name} started!') | ||||
| 
 | ||||
|         # block until we can discover (by IPC connection) the newly | ||||
|         # spawned daemon-actor and then deliver the portal to the | ||||
|         # caller. | ||||
|         async with tractor.wait_for_actor(service_name) as portal: | ||||
|             lock.release() | ||||
|             yield portal | ||||
|             await portal.cancel_actor() | ||||
|             raise err | ||||
| 
 | ||||
| 
 | ||||
| async def spawn_emsd( | ||||
|  |  | |||
|  | @ -109,7 +109,7 @@ class Services: | |||
|                         # wait on any context's return value | ||||
|                         # and any final portal result from the | ||||
|                         # sub-actor. | ||||
|                         ctx_res: Any = await ctx.result() | ||||
|                         ctx_res: Any = await ctx.wait_for_result() | ||||
| 
 | ||||
|                         # NOTE: blocks indefinitely until cancelled | ||||
|                         # either by error from the target context | ||||
|  |  | |||
|  | @ -101,13 +101,15 @@ async def open_registry( | |||
| 
 | ||||
|     if ( | ||||
|         not tractor.is_root_process() | ||||
|         and not Registry.addrs | ||||
|         and | ||||
|         not Registry.addrs | ||||
|     ): | ||||
|         Registry.addrs.extend(actor.reg_addrs) | ||||
| 
 | ||||
|     if ( | ||||
|         ensure_exists | ||||
|         and not Registry.addrs | ||||
|         and | ||||
|         not Registry.addrs | ||||
|     ): | ||||
|         raise RuntimeError( | ||||
|             f"`{uid}` registry should already exist but doesn't?" | ||||
|  | @ -146,7 +148,7 @@ async def find_service( | |||
|     | list[Portal] | ||||
|     | None | ||||
| ): | ||||
| 
 | ||||
|     # try: | ||||
|     reg_addrs: list[tuple[str, int]] | ||||
|     async with open_registry( | ||||
|         addrs=( | ||||
|  | @ -157,22 +159,39 @@ async def find_service( | |||
|             or Registry.addrs | ||||
|         ), | ||||
|     ) as reg_addrs: | ||||
|         log.info(f'Scanning for service `{service_name}`') | ||||
| 
 | ||||
|         maybe_portals: list[Portal] | Portal | None | ||||
|         log.info( | ||||
|             f'Scanning for service {service_name!r}' | ||||
|         ) | ||||
| 
 | ||||
|         # attach to existing daemon by name if possible | ||||
|         maybe_portals: list[Portal]|Portal|None | ||||
|         async with tractor.find_actor( | ||||
|             service_name, | ||||
|             registry_addrs=reg_addrs, | ||||
|             only_first=first_only,  # if set only returns single ref | ||||
|         ) as maybe_portals: | ||||
|             if not maybe_portals: | ||||
|                 # log.info( | ||||
|                 print( | ||||
|                     f'Could NOT find service {service_name!r} -> {maybe_portals!r}' | ||||
|                 ) | ||||
|                 yield None | ||||
|                 return | ||||
| 
 | ||||
|             # log.info( | ||||
|             print( | ||||
|                 f'Found service {service_name!r} -> {maybe_portals}' | ||||
|             ) | ||||
|             yield maybe_portals | ||||
| 
 | ||||
|     # except BaseException as _berr: | ||||
|     #     berr = _berr | ||||
|     #     log.exception( | ||||
|     #         'tractor.find_actor() failed with,\n' | ||||
|     #     ) | ||||
|     #     raise berr | ||||
| 
 | ||||
| 
 | ||||
| async def check_for_service( | ||||
|     service_name: str, | ||||
|  |  | |||
|  | @ -138,6 +138,16 @@ class StorageClient( | |||
|     ) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     async def write_oi( | ||||
|         self, | ||||
|         fqme: str, | ||||
|         oi: np.ndarray, | ||||
|         append_and_duplicate: bool = True, | ||||
|         limit: int = int(800e3), | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ... | ||||
| 
 | ||||
| 
 | ||||
| class TimeseriesNotFound(Exception): | ||||
|     ''' | ||||
|  |  | |||
|  | @ -111,6 +111,24 @@ def mk_ohlcv_shm_keyed_filepath( | |||
|     return path | ||||
| 
 | ||||
| 
 | ||||
| def mk_oi_shm_keyed_filepath( | ||||
|     fqme: str, | ||||
|     period: float | int, | ||||
|     datadir: Path, | ||||
| 
 | ||||
| ) -> Path: | ||||
| 
 | ||||
|     if period < 1.: | ||||
|         raise ValueError('Sample period should be >= 1.!?') | ||||
| 
 | ||||
|     path: Path = ( | ||||
|         datadir | ||||
|         / | ||||
|         f'{fqme}.oi{int(period)}s.parquet' | ||||
|     ) | ||||
|     return path | ||||
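|     # ^ e.g. for `fqme='btc.usdtm.perp.binance'` and `period=1` | ||||
|     # this yields `<datadir>/btc.usdtm.perp.binance.oi1s.parquet` | ||||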
| 
 | ||||
| 
 | ||||
| def unpack_fqme_from_parquet_filepath(path: Path) -> str: | ||||
| 
 | ||||
|     filename: str = str(path.name) | ||||
|  | @ -172,7 +190,11 @@ class NativeStorageClient: | |||
| 
 | ||||
|             key: str = path.name.rstrip('.parquet') | ||||
|             fqme, _, descr = key.rpartition('.') | ||||
|             prefix, _, suffix = descr.partition('ohlcv') | ||||
|             if 'ohlcv' in descr: | ||||
|                 prefix, _, suffix = descr.partition('ohlcv') | ||||
|             elif 'oi' in descr: | ||||
|                 prefix, _, suffix = descr.partition('oi') | ||||
|             else: | ||||
|                 # XXX, skip files w/o a known ts-type in the name, | ||||
|                 # otherwise `suffix` below would be unbound. | ||||
|                 continue | ||||
| 
 | ||||
|             period: int = int(suffix.strip('s')) | ||||
| 
 | ||||
|             # cache description data | ||||
|  | @ -369,6 +391,61 @@ class NativeStorageClient: | |||
|             timeframe, | ||||
|         ) | ||||
| 
 | ||||
|     def _write_oi( | ||||
|         self, | ||||
|         fqme: str, | ||||
|         oi: np.ndarray, | ||||
| 
 | ||||
|     ) -> Path: | ||||
|         ''' | ||||
|         Sync version of the public interface meth, since we don't | ||||
|         currently actually need or support an async impl. | ||||
| 
 | ||||
|         ''' | ||||
|         path: Path = mk_oi_shm_keyed_filepath( | ||||
|             fqme=fqme, | ||||
|             period=1, | ||||
|             datadir=self._datadir, | ||||
|         ) | ||||
|         if isinstance(oi, np.ndarray): | ||||
|             new_df: pl.DataFrame = tsp.np2pl(oi) | ||||
|         else: | ||||
|             new_df = oi | ||||
| 
 | ||||
|         if path.exists(): | ||||
|             old_df = pl.read_parquet(path) | ||||
|             df = pl.concat([old_df, new_df]) | ||||
|         else: | ||||
|             df = new_df | ||||
| 
 | ||||
|         start = time.time() | ||||
|         df.write_parquet(path) | ||||
|         delay: float = round( | ||||
|             time.time() - start, | ||||
|             ndigits=6, | ||||
|         ) | ||||
|         log.info( | ||||
|             f'parquet write took {delay} secs\n' | ||||
|             f'file path: {path}' | ||||
|         ) | ||||
|         return path | ||||
| 
 | ||||
|     async def write_oi( | ||||
|         self, | ||||
|         fqme: str, | ||||
|         oi: np.ndarray, | ||||
| 
 | ||||
|     ) -> Path: | ||||
|         ''' | ||||
|         Write input oi time series for fqme and sampling period | ||||
|         to (local) disk. | ||||
| 
 | ||||
|         ''' | ||||
|         return self._write_oi( | ||||
|             fqme, | ||||
|             oi, | ||||
|         ) | ||||
| 
 | ||||
|     async def delete_ts( | ||||
|         self, | ||||
|         key: str, | ||||
|  |  | |||
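The `_write_oi()` persistence above is a plain read-concat-rewrite: parquet has no in-place append, so any pre-existing frame is loaded, stacked with the new rows, and the whole file rewritten. A standalone sketch of the same pattern (path and schema are illustrative only)::

    from pathlib import Path
    import polars as pl

    def append_parquet(path: Path, new_df: pl.DataFrame) -> pl.DataFrame:
        # load any prior on-disk frame and stack the new rows under it
        if path.exists():
            df = pl.concat([pl.read_parquet(path), new_df])
        else:
            df = new_df

        # rewrite the entire file; cheap enough for small OI series
        df.write_parquet(path)
        return df

    oi = pl.DataFrame({'time': [1, 2, 3], 'oi': [10.0, 11.5, 12.0]})
    print(append_parquet(Path('/tmp/example.oi1s.parquet'), oi))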
|  | @ -963,7 +963,10 @@ async def tsdb_backfill( | |||
|         # concurrently load the provider's most-recent-frame AND any | ||||
|         # pre-existing tsdb history already saved in `piker` storage. | ||||
|         dt_eps: list[DateTime, DateTime] = [] | ||||
|         async with trio.open_nursery() as tn: | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as tn | ||||
|         ): | ||||
|             tn.start_soon( | ||||
|                 push_latest_frame, | ||||
|                 dt_eps, | ||||
|  | @ -1012,9 +1015,16 @@ async def tsdb_backfill( | |||
|                 int, | ||||
|                 Duration, | ||||
|             ]|None = config.get('frame_types', None) | ||||
| 
 | ||||
|             if def_frame_durs: | ||||
|                 def_frame_size: Duration = def_frame_durs[timeframe] | ||||
|                 assert def_frame_size == calced_frame_size | ||||
| 
 | ||||
|                 if def_frame_size != calced_frame_size: | ||||
|                     log.warning( | ||||
|                         f'Expected frame size {def_frame_size}\n' | ||||
|                         f'Rxed frame {calced_frame_size}\n' | ||||
|                     ) | ||||
|                     # await tractor.pause() | ||||
|             else: | ||||
|                 # use what we calced from first frame above. | ||||
|                 def_frame_size = calced_frame_size | ||||
|  | @ -1043,7 +1053,9 @@ async def tsdb_backfill( | |||
|             # if there is a gap to backfill from the first | ||||
|             # history frame until the last datum loaded from the tsdb | ||||
|             # continue that now in the background | ||||
|             async with trio.open_nursery() as tn: | ||||
|             async with trio.open_nursery( | ||||
|                 strict_exception_groups=False, | ||||
|             ) as tn: | ||||
| 
 | ||||
|                 bf_done = await tn.start( | ||||
|                     partial( | ||||
|  | @ -1245,13 +1257,17 @@ async def manage_history( | |||
|     service: str = name.rstrip(f'.{mod.name}') | ||||
|     fqme: str = mkt.get_fqme(delim_char='') | ||||
| 
 | ||||
|     # Create a short hash of the fqme for macOS compatibility | ||||
|     import hashlib | ||||
|     fqme_hash = hashlib.md5(fqme.encode()).hexdigest()[:8] | ||||
| 
 | ||||
|     # (maybe) allocate shm array for this broker/symbol which will | ||||
|     # be used for fast near-term history capture and processing. | ||||
|     hist_shm, opened = maybe_open_shm_array( | ||||
|         size=_default_hist_size, | ||||
|         append_start_index=_hist_buffer_start, | ||||
| 
 | ||||
|         key=f'piker.{service}[{uuid[:16]}].{fqme}.hist', | ||||
|         key=f'{uuid[:8]}_{fqme_hash}.h', | ||||
| 
 | ||||
|         # use any broker defined ohlc dtype: | ||||
|         dtype=getattr(mod, '_ohlc_dtype', def_iohlcv_fields), | ||||
|  | @ -1270,7 +1286,7 @@ async def manage_history( | |||
|     rt_shm, opened = maybe_open_shm_array( | ||||
|         size=_default_rt_size, | ||||
|         append_start_index=_rt_buffer_start, | ||||
|         key=f'piker.{service}[{uuid[:16]}].{fqme}.rt', | ||||
|         key=f'{uuid[:8]}_{fqme_hash}.r', | ||||
| 
 | ||||
|         # use any broker defined ohlc dtype: | ||||
|         dtype=getattr(mod, '_ohlc_dtype', def_iohlcv_fields), | ||||
|  | @ -1308,6 +1324,7 @@ async def manage_history( | |||
|         # sampling period) data set since normally differently | ||||
|         # sampled timeseries can be loaded / process independently | ||||
|         # ;) | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         log.info( | ||||
|  |  | |||
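The md5-shortened shm keys above exist because macOS's POSIX `shm_open()` rejects names longer than 31 chars (`PSHMNAMLEN`), which the old `piker.{service}[{uuid}].{fqme}.hist` style keys easily exceeded. A sketch of the scheme (the uuid/fqme values are illustrative)::

    import hashlib

    def mk_shm_key(uuid: str, fqme: str, kind: str) -> str:
        # `kind` is '.h' (hist) or '.r' (rt) per the diff above
        fqme_hash: str = hashlib.md5(fqme.encode()).hexdigest()[:8]
        return f'{uuid[:8]}_{fqme_hash}{kind}'

    key = mk_shm_key(
        uuid='deadbeef-cafe-4f00-aaaa-bbbbccccdddd',
        fqme='btcusdt.spot.binance',
        kind='.h',
    )
    # 8 + 1 + 8 + 2 chars: comfortably under the 31-char cap
    assert len(key) <= 30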
|  | @ -517,7 +517,7 @@ def with_dts( | |||
| 
 | ||||
|     ''' | ||||
|     return df.with_columns([ | ||||
|         pl.col(time_col).shift(1).suffix('_prev'), | ||||
|         pl.col(time_col).shift(1).name.suffix('_prev'), | ||||
|         pl.col(time_col).diff().alias('s_diff'), | ||||
|         pl.from_epoch(pl.col(time_col)).alias('dt'), | ||||
|     ]).with_columns([ | ||||
|  | @ -623,7 +623,7 @@ def detect_vlm_gaps( | |||
| 
 | ||||
| ) -> pl.DataFrame: | ||||
| 
 | ||||
|     vnull: pl.DataFrame = w_dts.filter( | ||||
|     vnull: pl.DataFrame = df.filter( | ||||
|         pl.col(col) == 0 | ||||
|     ) | ||||
|     return vnull | ||||
|  |  | |||
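The one-char `with_dts()` fix tracks polars' move of the expression renaming helpers into the `.name` namespace (`Expr.suffix()` -> `Expr.name.suffix()`), while the `detect_vlm_gaps()` fix repairs an undefined-variable typo (`w_dts` -> `df`). A tiny repro of the renamed call on sample data::

    import polars as pl

    df = pl.DataFrame({'time': [1, 2, 4, 5]})
    out = df.with_columns([
        # was `pl.col('time').shift(1).suffix('_prev')` pre-namespace-move
        pl.col('time').shift(1).name.suffix('_prev'),
        pl.col('time').diff().alias('s_diff'),
    ])
    print(out)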
|  | @ -21,6 +21,7 @@ Main app startup and run. | |||
| from functools import partial | ||||
| from types import ModuleType | ||||
| 
 | ||||
| import tractor | ||||
| import trio | ||||
| 
 | ||||
| from piker.ui.qt import ( | ||||
|  | @ -116,6 +117,7 @@ async def _async_main( | |||
|         needed_brokermods[brokername] = brokers[brokername] | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as root_n, | ||||
|     ): | ||||
|         # set root nursery and task stack for spawning other charts/feeds | ||||
|  |  | |||
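The `tractor.trionics.collapse_eg()` + nursery pairing that recurs throughout these hunks deals with modern `trio`'s strict nurseries, which always raise `ExceptionGroup`s: presumably it unwraps a single-error group back to the bare exception so legacy `except SomeError:` clauses keep matching (the alternative, also used once above, is `strict_exception_groups=False`). A hedged sketch under that assumption::

    import tractor
    import trio

    async def boom():
        raise ValueError('kaboom')

    async def main():
        try:
            async with (
                tractor.trionics.collapse_eg(),
                trio.open_nursery() as tn,
            ):
                tn.start_soon(boom)
        except ValueError as err:
            # without `collapse_eg()` this surfaces as an
            # `ExceptionGroup` wrapping the `ValueError` instead
            print(f'caught bare error: {err}')

    trio.run(main)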
|  | @ -75,6 +75,9 @@ class Axis(pg.AxisItem): | |||
|         self.pi = plotitem | ||||
|         self._dpi_font = _font | ||||
| 
 | ||||
|         # store for later recalculation on zoom | ||||
|         self._typical_max_str = typical_max_str | ||||
| 
 | ||||
|         self.setTickFont(_font.font) | ||||
|         font_size = self._dpi_font.font.pixelSize() | ||||
| 
 | ||||
|  | @ -156,6 +159,41 @@ class Axis(pg.AxisItem): | |||
|     def size_to_values(self) -> None: | ||||
|         pass | ||||
| 
 | ||||
|     def update_fonts(self, font: DpiAwareFont) -> None: | ||||
|         '''Update font and recalculate axis sizing after zoom change.''' | ||||
|         # IMPORTANT: tell Qt we're about to change geometry | ||||
|         self.prepareGeometryChange() | ||||
| 
 | ||||
|         self._dpi_font = font | ||||
|         self.setTickFont(font.font) | ||||
|         font_size = font.font.pixelSize() | ||||
| 
 | ||||
|         # recalculate text offset based on new font size | ||||
|         text_offset = None | ||||
|         if self.orientation in ('bottom',): | ||||
|             text_offset = floor(0.25 * font_size) | ||||
|         elif self.orientation in ('left', 'right'): | ||||
|             text_offset = floor(font_size / 2) | ||||
| 
 | ||||
|         if text_offset: | ||||
|             self.setStyle(tickTextOffset=text_offset) | ||||
| 
 | ||||
|         # recalculate bounding rect with new font | ||||
|         # Note: typical_max_str should be stored from init | ||||
|         if not hasattr(self, '_typical_max_str'): | ||||
|             self._typical_max_str = '100 000.000  '  # fallback default | ||||
|         self.typical_br = font._qfm.boundingRect(self._typical_max_str) | ||||
| 
 | ||||
|         # Update PyQtGraph's internal text size tracking | ||||
|         # This is critical - PyQtGraph uses these internally for auto-expand | ||||
|         if self.orientation in ['left', 'right']: | ||||
|             self.textWidth = self.typical_br.width() | ||||
|         else: | ||||
|             self.textHeight = self.typical_br.height() | ||||
| 
 | ||||
|         # resize axis to fit new font - this triggers PyQtGraph's auto-expand | ||||
|         self.size_to_values() | ||||
| 
 | ||||
|     def txt_offsets(self) -> tuple[int, int]: | ||||
|         return tuple(self.style['tickTextOffset']) | ||||
| 
 | ||||
|  | @ -256,7 +294,14 @@ class PriceAxis(Axis): | |||
|         self._min_tick = size | ||||
| 
 | ||||
|     def size_to_values(self) -> None: | ||||
|         self.setWidth(self.typical_br.width()) | ||||
|         # Call PyQtGraph's internal width update mechanism | ||||
|         # This respects autoExpandTextSpace and updates min/max constraints | ||||
|         self._updateWidth() | ||||
|         # tell Qt our preferred size changed so layout recalculates | ||||
|         self.updateGeometry() | ||||
|         # force parent plot item to recalculate its layout | ||||
|         if self.pi and hasattr(self.pi, 'updateGeometry'): | ||||
|             self.pi.updateGeometry() | ||||
| 
 | ||||
|     # XXX: drop for now since it just eats up h space | ||||
| 
 | ||||
|  | @ -300,7 +345,14 @@ class DynamicDateAxis(Axis): | |||
|     } | ||||
| 
 | ||||
|     def size_to_values(self) -> None: | ||||
|         self.setHeight(self.typical_br.height() + 1) | ||||
|         # Call PyQtGraph's internal height update mechanism | ||||
|         # This respects autoExpandTextSpace and updates min/max constraints | ||||
|         self._updateHeight() | ||||
|         # tell Qt our preferred size changed so layout recalculates | ||||
|         self.updateGeometry() | ||||
|         # force parent plot item to recalculate its layout | ||||
|         if self.pi and hasattr(self.pi, 'updateGeometry'): | ||||
|             self.pi.updateGeometry() | ||||
| 
 | ||||
|     def _indexes_to_timestrs( | ||||
|         self, | ||||
|  |  | |||
|  | @ -33,7 +33,6 @@ import trio | |||
| 
 | ||||
| from piker.ui.qt import ( | ||||
|     QtCore, | ||||
|     QtWidgets, | ||||
|     Qt, | ||||
|     QLineF, | ||||
|     QFrame, | ||||
|  |  | |||
|  | @ -212,7 +212,9 @@ async def increment_history_view( | |||
|     hist_chart: ChartPlotWidget = ds.hist_chart | ||||
|     hist_viz: Viz = ds.hist_viz | ||||
|     # viz: Viz = ds.viz | ||||
|     assert 'hist' in hist_viz.shm.token['shm_name'] | ||||
|     # NOTE: Changed for macOS compatibility with shortened shm names | ||||
|     # assert 'hist' in hist_viz.shm.token['shm_name'] | ||||
|     assert hist_viz.shm.token['shm_name'].endswith('.h') | ||||
|     # name: str = hist_viz.name | ||||
| 
 | ||||
|     # TODO: seems this is more reliable at keeping the slow | ||||
|  | @ -1445,7 +1447,10 @@ async def display_symbol_data( | |||
|         # for pause/resume on mouse interaction | ||||
|         rt_chart.feed = feed | ||||
| 
 | ||||
|         async with trio.open_nursery() as ln: | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery() as ln, | ||||
|         ): | ||||
|             # if available load volume related built-in display(s) | ||||
|             vlm_charts: dict[ | ||||
|                 str, | ||||
|  |  | |||
|  | @ -22,7 +22,10 @@ from contextlib import asynccontextmanager as acm | |||
| from typing import Callable | ||||
| 
 | ||||
| import trio | ||||
| from tractor.trionics import gather_contexts | ||||
| from tractor.trionics import ( | ||||
|     gather_contexts, | ||||
|     collapse_eg, | ||||
| ) | ||||
| 
 | ||||
| from piker.ui.qt import ( | ||||
|     QtCore, | ||||
|  | @ -207,7 +210,10 @@ async def open_signal_handler( | |||
|         async for args in recv: | ||||
|             await async_handler(*args) | ||||
| 
 | ||||
|     async with trio.open_nursery() as tn: | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         tn.start_soon(proxy_to_handler) | ||||
|         async with send: | ||||
|             yield | ||||
|  | @ -242,6 +248,7 @@ async def open_handlers( | |||
|     widget: QWidget | ||||
|     streams: list[trio.abc.ReceiveChannel] | ||||
|     async with ( | ||||
|         collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|         gather_contexts([ | ||||
|             open_event_stream( | ||||
|  |  | |||
|  | @ -199,6 +199,9 @@ def run_qtractor( | |||
|     if is_windows: | ||||
|         window.configure_to_desktop() | ||||
| 
 | ||||
|     # install global keyboard shortcuts for UI zoom | ||||
|     window.install_global_zoom_filter() | ||||
| 
 | ||||
|     # actually render to screen | ||||
|     window.show() | ||||
|     app.exec_() | ||||
|  |  | |||
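`install_global_zoom_filter()` (defined in the window module further below) hangs a filter off the `QApplication` itself, which is how it beats widget-local shortcut handling. A minimal PyQt6 sketch of that mechanism with an illustrative handler::

    from PyQt6.QtCore import QEvent, QObject, Qt
    from PyQt6.QtWidgets import QApplication, QLabel

    class EscQuitFilter(QObject):
        def eventFilter(self, obj: QObject, event: QEvent) -> bool:
            # returning True consumes the event before any widget,
            # focused or not, gets a chance to see it.
            if (
                event.type() == QEvent.Type.KeyPress
                and event.key() == Qt.Key.Key_Escape
            ):
                QApplication.instance().quit()
                return True
            return False

    app = QApplication([])
    filt = EscQuitFilter()
    app.installEventFilter(filt)  # app-level, not per-widget
    label = QLabel('hit <esc> to quit')
    label.show()
    app.exec()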
|  | @ -18,10 +18,11 @@ | |||
| Feed status and controls widget(s) for embedding in a UI-pane. | ||||
| 
 | ||||
| """ | ||||
| 
 | ||||
| from __future__ import annotations | ||||
| from textwrap import dedent | ||||
| from typing import TYPE_CHECKING | ||||
| from typing import ( | ||||
|     Any, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| # from PyQt5.QtCore import Qt | ||||
| 
 | ||||
|  | @ -49,35 +50,55 @@ def mk_feed_label( | |||
|     a feed control protocol. | ||||
| 
 | ||||
|     ''' | ||||
|     status = feed.status | ||||
|     status: dict[str, Any] = feed.status | ||||
|     assert status | ||||
| 
 | ||||
|     msg = dedent(""" | ||||
|         actor: **{actor_name}**\n | ||||
|         |_ @**{host}:{port}**\n | ||||
|     """) | ||||
|     # SO tips on ws/nls, | ||||
|     # https://stackoverflow.com/a/15721400 | ||||
|     ws: str = ' ' | ||||
|     # nl: str = '<br>'  # dun work? | ||||
|     actor_info_repr: str = ( | ||||
|         f')> **{status["actor_short_id"]}**\n' | ||||
|         '\n'  # bc md? | ||||
|     ) | ||||
| 
 | ||||
|     for key, val in status.items(): | ||||
|         if key in ('host', 'port', 'actor_name'): | ||||
|             continue | ||||
|         msg += f'\n|_ {key}: **{{{key}}}**\n' | ||||
|     # fields to select *IN* for display | ||||
|     # (see `.data.feed.open_feed()` status | ||||
|     #  update -> TAG_feed_status_update) | ||||
|     for key in [ | ||||
|         'ipc', | ||||
|         'hist_shm', | ||||
|         'rt_shm', | ||||
|         'throttle_hz', | ||||
|     ]: | ||||
|         # NOTE, the 2nd key is filled via `.format()` updates. | ||||
|         actor_info_repr += ( | ||||
|             f'\n'  # bc md? | ||||
|             f'{ws}|_{key}: **{{{key}}}**\n' | ||||
|         ) | ||||
|         # ^TODO? formatting and content.. | ||||
|         # -[ ] showing which fqme is "forward" on the | ||||
|         #    chart/fsp/order-mode? | ||||
|         #  '|_ flows: **{symbols}**\n' | ||||
|         # | ||||
|         # -[x] why isn't the indent working? | ||||
|         #  => markdown, now solved.. | ||||
| 
 | ||||
|     feed_label = FormatLabel( | ||||
|         fmt_str=msg, | ||||
|         # |_ streams: **{symbols}**\n | ||||
|         fmt_str=actor_info_repr, | ||||
|         font=_font.font, | ||||
|         font_size=_font_small.px_size, | ||||
|         font_color='default_lightest', | ||||
|     ) | ||||
| 
 | ||||
|     # ?TODO, remove this? | ||||
|     # form.vbox.setAlignment(feed_label, Qt.AlignBottom) | ||||
|     # form.vbox.setAlignment(Qt.AlignBottom) | ||||
|     _ = chart.height() - ( | ||||
|         form.height() + | ||||
|         form.fill_bar.height() | ||||
|         # feed_label.height() | ||||
|     ) | ||||
|     # _ = chart.height() - ( | ||||
|     #     form.height() + | ||||
|     #     form.fill_bar.height() | ||||
|     #     # feed_label.height() | ||||
|     # ) | ||||
| 
 | ||||
|     feed_label.format(**feed.status) | ||||
| 
 | ||||
|     return feed_label | ||||
|  |  | |||
|  | @ -124,6 +124,13 @@ class Edit(QLineEdit): | |||
|         self.sizeHint() | ||||
|         self.update() | ||||
| 
 | ||||
|     def update_fonts(self, font: DpiAwareFont) -> None: | ||||
|         '''Update font and recalculate widget size.''' | ||||
|         self.dpi_font = font | ||||
|         self.setFont(font.font) | ||||
|         # tell Qt our size hint changed so it recalculates layout | ||||
|         self.updateGeometry() | ||||
| 
 | ||||
|     def focus(self) -> None: | ||||
|         self.selectAll() | ||||
|         self.show() | ||||
|  | @ -241,6 +248,14 @@ class Selection(QComboBox): | |||
|         icon_size = round(h * 0.75) | ||||
|         self.setIconSize(QSize(icon_size, icon_size)) | ||||
| 
 | ||||
|     def update_fonts(self, font: DpiAwareFont) -> None: | ||||
|         '''Update font and recalculate widget size.''' | ||||
|         self.setFont(font.font) | ||||
|         # recalculate heights with new font | ||||
|         self.resize() | ||||
|         # tell Qt our size hint changed so it recalculates layout | ||||
|         self.updateGeometry() | ||||
| 
 | ||||
|     def set_items( | ||||
|         self, | ||||
|         keys: list[str], | ||||
|  | @ -431,6 +446,39 @@ class FieldsForm(QWidget): | |||
|         self.fields[key] = select | ||||
|         return select | ||||
| 
 | ||||
|     def update_fonts(self) -> None: | ||||
|         '''Update font sizes after zoom change.''' | ||||
|         from ._style import _font, _font_small | ||||
| 
 | ||||
|         # update stored font size | ||||
|         self._font_size = _font_small.px_size - 2 | ||||
| 
 | ||||
|         # update all labels | ||||
|         for name, label in self.labels.items(): | ||||
|             if hasattr(label, 'update_font'): | ||||
|                 label.update_font(_font.font, self._font_size - 1) | ||||
| 
 | ||||
|         # update all fields (edits, selects) | ||||
|         for key, field in self.fields.items(): | ||||
|             # first check for our custom update_fonts method (Edit, Selection) | ||||
|             if hasattr(field, 'update_fonts'): | ||||
|                 field.update_fonts(_font) | ||||
|             # then handle stylesheet updates for those without custom methods | ||||
|             elif hasattr(field, 'setStyleSheet'): | ||||
|                 # regenerate stylesheet with new font size | ||||
|                 field.setStyleSheet( | ||||
|                     f"""QLineEdit {{ | ||||
|                         color : {hcolor('gunmetal')}; | ||||
|                         font-size : {self._font_size}px; | ||||
|                     }} | ||||
|                     """ | ||||
|                 ) | ||||
|                 field.setFont(_font.font) | ||||
| 
 | ||||
|             # for Selection widgets that need style updates | ||||
|             if hasattr(field, 'set_style'): | ||||
|                 field.set_style(color='gunmetal', font_size=self._font_size) | ||||
| 
 | ||||
| 
 | ||||
| async def handle_field_input( | ||||
| 
 | ||||
|  | @ -633,6 +681,37 @@ class FillStatusBar(QProgressBar): | |||
|         self.setRange(0, int(slots)) | ||||
|         self.setValue(value) | ||||
| 
 | ||||
|     def update_fonts(self, font_size: int) -> None: | ||||
|         '''Update font size after zoom change.''' | ||||
|         from ._style import _font_small | ||||
| 
 | ||||
|         self.font_size = font_size | ||||
|         # regenerate stylesheet with new font size | ||||
|         self.setStyleSheet( | ||||
|             f""" | ||||
|             QProgressBar {{ | ||||
| 
 | ||||
|                 text-align: center; | ||||
| 
 | ||||
|                 font-size : {self.font_size - 2}px; | ||||
| 
 | ||||
|                 background-color: {hcolor('papas_special')}; | ||||
|                 color : {hcolor('papas_special')}; | ||||
| 
 | ||||
|                 border: {self.border_px}px solid {hcolor('default_light')}; | ||||
|                 border-radius: 2px; | ||||
|             }} | ||||
|             QProgressBar::chunk {{ | ||||
| 
 | ||||
|                 background-color: {hcolor('default_spotlight')}; | ||||
|                 color: {hcolor('bracket')}; | ||||
| 
 | ||||
|                 border-radius: 2px; | ||||
|             }} | ||||
|             """ | ||||
|         ) | ||||
|         self.setFont(_font_small.font) | ||||
| 
 | ||||
| 
 | ||||
| def mk_fill_status_bar( | ||||
| 
 | ||||
|  |  | |||
|  | @ -600,6 +600,7 @@ async def open_fsp_admin( | |||
|             kwargs=kwargs, | ||||
|         ) as (cache_hit, cluster_map), | ||||
| 
 | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
|     ): | ||||
|         if cache_hit: | ||||
|  | @ -613,6 +614,8 @@ async def open_fsp_admin( | |||
|         ) | ||||
|         try: | ||||
|             yield admin | ||||
| 
 | ||||
|         # ??TODO, does this *need* to be inside a finally? | ||||
|         finally: | ||||
|             # terminate all tasks via signals | ||||
|             for key, entry in admin._registry.items(): | ||||
|  |  | |||
|  | @ -285,18 +285,20 @@ class FormatLabel(QLabel): | |||
|         font_size: int, | ||||
|         font_color: str, | ||||
| 
 | ||||
|         use_md: bool = True, | ||||
| 
 | ||||
|         parent=None, | ||||
| 
 | ||||
|     ) -> None: | ||||
| 
 | ||||
|         super().__init__(parent) | ||||
| 
 | ||||
|         # by default set the format string verbatim and expect user to | ||||
|         # call ``.format()`` later (presumably they'll notice the | ||||
|         # by default set the format string verbatim and expect user | ||||
|         # to call ``.format()`` later (presumably they'll notice the | ||||
|         # unformatted content if ``fmt_str`` isn't meant to be | ||||
|         # unformatted). | ||||
|         self.fmt_str = fmt_str | ||||
|         self.setText(fmt_str) | ||||
|         # self.setText(fmt_str)  # ?TODO, why here? | ||||
| 
 | ||||
|         self.setStyleSheet( | ||||
|             f"""QLabel {{ | ||||
|  | @ -306,9 +308,10 @@ class FormatLabel(QLabel): | |||
|             """ | ||||
|         ) | ||||
|         self.setFont(_font.font) | ||||
|         self.setTextFormat( | ||||
|             Qt.TextFormat.MarkdownText | ||||
|         ) | ||||
|         if use_md: | ||||
|             self.setTextFormat( | ||||
|                 Qt.TextFormat.MarkdownText | ||||
|             ) | ||||
|         self.setMargin(0) | ||||
| 
 | ||||
|         self.setSizePolicy( | ||||
|  | @ -316,7 +319,10 @@ class FormatLabel(QLabel): | |||
|             size_policy.Expanding, | ||||
|         ) | ||||
|         self.setAlignment( | ||||
|             Qt.AlignVCenter | Qt.AlignLeft | ||||
|             Qt.AlignLeft | ||||
|             | | ||||
|             Qt.AlignBottom | ||||
|             # Qt.AlignVCenter | ||||
|         ) | ||||
|         self.setText(self.fmt_str) | ||||
| 
 | ||||
|  | @ -328,3 +334,19 @@ class FormatLabel(QLabel): | |||
|         out = self.fmt_str.format(**fields) | ||||
|         self.setText(out) | ||||
|         return out | ||||
| 
 | ||||
|     def update_font( | ||||
|         self, | ||||
|         font: QtGui.QFont, | ||||
|         font_size: int, | ||||
|         font_color: str = 'default_lightest', | ||||
|     ) -> None: | ||||
|         '''Update font after zoom change.''' | ||||
|         self.setStyleSheet( | ||||
|             f"""QLabel {{ | ||||
|                 color : {hcolor(font_color)}; | ||||
|                 font-size : {font_size}px; | ||||
|             }} | ||||
|             """ | ||||
|         ) | ||||
|         self.setFont(font) | ||||
|  |  | |||
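`FormatLabel`'s two-phase templating (stash `fmt_str` at construction, fill the holes later via `.format(**fields)`) is plain deferred `str.format`; e.g. the feed-label usage earlier boils down to::

    fmt_str: str = ')> **{actor_short_id}**\n\n |_ipc: **{ipc}**\n'
    status: dict[str, str] = {
        'actor_short_id': 'chart@tcp://127.0.0.1:616',
        'ipc': 'up',
    }
    print(fmt_str.format(**status))  # markdown-ish text the label renders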
|  | @ -178,6 +178,26 @@ class SettingsPane: | |||
|     # encompassing high level namespace | ||||
|     order_mode: OrderMode | None = None  # typing: ignore # noqa | ||||
| 
 | ||||
|     def update_fonts(self) -> None: | ||||
|         '''Update font sizes after zoom change.''' | ||||
|         from ._style import _font_small | ||||
| 
 | ||||
|         # update form fields | ||||
|         if self.form and hasattr(self.form, 'update_fonts'): | ||||
|             self.form.update_fonts() | ||||
| 
 | ||||
|         # update fill status bar | ||||
|         if self.fill_bar and hasattr(self.fill_bar, 'update_fonts'): | ||||
|             self.fill_bar.update_fonts(_font_small.px_size) | ||||
| 
 | ||||
|         # update labels with new fonts | ||||
|         if self.step_label: | ||||
|             self.step_label.setFont(_font_small.font) | ||||
|         if self.pnl_label: | ||||
|             self.pnl_label.setFont(_font_small.font) | ||||
|         if self.limit_label: | ||||
|             self.limit_label.setFont(_font_small.font) | ||||
| 
 | ||||
|     def set_accounts( | ||||
|         self, | ||||
|         names: list[str], | ||||
|  |  | |||
|  | @ -15,7 +15,8 @@ | |||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| qompleterz: embeddable search and complete using trio, Qt and rapidfuzz. | ||||
| qompleterz: embeddable search and complete using trio, Qt and | ||||
| rapidfuzz. | ||||
| 
 | ||||
| """ | ||||
| 
 | ||||
|  | @ -46,6 +47,7 @@ import time | |||
| from pprint import pformat | ||||
| 
 | ||||
| from rapidfuzz import process as fuzzy | ||||
| import tractor | ||||
| import trio | ||||
| from trio_typing import TaskStatus | ||||
| 
 | ||||
|  | @ -53,7 +55,7 @@ from piker.ui.qt import ( | |||
|     size_policy, | ||||
|     align_flag, | ||||
|     Qt, | ||||
|     QtCore, | ||||
|     # QtCore, | ||||
|     QtWidgets, | ||||
|     QModelIndex, | ||||
|     QItemSelectionModel, | ||||
|  | @ -172,6 +174,12 @@ class CompleterView(QTreeView): | |||
| 
 | ||||
|         self.setStyleSheet(f"font: {size}px") | ||||
| 
 | ||||
|     def update_fonts(self) -> None: | ||||
|         '''Update font sizes after zoom change.''' | ||||
|         self.set_font_size(_font.px_size) | ||||
|         self.setIndentation(_font.px_size) | ||||
|         self.setFont(_font.font) | ||||
| 
 | ||||
|     def resize_to_results( | ||||
|         self, | ||||
|         w: float | None = 0, | ||||
|  | @ -628,6 +636,27 @@ class SearchWidget(QtWidgets.QWidget): | |||
|             | align_flag.AlignLeft, | ||||
|         ) | ||||
| 
 | ||||
|     def update_fonts(self) -> None: | ||||
|         '''Update font sizes after zoom change.''' | ||||
|         # regenerate label stylesheet with new font size | ||||
|         self.label.setStyleSheet( | ||||
|             f"""QLabel {{ | ||||
|                 color : {hcolor('default_lightest')}; | ||||
|                 font-size : {_font.px_size - 2}px; | ||||
|             }} | ||||
|             """ | ||||
|         ) | ||||
|         self.label.setFont(_font.font) | ||||
| 
 | ||||
|         # update search bar and view fonts | ||||
|         if hasattr(self.bar, 'update_fonts'): | ||||
|             self.bar.update_fonts(_font) | ||||
|         elif hasattr(self.bar, 'setFont'): | ||||
|             self.bar.setFont(_font.font) | ||||
| 
 | ||||
|         if hasattr(self.view, 'update_fonts'): | ||||
|             self.view.update_fonts() | ||||
| 
 | ||||
|     def focus(self) -> None: | ||||
|         self.show() | ||||
|         self.bar.focus() | ||||
|  | @ -920,7 +949,10 @@ async def fill_results( | |||
| 
 | ||||
|             # issue multi-provider fan-out search request and place | ||||
|             # "searching.." statuses on outstanding results providers | ||||
|             async with trio.open_nursery() as n: | ||||
|             async with ( | ||||
|                 tractor.trionics.collapse_eg(), | ||||
|                 trio.open_nursery() as tn | ||||
|             ): | ||||
| 
 | ||||
|                 for provider, (search, pause) in ( | ||||
|                     _searcher_cache.copy().items() | ||||
|  | @ -944,7 +976,7 @@ async def fill_results( | |||
|                                 status_field='-> searchin..', | ||||
|                             ) | ||||
| 
 | ||||
|                         await n.start( | ||||
|                         await tn.start( | ||||
|                             pack_matches, | ||||
|                             view, | ||||
|                             has_results, | ||||
|  | @ -1004,12 +1036,14 @@ async def handle_keyboard_input( | |||
|     view.set_font_size(searchbar.dpi_font.px_size) | ||||
|     send, recv = trio.open_memory_channel(616) | ||||
| 
 | ||||
|     async with trio.open_nursery() as n: | ||||
| 
 | ||||
|     async with ( | ||||
|         tractor.trionics.collapse_eg(),  # needed? | ||||
|         trio.open_nursery() as tn | ||||
|     ): | ||||
|         # start a background multi-searcher task which receives | ||||
|         # patterns relayed from this keyboard input handler and | ||||
|         # async updates the completer view's results. | ||||
|         n.start_soon( | ||||
|         tn.start_soon( | ||||
|             partial( | ||||
|                 fill_results, | ||||
|                 searchw, | ||||
|  |  | |||
|  | @ -80,7 +80,7 @@ class DpiAwareFont: | |||
|         self._screen = None | ||||
| 
 | ||||
|     def _set_qfont_px_size(self, px_size: int) -> None: | ||||
|         self._qfont.setPixelSize(px_size) | ||||
|         self._qfont.setPixelSize(int(px_size)) | ||||
|         self._qfm = QtGui.QFontMetrics(self._qfont) | ||||
| 
 | ||||
|     @property | ||||
|  | @ -109,7 +109,11 @@ class DpiAwareFont: | |||
|     def px_size(self) -> int: | ||||
|         return self._qfont.pixelSize() | ||||
| 
 | ||||
|     def configure_to_dpi(self, screen: QtGui.QScreen | None = None): | ||||
|     def configure_to_dpi( | ||||
|         self, | ||||
|         screen: QtGui.QScreen | None = None, | ||||
|         zoom_level: float = 1.0, | ||||
|     ): | ||||
|         ''' | ||||
|         Set an appropriately sized font size depending on the screen DPI. | ||||
| 
 | ||||
|  | @ -118,7 +122,7 @@ class DpiAwareFont: | |||
| 
 | ||||
|         ''' | ||||
|         if self._font_size is not None: | ||||
|             self._set_qfont_px_size(self._font_size) | ||||
|             self._set_qfont_px_size(self._font_size * zoom_level) | ||||
|             return | ||||
| 
 | ||||
|         # NOTE: if no font size set either in the [ui] section of the | ||||
|  | @ -184,9 +188,13 @@ class DpiAwareFont: | |||
|         self._font_inches = inches | ||||
|         font_size = math.floor(inches * dpi) | ||||
| 
 | ||||
|         # apply zoom level multiplier | ||||
|         font_size = int(font_size * zoom_level) | ||||
| 
 | ||||
|         log.debug( | ||||
|             f"screen:{screen.name()}\n" | ||||
|             f"pDPI: {pdpi}, lDPI: {ldpi}, scale: {scale}\n" | ||||
|             f"zoom_level: {zoom_level}\n" | ||||
|             f"\nOur best guess font size is {font_size}\n" | ||||
|         ) | ||||
|         # apply the size | ||||
|  | @ -213,12 +221,12 @@ _font = DpiAwareFont() | |||
| _font_small = DpiAwareFont(_font_size_key='small') | ||||
| 
 | ||||
| 
 | ||||
| def _config_fonts_to_screen() -> None: | ||||
| def _config_fonts_to_screen(zoom_level: float = 1.0) -> None: | ||||
|     'configure global DPI aware font sizes' | ||||
| 
 | ||||
|     global _font, _font_small | ||||
|     _font.configure_to_dpi() | ||||
|     _font_small.configure_to_dpi() | ||||
|     _font.configure_to_dpi(zoom_level=zoom_level) | ||||
|     _font_small.configure_to_dpi(zoom_level=zoom_level) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: re-compute font size when main widget switches screens? | ||||
|  | @ -269,6 +277,8 @@ def hcolor(name: str) -> str: | |||
| 
 | ||||
|         # default ohlc-bars/curve gray | ||||
|         'bracket': '#666666',  # like the logo | ||||
|         'pikers': '#616161',  # a trader shade of.. | ||||
|         'beast': '#161616',  # in the dark alone. | ||||
| 
 | ||||
|         # bluish | ||||
|         'charcoal': '#36454F', | ||||
|  |  | |||
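The zoom plumbing in `DpiAwareFont` ultimately just scales the DPI-derived pixel size by the multiplier before handing it to Qt. The arithmetic, isolated (input values illustrative)::

    import math

    def px_size(
        inches: float,      # target glyph height in inches
        dpi: float,         # logical DPI of the active screen
        zoom_level: float = 1.0,
    ) -> int:
        return int(math.floor(inches * dpi) * zoom_level)

    assert px_size(inches=0.0666, dpi=96) == 6
    assert px_size(inches=0.0666, dpi=96, zoom_level=4.0) == 24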
|  | @ -18,6 +18,7 @@ | |||
| Qt main window singletons and stuff. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| import os | ||||
| import signal | ||||
| import time | ||||
|  | @ -37,6 +38,8 @@ from piker.ui.qt import ( | |||
|     QStatusBar, | ||||
|     QScreen, | ||||
|     QCloseEvent, | ||||
|     QEvent, | ||||
|     QObject, | ||||
| ) | ||||
| from ..log import get_logger | ||||
| from ._style import _font_small, hcolor | ||||
|  | @ -46,6 +49,68 @@ from ._chart import GodWidget | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class GlobalZoomEventFilter(QObject): | ||||
|     ''' | ||||
|     Application-level event filter for global UI zoom shortcuts. | ||||
| 
 | ||||
|     This filter intercepts keyboard events BEFORE they reach widgets, | ||||
|     allowing us to implement global UI zoom shortcuts that take precedence | ||||
|     over widget-specific shortcuts. | ||||
| 
 | ||||
|     Shortcuts: | ||||
|     - Ctrl+Shift+Plus/Equal: Zoom in | ||||
|     - Ctrl+Shift+Minus: Zoom out | ||||
|     - Ctrl+Shift+0: Reset zoom | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__(self, main_window: MainWindow): | ||||
|         super().__init__() | ||||
|         self.main_window = main_window | ||||
| 
 | ||||
|     def eventFilter(self, obj: QObject, event: QEvent) -> bool: | ||||
|         ''' | ||||
|         Filter keyboard events for global zoom shortcuts. | ||||
| 
 | ||||
|         Returns True to filter out (consume) the event, False to pass through. | ||||
|         ''' | ||||
|         if event.type() == QEvent.Type.KeyPress: | ||||
|             key = event.key() | ||||
|             mods = event.modifiers() | ||||
| 
 | ||||
|             # Mask out the KeypadModifier which Qt sometimes adds | ||||
|             mods = mods & ~Qt.KeyboardModifier.KeypadModifier | ||||
| 
 | ||||
|             # Check if we have Ctrl+Shift (both required) | ||||
|             has_ctrl = bool(mods & Qt.KeyboardModifier.ControlModifier) | ||||
|             has_shift = bool(mods & Qt.KeyboardModifier.ShiftModifier) | ||||
| 
 | ||||
|             # Only handle UI zoom if BOTH Ctrl and Shift are pressed | ||||
|             # For Plus key: user presses Cmd+Shift+Equal (which makes Plus) | ||||
|             # For Minus key: user presses Cmd+Shift+Minus | ||||
|             if has_ctrl and has_shift: | ||||
|                 # Zoom in: Ctrl+Shift+Plus | ||||
|                 # Note: Plus key usually comes as Key_Equal with Shift modifier | ||||
|                 if key in (Qt.Key.Key_Plus, Qt.Key.Key_Equal): | ||||
|                     self.main_window.zoom_in() | ||||
|                     return True  # consume event | ||||
| 
 | ||||
|                 # Zoom out: Ctrl+Shift+Minus | ||||
|                 elif key == Qt.Key.Key_Minus: | ||||
|                     self.main_window.zoom_out() | ||||
|                     return True  # consume event | ||||
| 
 | ||||
|                 # Reset zoom: Ctrl+Shift+0 | ||||
|                 elif key == Qt.Key.Key_0: | ||||
|                     self.main_window.reset_zoom() | ||||
|                     return True  # consume event | ||||
| 
 | ||||
|             # Pass through if only Ctrl (no Shift) - this goes to chart zoom | ||||
|             # Pass through all other events too | ||||
|             return False | ||||
| 
 | ||||
|         return False | ||||
| 
 | ||||
| 
 | ||||
| class MultiStatus: | ||||
| 
 | ||||
|     bar: QStatusBar | ||||
|  | @ -181,6 +246,24 @@ class MainWindow(QMainWindow): | |||
|         self._status_label: QLabel = None | ||||
|         self._size: tuple[int, int] | None = None | ||||
| 
 | ||||
|         # zoom level for UI scaling (1.0 = 100%, 1.5 = 150%, etc) | ||||
|         # Change this value to set the default startup zoom level | ||||
|         self._zoom_level: float = 4.0  # start at 400% zoom | ||||
|         self._min_zoom: float = 0.5 | ||||
|         self._max_zoom: float = 10.0 | ||||
|         self._zoom_step: float = 1.0 | ||||
| 
 | ||||
|         # event filter for global zoom shortcuts | ||||
|         self._zoom_filter: GlobalZoomEventFilter | None = None | ||||
| 
 | ||||
|     def install_global_zoom_filter(self) -> None: | ||||
|         '''Install application-level event filter for global UI zoom shortcuts.''' | ||||
|         if self._zoom_filter is None: | ||||
|             self._zoom_filter = GlobalZoomEventFilter(self) | ||||
|             app = QApplication.instance() | ||||
|             app.installEventFilter(self._zoom_filter) | ||||
|             log.info('Installed global zoom shortcuts: Ctrl+Shift+Plus/Minus/0') | ||||
| 
 | ||||
|     @property | ||||
|     def mode_label(self) -> QLabel: | ||||
| 
 | ||||
|  | @ -336,6 +419,161 @@ class MainWindow(QMainWindow): | |||
|         self.godwidget.on_win_resize(event) | ||||
|         event.accept() | ||||
| 
 | ||||
|     def zoom_in(self) -> None: | ||||
|         '''Increase UI zoom level.''' | ||||
|         new_zoom = min(self._zoom_level + self._zoom_step, self._max_zoom) | ||||
|         if new_zoom != self._zoom_level: | ||||
|             self._zoom_level = new_zoom | ||||
|             self._apply_zoom() | ||||
|             log.info(f'Zoomed in to {self._zoom_level:.1%}') | ||||
| 
 | ||||
|     def zoom_out(self) -> None: | ||||
|         '''Decrease UI zoom level.''' | ||||
|         new_zoom = max(self._zoom_level - self._zoom_step, self._min_zoom) | ||||
|         if new_zoom != self._zoom_level: | ||||
|             self._zoom_level = new_zoom | ||||
|             self._apply_zoom() | ||||
|             log.info(f'Zoomed out to {self._zoom_level:.1%}') | ||||
| 
 | ||||
|     def reset_zoom(self) -> None: | ||||
|         '''Reset UI zoom to 100%.''' | ||||
|         if self._zoom_level != 1.0: | ||||
|             self._zoom_level = 1.0 | ||||
|             self._apply_zoom() | ||||
|             log.info('Reset zoom to 100%') | ||||
| 
 | ||||
|     def _apply_zoom(self) -> None: | ||||
|         '''Apply current zoom level to all UI elements.''' | ||||
|         from . import _style | ||||
| 
 | ||||
|         # reconfigure fonts with zoom multiplier | ||||
|         _style._config_fonts_to_screen(zoom_level=self._zoom_level) | ||||
| 
 | ||||
|         # update status bar styling with new font size | ||||
|         if self._status_bar: | ||||
|             sb = self.statusBar() | ||||
|             sb.setStyleSheet(( | ||||
|                 f"color : {hcolor('gunmetal')};" | ||||
|                 f"background : {hcolor('default_dark')};" | ||||
|                 f"font-size : {_style._font_small.px_size}px;" | ||||
|                 "padding : 0px;" | ||||
|             )) | ||||
| 
 | ||||
|         # force update of mode label if it exists | ||||
|         if self._status_label: | ||||
|             self._status_label.setFont(_style._font_small.font) | ||||
| 
 | ||||
|         # update godwidget and its children | ||||
|         if self.godwidget: | ||||
|             # update search widget if it exists | ||||
|             if hasattr(self.godwidget, 'search') and self.godwidget.search: | ||||
|                 self.godwidget.search.update_fonts() | ||||
| 
 | ||||
|             # update order mode panes in all chart views | ||||
|             self._update_chart_order_panes() | ||||
| 
 | ||||
|             # recursively update all other widgets with stylesheets | ||||
|             self._refresh_widget_fonts(self.godwidget) | ||||
|             self.godwidget.update() | ||||
| 
 | ||||
|     def _update_chart_order_panes(self) -> None: | ||||
|         '''Update order entry panels in all charts.''' | ||||
|         if not self.godwidget: | ||||
|             return | ||||
| 
 | ||||
|         # iterate through all linked splits (hist and rt) | ||||
|         for splits_name in ['hist_linked', 'rt_linked']: | ||||
|             splits = getattr(self.godwidget, splits_name, None) | ||||
|             if not splits: | ||||
|                 continue | ||||
| 
 | ||||
|             # get main chart | ||||
|             chart = getattr(splits, 'chart', None) | ||||
|             if chart: | ||||
|                 # update axes | ||||
|                 self._update_chart_axes(chart) | ||||
| 
 | ||||
|                 # update order pane | ||||
|                 if hasattr(chart, 'view'): | ||||
|                     view = chart.view | ||||
|                     if hasattr(view, 'order_mode') and view.order_mode: | ||||
|                         order_mode = view.order_mode | ||||
|                         if hasattr(order_mode, 'pane') and order_mode.pane: | ||||
|                             order_mode.pane.update_fonts() | ||||
| 
 | ||||
|             # also check subplots | ||||
|             subplots = getattr(splits, 'subplots', {}) | ||||
|             for name, subplot_chart in subplots.items(): | ||||
|                 # update subplot axes | ||||
|                 self._update_chart_axes(subplot_chart) | ||||
| 
 | ||||
|                 # update subplot order pane | ||||
|                 if hasattr(subplot_chart, 'view'): | ||||
|                     subplot_view = subplot_chart.view | ||||
|                     if hasattr(subplot_view, 'order_mode') and subplot_view.order_mode: | ||||
|                         subplot_order_mode = subplot_view.order_mode | ||||
|                         if hasattr(subplot_order_mode, 'pane') and subplot_order_mode.pane: | ||||
|                             subplot_order_mode.pane.update_fonts() | ||||
| 
 | ||||
|             # resize all sidepanes to match main chart's sidepane width | ||||
|             # this ensures volume/subplot sidepanes match the main chart | ||||
|             if splits and hasattr(splits, 'resize_sidepanes'): | ||||
|                 splits.resize_sidepanes() | ||||
| 
 | ||||
|     def _update_chart_axes(self, chart) -> None: | ||||
|         '''Update axis fonts and sizing for a chart.''' | ||||
|         from . import _style | ||||
| 
 | ||||
|         # update price axis (right side) | ||||
|         if hasattr(chart, 'pi') and chart.pi: | ||||
|             plot_item = chart.pi | ||||
|             # get all axes from plot item | ||||
|             for axis_name in ['left', 'right', 'bottom', 'top']: | ||||
|                 axis = plot_item.getAxis(axis_name) | ||||
|                 if axis and hasattr(axis, 'update_fonts'): | ||||
|                     axis.update_fonts(_style._font) | ||||
| 
 | ||||
|             # force plot item to recalculate its entire layout | ||||
|             plot_item.updateGeometry() | ||||
| 
 | ||||
|             # force chart widget to update | ||||
|             if hasattr(chart, 'updateGeometry'): | ||||
|                 chart.updateGeometry() | ||||
| 
 | ||||
|             # trigger a full scene update | ||||
|             if hasattr(chart, 'update'): | ||||
|                 chart.update() | ||||
| 
 | ||||
|     def _refresh_widget_fonts(self, widget: QWidget) -> None: | ||||
|         ''' | ||||
|         Recursively update font sizes in all child widgets. | ||||
| 
 | ||||
|         This handles widgets that have font-size hardcoded in their stylesheets. | ||||
|         ''' | ||||
|         from . import _style | ||||
| 
 | ||||
|         # recursively process all children | ||||
|         for child in widget.findChildren(QWidget): | ||||
|             # skip widgets that have their own update_fonts method (handled separately) | ||||
|             if hasattr(child, 'update_fonts'): | ||||
|                 continue | ||||
| 
 | ||||
|             # update child's stylesheet if it has font-size | ||||
|             child_stylesheet = child.styleSheet() | ||||
|             if child_stylesheet and 'font-size' in child_stylesheet: | ||||
|                 # for labels and simple widgets, regenerate stylesheet | ||||
|                 # this is a heuristic - may need refinement | ||||
|                 try: | ||||
|                     child.setFont(_style._font.font) | ||||
|                 except (AttributeError, RuntimeError): | ||||
|                     pass | ||||
| 
 | ||||
|             # update child's font | ||||
|             try: | ||||
|                 child.setFont(_style._font.font) | ||||
|             except (AttributeError, RuntimeError): | ||||
|                 pass | ||||
| 
 | ||||
| 
 | ||||
| # singleton app per actor | ||||
| _qt_win: QMainWindow = None | ||||
|  |  | |||
|  | @ -21,6 +21,7 @@ Chart trading, the only way to scalp. | |||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager | ||||
| from dataclasses import dataclass, field | ||||
| from decimal import Decimal | ||||
| from functools import partial | ||||
| from pprint import pformat | ||||
| import time | ||||
|  | @ -41,7 +42,6 @@ from piker.accounting import ( | |||
|     Position, | ||||
|     mk_allocator, | ||||
|     MktPair, | ||||
|     Symbol, | ||||
| ) | ||||
| from piker.clearing import ( | ||||
|     open_ems, | ||||
|  | @ -143,6 +143,15 @@ class OrderMode: | |||
|     } | ||||
|     _staged_order: Order | None = None | ||||
| 
 | ||||
|     @property | ||||
|     def curr_mkt(self) -> MktPair: | ||||
|         ''' | ||||
|         Deliver the currently selected `MktPair` according to | ||||
|         chart state. | ||||
| 
 | ||||
|         ''' | ||||
|         return self.chart.linked.mkt | ||||
| 
 | ||||
|     def on_level_change_update_next_order_info( | ||||
|         self, | ||||
|         level: float, | ||||
|  | @ -172,7 +181,11 @@ class OrderMode: | |||
|         line.update_labels(order_info) | ||||
| 
 | ||||
|         # update bound-in staged order | ||||
|         order.price = level | ||||
|         mkt: MktPair = self.curr_mkt | ||||
|         order.price: Decimal = mkt.quantize( | ||||
|             size=level, | ||||
|             quantity_type='price', | ||||
|         ) | ||||
|         order.size = order_info['size'] | ||||
| 
 | ||||
|         # when an order is changed we flip the settings side-pane to | ||||
|  | @ -187,7 +200,9 @@ class OrderMode: | |||
| 
 | ||||
|     ) -> LevelLine: | ||||
| 
 | ||||
|         level = order.price | ||||
|         # TODO, if we instead just always decimalize at the ems layer | ||||
|         # we can avoid this back-n-forth casting? | ||||
|         level = float(order.price) | ||||
| 
 | ||||
|         line = order_line( | ||||
|             chart or self.chart, | ||||
|  | @ -224,7 +239,11 @@ class OrderMode: | |||
|             # the order mode allocator but we still need to update the | ||||
|             # "staged" order message we'll send to the ems | ||||
|             def update_order_price(y: float) -> None: | ||||
|                 order.price = y | ||||
|                 mkt: MktPair = self.curr_mkt | ||||
|                 order.price: Decimal = mkt.quantize( | ||||
|                     size=y, | ||||
|                     quantity_type='price', | ||||
|                 ) | ||||
| 
 | ||||
|             line._on_level_change = update_order_price | ||||
| 
 | ||||
|  | @ -275,34 +294,31 @@ class OrderMode: | |||
|         chart = cursor.linked.chart | ||||
|         if ( | ||||
|             not chart | ||||
|             and cursor | ||||
|             and cursor.active_plot | ||||
|             and | ||||
|             cursor | ||||
|             and | ||||
|             cursor.active_plot | ||||
|         ): | ||||
|             return | ||||
| 
 | ||||
|         chart = cursor.active_plot | ||||
|         price = cursor._datum_xy[1] | ||||
|         price: float = cursor._datum_xy[1] | ||||
|         if not price: | ||||
|             # zero prices are not supported by any means | ||||
|             # since that's illogical / a no-op. | ||||
|             return | ||||
| 
 | ||||
|         mkt: MktPair = self.chart.linked.mkt | ||||
| 
 | ||||
|         # NOTE : we could also use instead, | ||||
|         # mkt.quantize(price, quantity_type='price') | ||||
|         # but it returns a Decimal and it's probably gonna | ||||
|         # be slower? | ||||
|         # TODO: should we be enforcing this precision | ||||
|         # at a different layer in the stack? right now | ||||
|         # any precision error will literally be relayed | ||||
|         # all the way back from the backend. | ||||
| 
 | ||||
|         price = round( | ||||
|             price, | ||||
|             ndigits=mkt.price_tick_digits, | ||||
|         # at a different layer in the stack? | ||||
|         # |_ might require `MktPair` tracking in the EMS? | ||||
|         # |_ right now any precision error will be relayed | ||||
|         #    all the way back from the backend and vice-versa.. | ||||
|         # | ||||
|         mkt: MktPair = self.curr_mkt | ||||
|         price: Decimal = mkt.quantize( | ||||
|             size=price, | ||||
|             quantity_type='price', | ||||
|         ) | ||||
| 
 | ||||
|         order = self._staged_order = Order( | ||||
|             action=action, | ||||
|             price=price, | ||||
|  | @ -378,7 +394,7 @@ class OrderMode: | |||
|                 'oid': oid, | ||||
|             }) | ||||
| 
 | ||||
|         if order.price <= 0: | ||||
|         if float(order.price) <= 0: | ||||
|             log.error( | ||||
|                 '*!? Invalid `Order.price <= 0` ?!*\n' | ||||
|                 # TODO: make this present multi-line in object form | ||||
|  | @ -515,14 +531,15 @@ class OrderMode: | |||
|             # if an order msg is provided update the line | ||||
|             # **from** that msg. | ||||
|             if order: | ||||
|                 if order.price <= 0: | ||||
|                 price: float = float(order.price) | ||||
|                 if price <= 0: | ||||
|                     log.error(f'Order has 0 price, cancelling..\n{order}') | ||||
|                     self.cancel_orders([order.oid]) | ||||
|                     return None | ||||
| 
 | ||||
|                 line.set_level(order.price) | ||||
|                 line.set_level(price) | ||||
|                 self.on_level_change_update_next_order_info( | ||||
|                     level=order.price, | ||||
|                     level=price, | ||||
|                     line=line, | ||||
|                     order=order, | ||||
|                     # use the corresponding position tracker for the | ||||
|  | @ -681,9 +698,9 @@ class OrderMode: | |||
|     ) -> Dialog | None: | ||||
|         # NOTE: the `.order` attr **must** be set with the | ||||
|         # equivalent order msg in order to be loaded. | ||||
|         order = msg.req | ||||
|         order: Order = msg.req | ||||
|         oid = str(msg.oid) | ||||
|         symbol = order.symbol | ||||
|         symbol: str = order.symbol | ||||
| 
 | ||||
|         # TODO: MEGA UGGG ZONEEEE! | ||||
|         src = msg.src | ||||
|  | @ -702,13 +719,22 @@ class OrderMode: | |||
|         order.oid = str(order.oid) | ||||
|         order.brokers = [brokername] | ||||
| 
 | ||||
|         # TODO: change this over to `MktPair`, but it's | ||||
|         # gonna be tough since we don't have any such data | ||||
|         # really in our clearing msg schema.. | ||||
|         order.symbol = Symbol.from_fqme( | ||||
|             fqsn=fqme, | ||||
|             info={}, | ||||
|         ) | ||||
|         # ?TODO? change this over to `MktPair`, but it's gonna be | ||||
|         # tough since we don't have any such data really in our | ||||
|         # clearing msg schema.. | ||||
|         # BUT WAIT! WHY do we even want/need this!? | ||||
|         # | ||||
|         # order.symbol = self.curr_mkt | ||||
|         # | ||||
|         # XXX, the old approach.. which i don't quite remember why.. | ||||
|         # -[ ] verify we for sure don't require this any more! | ||||
|         #  |_https://github.com/pikers/piker/issues/517 | ||||
|         # | ||||
|         # order.symbol = Symbol.from_fqme( | ||||
|         #     fqsn=fqme, | ||||
|         #     info={}, | ||||
|         # ) | ||||
| 
 | ||||
|         maybe_dialog: Dialog | None = self.submit_order( | ||||
|             send_msg=False, | ||||
|             order=order, | ||||
|  | @ -766,6 +792,7 @@ async def open_order_mode( | |||
|             brokerd_accounts, | ||||
|             ems_dialog_msgs, | ||||
|         ), | ||||
|         tractor.trionics.collapse_eg(), | ||||
|         trio.open_nursery() as tn, | ||||
| 
 | ||||
|     ): | ||||
|  | @ -1101,7 +1128,7 @@ async def process_trade_msg( | |||
|                         ) | ||||
|                     ) | ||||
|                 ): | ||||
|                     msg.req = order | ||||
|                     msg.req: Order = order | ||||
|                     dialog: ( | ||||
|                         Dialog | ||||
|                         # NOTE: on an invalid order submission (eg. | ||||
|  | @ -1166,7 +1193,7 @@ async def process_trade_msg( | |||
|             tm = time.time() | ||||
|             mode.on_fill( | ||||
|                 oid, | ||||
|                 price=req.price, | ||||
|                 price=float(req.price), | ||||
|                 time_s=tm, | ||||
|             ) | ||||
|             mode.lines.remove_line(uuid=oid) | ||||
|  | @ -1221,7 +1248,7 @@ async def process_trade_msg( | |||
|             tm = details['broker_time'] | ||||
|             mode.on_fill( | ||||
|                 oid, | ||||
|                 price=details['price'], | ||||
|                 price=float(details['price']), | ||||
|                 time_s=tm, | ||||
|                 pointing='up' if action == 'buy' else 'down', | ||||
|             ) | ||||
|  |  | |||
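The repeated `mkt.quantize(size=..., quantity_type='price')` calls snap a raw float level onto the market's tick grid as a `Decimal`, hence all the new `float(order.price)` casts at UI boundaries. A stdlib-only stand-in (tick size and rounding mode are assumptions about what `MktPair.quantize()` does)::

    from decimal import Decimal, ROUND_HALF_EVEN

    def quantize_price(
        price: float,
        price_tick: Decimal = Decimal('0.01'),
    ) -> Decimal:
        # str() first to avoid float-repr noise leaking into the Decimal
        return Decimal(str(price)).quantize(
            price_tick,
            rounding=ROUND_HALF_EVEN,
        )

    level: Decimal = quantize_price(105.456789)
    assert level == Decimal('105.46')
    assert float(level) == 105.46  # cast back for chart-level math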
|  | @ -42,6 +42,7 @@ from PyQt6.QtCore import ( | |||
|     QSize, | ||||
|     QModelIndex, | ||||
|     QItemSelectionModel, | ||||
|     QObject, | ||||
|     pyqtBoundSignal, | ||||
|     pyqtRemoveInputHook, | ||||
| ) | ||||
|  |  | |||
|  | @ -0,0 +1,20 @@ | |||
| #!/usr/bin/env bash | ||||
| # macOS wrapper for pikerd to handle missing XDG_RUNTIME_DIR | ||||
| 
 | ||||
| # Set up runtime directory for macOS if not already set | ||||
| if [ -z "$XDG_RUNTIME_DIR" ]; then | ||||
|     # Use macOS standard temp directory with user-specific subdirectory | ||||
|     export XDG_RUNTIME_DIR="/tmp/piker-runtime-$(id -u)" | ||||
| 
 | ||||
|     # Create the directory if it doesn't exist | ||||
|     if [ ! -d "$XDG_RUNTIME_DIR" ]; then | ||||
|         mkdir -p "$XDG_RUNTIME_DIR" | ||||
|         # Set proper permissions (only user can access) | ||||
|         chmod 700 "$XDG_RUNTIME_DIR" | ||||
|     fi | ||||
| 
 | ||||
|     echo "Set XDG_RUNTIME_DIR to: $XDG_RUNTIME_DIR" | ||||
| fi | ||||
| 
 | ||||
| # Run pikerd with all passed arguments | ||||
| exec uv run pikerd "$@" | ||||
|  | @ -23,7 +23,7 @@ name = "piker" | |||
| version = "0.1.0a0dev0" | ||||
| description = "trading gear for hackers" | ||||
| authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }] | ||||
| requires-python = ">=3.12, <3.13" | ||||
| requires-python = ">=3.12" | ||||
| license = "AGPL-3.0-or-later" | ||||
| readme = "README.rst" | ||||
| keywords = [ | ||||
|  | @ -39,8 +39,8 @@ classifiers = [ | |||
|     "Operating System :: POSIX :: Linux", | ||||
|     "Programming Language :: Python :: Implementation :: CPython", | ||||
|     "Programming Language :: Python :: 3 :: Only", | ||||
|     "Programming Language :: Python :: 3.11", | ||||
|     "Programming Language :: Python :: 3.12", | ||||
|     "Programming Language :: Python :: 3.13", | ||||
|     "Intended Audience :: Financial and Insurance Industry", | ||||
|     "Intended Audience :: Science/Research", | ||||
|     "Intended Audience :: Developers", | ||||
|  | @ -49,13 +49,13 @@ classifiers = [ | |||
| dependencies = [ | ||||
|     "async-generator >=1.10, <2.0.0", | ||||
|     "attrs >=23.1.0, <24.0.0", | ||||
|     "bidict >=0.22.1, <0.23.0", | ||||
|     "bidict >=0.23.1", | ||||
|     "colorama >=0.4.6, <0.5.0", | ||||
|     "colorlog >=6.7.0, <7.0.0", | ||||
|     "ib-insync >=0.9.86, <0.10.0", | ||||
|     "numba >=0.59.0, <0.60.0", | ||||
|     "numpy >=1.25, <2.0", | ||||
|     "polars >=0.18.13, <0.19.0", | ||||
|     "numpy>=2.0", | ||||
|     "polars >=0.20.6", | ||||
|     "polars-fuzzy-match>=0.1.5", | ||||
|     "pygments >=2.16.1, <3.0.0", | ||||
|     "rich >=13.5.2, <14.0.0", | ||||
|     "tomli >=2.0.1, <3.0.0", | ||||
|  | @ -65,16 +65,18 @@ dependencies = [ | |||
|     "typer >=0.9.0, <1.0.0", | ||||
|     "rapidfuzz >=3.5.2, <4.0.0", | ||||
|     "pdbp >=1.5.0, <2.0.0", | ||||
|     "trio >=0.24, <0.25", | ||||
|     "trio >=0.27", | ||||
|     "pendulum >=3.0.0, <4.0.0", | ||||
|     "httpx >=0.27.0, <0.28.0", | ||||
|     "cryptofeed >=2.4.0, <3.0.0", | ||||
|     "pyarrow >=17.0.0, <18.0.0", | ||||
|     "pyarrow>=18.0.0", | ||||
|     "websockets ==12.0", | ||||
|     "msgspec", | ||||
|     "msgspec>=0.19.0,<0.20", | ||||
|     "tractor", | ||||
|     "asyncvnc", | ||||
|     "tomlkit", | ||||
|     "trio-typing>=0.10.0", | ||||
|     "numba>=0.61.0", | ||||
| ] | ||||
| 
 | ||||
| [project.optional-dependencies] | ||||
|  | @ -107,11 +109,13 @@ uis = [ | |||
| dev = [ | ||||
|     "pytest >=6.0.0, <7.0.0", | ||||
|     "elasticsearch >=8.9.0, <9.0.0", | ||||
|     "xonsh >=0.14.2, <0.15.0", | ||||
|     "xonsh", | ||||
|     "prompt-toolkit ==3.0.40", | ||||
|     "cython >=3.0.0, <4.0.0", | ||||
|     "greenback >=1.1.1, <2.0.0", | ||||
|     "ruff>=0.9.6", | ||||
|     "pyperclip>=1.9.0", | ||||
|     "i3ipc>=2.2.1", | ||||
| ] | ||||
| 
 | ||||
| [project.scripts] | ||||
|  | @ -125,9 +129,24 @@ include = ["piker"] | |||
| [tool.hatch.build.targets.wheel] | ||||
| include = ["piker"] | ||||
| 
 | ||||
| 
 | ||||
| # TODO? move to a `uv.toml`? | ||||
| [tool.uv] | ||||
| python-preference = 'system' | ||||
| python-downloads = 'manual' | ||||
| 
 | ||||
| 
 | ||||
| [tool.uv.sources] | ||||
| pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" } | ||||
| asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" } | ||||
| tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" } | ||||
| msgspec = { git = "https://github.com/jcrist/msgspec.git" } | ||||
| tractor = { path = "../tractor", editable = true } | ||||
| 
 | ||||
| # TODO, long term we should be synced to upstream `main` branch! | ||||
| # tractor = { git = "https://github.com/goodboy/tractor.git", branch = "piker_pin" } | ||||
| tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" } | ||||
| 
 | ||||
| # goodboy's dev-env | ||||
| # XXX for @goodboy's hackin dev env, usually there's something new in | ||||
| # the runtime being seriously tested here Bp | ||||
| # tractor = { path = "../tractor/", editable = true } | ||||
| # xonsh = { path = "../xonsh", editable = true } | ||||
|  |  | |||
|  | @ -62,8 +62,9 @@ ignore-init-module-imports = false | |||
| fixable = ["ALL"] | ||||
| unfixable = [] | ||||
| 
 | ||||
| # TODO? why doesn't this work!? | ||||
| # Allow unused variables when underscore-prefixed. | ||||
| dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" | ||||
| # dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" | ||||
| 
 | ||||
| [format] | ||||
| # Use single quotes in `ruff format`. | ||||
|  |  | |||
|  | @ -0,0 +1 @@ | |||
| TAG_feed_status_update	./piker/data/feed.py	/TAG_feed_status_update/ | ||||
|  | @ -179,7 +179,7 @@ def test_ems_err_on_bad_broker( | |||
|         # NOTE: emsd should error on the actor's enabled modules | ||||
|         # import phase, when looking for a backend named `doggy`. | ||||
|         except tractor.RemoteActorError as re: | ||||
|             assert re.type == ModuleNotFoundError | ||||
|             assert re.type is ModuleNotFoundError | ||||
| 
 | ||||
|     run_and_tollerate_cancels(load_bad_fqme) | ||||
| 
 | ||||
|  |  | |||
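Re the `==` -> `is` swap in the assert above: identity comparison is the
idiomatic form for exact-type checks (ruff flags `type(x) == T` as `E721`)
and it can't be fooled by an `__eq__` overload on a metaclass; a contrived
sketch::

    class WeirdMeta(type):
        # classes built from this metaclass compare equal to anything
        def __eq__(cls, other) -> bool:
            return True

        # defining `__eq__` would otherwise null-out `__hash__`
        __hash__ = type.__hash__


    class Weird(metaclass=WeirdMeta):
        ...


    assert Weird == ModuleNotFoundError  # bogus equality!
    assert Weird is not ModuleNotFoundError  # identity stays exact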
|  | @ -142,7 +142,12 @@ async def test_concurrent_tokens_refresh(us_symbols, loglevel): | |||
|         # async with tractor.open_nursery() as n: | ||||
|         #     await n.run_in_actor('other', intermittently_refresh_tokens) | ||||
| 
 | ||||
|         async with trio.open_nursery() as n: | ||||
|         async with ( | ||||
|             tractor.trionics.collapse_eg(), | ||||
|             trio.open_nursery( | ||||
|                 # strict_exception_groups=False, | ||||
|             ) as n, | ||||
|         ): | ||||
| 
 | ||||
|             quoter = await qt.stock_quoter(client, us_symbols) | ||||
| 
 | ||||
|  | @ -383,7 +388,9 @@ async def test_quote_streaming(tmx_symbols, loglevel, stream_what): | |||
|             else: | ||||
|                 symbols = [tmx_symbols] | ||||
| 
 | ||||
|             async with trio.open_nursery() as n: | ||||
|             async with trio.open_nursery( | ||||
|                 strict_exception_groups=False, | ||||
|             ) as n: | ||||
|                 for syms, func in zip(symbols, stream_what): | ||||
|                     n.start_soon(func, feed, syms) | ||||
| 
 | ||||
|  |  | |||
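Context for the nursery tweaks in the two test hunks above: with the
`trio >=0.27` bump in `pyproject.toml`, nurseries wrap child errors in an
`ExceptionGroup` by default (strict mode became the default around trio
0.25), so these tests either opt out via `strict_exception_groups=False`
or collapse the group with `tractor.trionics.collapse_eg()`. A minimal,
piker-independent sketch of the default behaviour::

    import trio


    async def boom() -> None:
        raise ValueError('child failed')


    async def main() -> None:
        try:
            async with trio.open_nursery() as n:
                n.start_soon(boom)
        except* ValueError as eg:
            # strict mode: even a single child error arrives
            # wrapped in an (exception) group
            print(f'caught a group with {len(eg.exceptions)} exc(s)')


    trio.run(main)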