Compare commits

130 Commits

gitea_feat...incr_updat
	| Author | SHA1 | Date | 
|---|---|---|
|  | 8c1905e35a | |
|  | d9e2666e80 | |
|  | 1abe7d87a5 | |
|  | 2f9d199a7f | |
|  | 2f3418546f | |
|  | 729c72a48f | |
|  | 324dcbbfb0 | |
|  | 112cba43e5 | |
|  | 468cd3a381 | |
|  | e8aaf42cc6 | |
|  | 1967bc7973 | |
|  | 303a5cc66c | |
|  | 2e6b7da4bc | |
|  | 69d0555180 | |
|  | 8915f0c0c9 | |
|  | 946d554716 | |
|  | 8711465d96 | |
|  | 0f53450c68 | |
|  | 1c91cf56de | |
|  | 4129677ec5 | |
|  | aa8f9c02f2 | |
|  | c282555c2f | |
|  | f4f6ffe819 | |
|  | 3b15f83e70 | |
|  | d968f4897f | |
|  | ac57396acd | |
|  | 9df96cd182 | |
|  | bc2ebbc457 | |
|  | ecefc74d07 | |
|  | 9505350ff0 | |
|  | 15fe46091b | |
|  | e726a3ebef | |
|  | ba30b5d9bf | |
|  | 447549e1af | |
|  | 5ca0d1a19d | |
|  | 2c3da825e3 | |
|  | a752a22a4b | |
|  | 55606cffbb | |
|  | d3251a2922 | |
|  | dc4a0a6fd2 | |
|  | 04c12a756c | |
|  | cde23361a4 | |
|  | 3c58847595 | |
|  | fae249dd2f | |
|  | 6f5bb9cbe0 | |
|  | b4c7d02fcb | |
|  | b2697bfd13 | |
|  | 14037cd1dc | |
|  | a935245360 | |
|  | 8a5538f490 | |
|  | 90b9c12d25 | |
|  | aab1a3c565 | |
|  | 6c136e82b4 | |
|  | 8388918af0 | |
|  | 82dbdd6148 | |
|  | 79eff13e76 | |
|  | 8e5f5b6be6 | |
|  | 8110c4c70d | |
|  | 6f64ff5842 | |
|  | bd23b6e8f2 | |
|  | 49f3e15a3c | |
|  | c985c01c62 | |
|  | 8e11d79712 | |
|  | 48cce42c77 | |
|  | 1fd3513689 | |
|  | 9d375a0ce5 | |
|  | c18795e454 | |
|  | 76287a7523 | |
|  | b0f659a66b | |
|  | f7b3215aa4 | |
|  | c5ed9b5955 | |
|  | ea9d76ffce | |
|  | 16f2f6ff94 | |
|  | 4a383795bf | |
|  | ef8e71f628 | |
|  | 21d0d551d3 | |
|  | 32c3f63cfd | |
|  | 894dcc2de4 | |
|  | 8b8ffe78af | |
|  | 99a37f504f | |
|  | d3ddcc8206 | |
|  | 9b1491efc9 | |
|  | 7175901d0d | |
|  | bb13f76375 | |
|  | a682887e63 | |
|  | 1837e467be | |
|  | 7df795435e | |
|  | 8421422768 | |
|  | 23d386e0f5 | |
|  | f0c4261aa4 | |
|  | 2be8f63487 | |
|  | 24a07fd5e5 | |
|  | a893537ade | |
|  | 15d15fdfbf | |
|  | d1b05246f8 | |
|  | 96ec4ba28b | |
|  | fa0be47d66 | |
|  | 65609a35dc | |
|  | d8d7757e88 | |
|  | c439e99f8a | |
|  | 5c2b9a01e9 | |
|  | 15d3f99410 | |
|  | ae8170204f | |
|  | 7d628c4059 | |
|  | 6dfe59cce6 | |
|  | a465a11782 | |
|  | 807685d27e | |
|  | aea42ccbd9 | |
|  | 8fb9308e21 | |
|  | bbae8ad426 | |
|  | 893ac7a986 | |
|  | 91856ddda8 | |
|  | 25e2e13bd7 | |
|  | 2427c96336 | |
|  | 8732b2bd5e | |
|  | ff9208c15b | |
|  | a2547a548f | |
|  | 3873b8619e | |
|  | 326d05ac82 | |
|  | dcab99e3d2 | |
|  | 5a5df21f94 | |
|  | d7c1286e5d | |
|  | 8acaa28df0 | |
|  | bb45100168 | |
|  | d9ded54e10 | |
|  | 1bde86a7b2 | |
|  | 5031892dcf | |
|  | afe6f0b42b | |
|  | 5d539b7c49 | |
|  | e2ce341f93 | |

@@ -19,7 +19,7 @@ Structured, daemon tree service management.
 
 """
 from typing import Optional, Union, Callable, Any
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from collections import defaultdict
 
 from pydantic import BaseModel

@@ -130,7 +130,7 @@ class Services(BaseModel):
 _services: Optional[Services] = None
 
 
-@asynccontextmanager
+@acm
 async def open_pikerd(
     start_method: str = 'trio',
     loglevel: Optional[str] = None,

@@ -185,7 +185,7 @@ async def open_pikerd(
             yield _services
 
 
-@asynccontextmanager
+@acm
 async def open_piker_runtime(
     name: str,
     enable_modules: list[str] = [],

@@ -226,7 +226,7 @@ async def open_piker_runtime(
         yield tractor.current_actor()
 
 
-@asynccontextmanager
+@acm
 async def maybe_open_runtime(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -249,7 +249,7 @@ async def maybe_open_runtime(
         yield
 
 
-@asynccontextmanager
+@acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -300,7 +300,36 @@ class Brokerd:
     locks = defaultdict(trio.Lock)
 
 
-@asynccontextmanager
+@acm
+async def find_service(
+    service_name: str,
+) -> Optional[tractor.Portal]:
+
+    log.info(f'Scanning for service `{service_name}`')
+    # attach to existing daemon by name if possible
+    async with tractor.find_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as maybe_portal:
+        yield maybe_portal
+
+
+async def check_for_service(
+    service_name: str,
+
+) -> bool:
+    '''
+    Service daemon "liveness" predicate.
+
+    '''
+    async with tractor.query_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as sockaddr:
+        return sockaddr
+
+
+@acm
 async def maybe_spawn_daemon(
 
     service_name: str,

@@ -310,7 +339,7 @@ async def maybe_spawn_daemon(
     **kwargs,
 
 ) -> tractor.Portal:
-    """
+    '''
     If no ``service_name`` daemon-actor can be found,
     spawn one in a local subactor and return a portal to it.

@@ -321,7 +350,7 @@ async def maybe_spawn_daemon(
     This can be seen as a service starting api for remote-actor
     clients.
 
-    """
+    '''
     if loglevel:
         get_console_log(loglevel)

@@ -330,19 +359,13 @@ async def maybe_spawn_daemon(
     lock = Brokerd.locks[service_name]
     await lock.acquire()
 
-    log.info(f'Scanning for existing {service_name}')
-    # attach to existing daemon by name if possible
-    async with tractor.find_actor(
-        service_name,
-        arbiter_sockaddr=_registry_addr,
-
-    ) as portal:
+    async with find_service(service_name) as portal:
         if portal is not None:
             lock.release()
             yield portal
             return
 
-        log.warning(f"Couldn't find any existing {service_name}")
+    log.warning(f"Couldn't find any existing {service_name}")
 
     # ask root ``pikerd`` daemon to spawn the daemon we need if
     # pikerd is not live we now become the root of the

@@ -423,7 +446,7 @@ async def spawn_brokerd(
     return True
 
 
-@asynccontextmanager
+@acm
 async def maybe_spawn_brokerd(
 
     brokername: str,

@@ -431,7 +454,9 @@ async def maybe_spawn_brokerd(
     **kwargs,
 
 ) -> tractor.Portal:
-    '''Helper to spawn a brokerd service.
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.
 
     '''
     async with maybe_spawn_daemon(

@@ -483,7 +508,7 @@ async def spawn_emsd(
     return True
 
 
-@asynccontextmanager
+@acm
 async def maybe_open_emsd(
 
     brokername: str,
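The extracted `find_service()` / `check_for_service()` pair turns the previously inlined `tractor.find_actor()` logic into a small discovery API. A hedged usage sketch (it assumes a live `pikerd` registry and piker's `brokerd.<broker>` daemon naming convention, neither of which is shown in this diff):

```python
import trio

from piker._daemon import (
    find_service,
    check_for_service,
)


async def main() -> None:
    # "liveness" predicate: truthy (the registered sockaddr)
    # iff the daemon can be found on the registry
    if await check_for_service('brokerd.kraken'):
        print('brokerd.kraken is registered')

    # or attach directly; the yielded portal is ``None``
    # when no matching actor is found
    async with find_service('brokerd.kraken') as portal:
        if portal is None:
            print('no daemon found; a caller might spawn one')


trio.run(main)
```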
@@ -33,7 +33,41 @@ class SymbolNotFound(BrokerError):
 
 
 class NoData(BrokerError):
-    "Symbol data not permitted"
+    '''
+    Symbol data not permitted or no data
+    for time range found.
+
+    '''
+    def __init__(
+        self,
+        *args,
+        frame_size: int = 1000,
+
+    ) -> None:
+        super().__init__(*args)
+
+        # when raised, machinery can check if the backend
+        # set a "frame size" for doing datetime calcs.
+        self.frame_size: int = 1000
+
+
+class DataUnavailable(BrokerError):
+    '''
+    Signal storage requests to terminate.
+
+    '''
+    # TODO: add in a reason that can be displayed in the
+    # UI (for eg. `kraken` is bs and you should complain
+    # to them that you can't pull more OHLC data..)
+
+
+class DataThrottle(BrokerError):
+    '''
+    Broker throttled request rate for data.
+
+    '''
+    # TODO: add in throttle metrics/feedback
 
 
 def resproc(

@@ -50,12 +84,12 @@ def resproc(
     if not resp.status_code == 200:
         raise BrokerError(resp.body)
     try:
-        json = resp.json()
+        msg = resp.json()
     except json.decoder.JSONDecodeError:
         log.exception(f"Failed to process {resp}:\n{resp.text}")
         raise BrokerError(resp.text)
 
     if log_resp:
-        log.debug(f"Received json contents:\n{colorize_json(json)}")
+        log.debug(f"Received json contents:\n{colorize_json(msg)}")
 
-    return json if return_json else resp
+    return msg if return_json else resp
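The richer error taxonomy gives history-backfill machinery distinct failure modes to branch on. Note that, as committed, `NoData.__init__` assigns the literal `1000` to `self.frame_size` rather than the `frame_size` argument, so the hint is effectively fixed at its default. A hedged consumer-side sketch, assuming these names import from `piker.brokers._util` and a hypothetical frame getter of the `(end_dt) -> (array, start_dt, end_dt)` shape the backends below yield:

```python
import pendulum
import trio

from piker.brokers._util import (
    NoData,
    DataThrottle,
    DataUnavailable,
)


async def backfill(fetch_frame) -> None:
    # ``fetch_frame`` is a hypothetical stand-in for a backend's
    # ``get_ohlc()`` / ``get_hist()`` style history getter
    end_dt = pendulum.now('UTC')
    while True:
        try:
            array, start_dt, end_dt = await fetch_frame(end_dt=end_dt)
        except NoData as err:
            # step further back by the backend's hinted frame size
            end_dt = end_dt.subtract(seconds=err.frame_size)
        except DataThrottle:
            await trio.sleep(1)  # rate limited: back off, then retry
        except DataUnavailable:
            break  # backend says no more history will ever come
        else:
            end_dt = start_dt  # walk backwards one frame at a time
```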
@@ -19,6 +19,7 @@ Binance backend
 
 """
 from contextlib import asynccontextmanager as acm
+from datetime import datetime
 from typing import (
     Any, Union, Optional,
     AsyncGenerator, Callable,

@@ -221,20 +222,22 @@ class Client:
     async def bars(
         self,
         symbol: str,
-        start_time: int = None,
-        end_time: int = None,
+        start_dt: Optional[datetime] = None,
+        end_dt: Optional[datetime] = None,
         limit: int = 1000,  # <- max allowed per query
         as_np: bool = True,
 
     ) -> dict:
 
-        if start_time is None:
-            start_time = binance_timestamp(
-                pendulum.now('UTC').start_of('minute').subtract(minutes=limit)
-            )
+        if end_dt is None:
+            end_dt = pendulum.now('UTC')
 
-        if end_time is None:
-            end_time = binance_timestamp(pendulum.now('UTC'))
+        if start_dt is None:
+            start_dt = end_dt.start_of(
+                'minute').subtract(minutes=limit)
+
+        start_time = binance_timestamp(start_dt)
+        end_time = binance_timestamp(end_dt)
 
         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
         bars = await self._api(

@@ -379,7 +382,27 @@ async def open_history_client(
 
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('binance') as client:
-        yield client
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            array = await client.bars(
+                symbol,
+                start_dt=start_dt,
+                end_dt=end_dt,
+            )
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc, {'erlangs': 4, 'rate': 4}
 
 
 async def backfill_bars(
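`open_history_client()` now yields a `(getter, config)` pair instead of a raw client: the getter is an async frame fetcher returning `(array, start_dt, end_dt)` and the config dict carries queueing hints (`'erlangs'` reading as allowed in-flight requests, `'rate'` as requests per second). A hedged consumer sketch; the exact `open_history_client` signature is assumed:

```python
import pendulum


async def load_last_frame(open_history_client, symbol: str):
    # any backend's history acm, e.g. binance's above
    async with open_history_client(symbol) as (get_ohlc, config):
        array, start_dt, end_dt = await get_ohlc(
            end_dt=pendulum.now('UTC'),
        )
        print(
            f'{len(array)} bars {start_dt} -> {end_dt}; '
            f"backend suggests {config.get('rate')} req/s"
        )
        return array
```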
@@ -57,6 +57,8 @@ from ib_insync.wrapper import Wrapper
 from ib_insync.client import Client as ib_Client
 from fuzzywuzzy import process as fuzzy
 import numpy as np
+import pendulum
+
 
 from .. import config
 from ..log import get_logger, get_console_log

@@ -295,6 +297,10 @@ class Client:
         global _enters
         # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
         print(f'REQUESTING BARS {_enters} @ end={end_dt}')
+
+        if not end_dt:
+            end_dt = ''
+
         _enters += 1
 
         contract = await self.find_contract(fqsn)

@@ -1438,8 +1444,6 @@ async def get_bars(
     a ``MethoProxy``.
 
     '''
-    import pendulum
-
     fails = 0
     bars: Optional[list] = None
     first_dt: datetime = None

@@ -1467,7 +1471,9 @@ async def get_bars(
             time = bars_array['time']
             assert time[-1] == last_dt.timestamp()
             assert time[0] == first_dt.timestamp()
-            log.info(f'bars retreived for dts {first_dt}:{last_dt}')
+            log.info(
+                f'{len(bars)} bars retreived for {first_dt} -> {last_dt}'
+            )
 
             return (bars, bars_array, first_dt, last_dt), fails

@@ -1478,21 +1484,30 @@ async def get_bars(
 
             if 'No market data permissions for' in msg:
                 # TODO: signalling for no permissions searches
-                raise NoData(f'Symbol: {fqsn}')
-                break
+                raise NoData(
+                    f'Symbol: {fqsn}',
+                )
 
             elif (
                 err.code == 162
                 and 'HMDS query returned no data' in err.message
             ):
-                # try to decrement start point and look further back
-                end_dt = last_dt = last_dt.subtract(seconds=2000)
+                # XXX: this is now done in the storage mgmt layer
+                # and we shouldn't implicitly decrement the frame dt
+                # index since the upper layer may be doing so
+                # concurrently and we don't want to be delivering frames
+                # that weren't asked for.
                 log.warning(
-                    f'No data found ending @ {end_dt}\n'
-                    f'Starting another request for {end_dt}'
+                    f'NO DATA found ending @ {end_dt}\n'
                 )
 
-                continue
+                # try to decrement start point and look further back
+                # end_dt = last_dt = last_dt.subtract(seconds=2000)
+
+                raise NoData(
+                    f'Symbol: {fqsn}',
+                    frame_size=2000,
+                )
 
             elif _pacing in msg:

@@ -1546,8 +1561,8 @@ async def open_history_client(
     async with open_client_proxy() as proxy:
 
         async def get_hist(
-            end_dt: str,
-            start_dt: str = '',
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
 
         ) -> tuple[np.ndarray, str]:

@@ -1555,10 +1570,13 @@ async def open_history_client(
 
             # TODO: add logic here to handle tradable hours and only grab
             # valid bars in the range
-            if out == (None, None):
+            if out is None:
                 # could be trying to retreive bars over weekend
                 log.error(f"Can't grab bars starting at {end_dt}!?!?")
-                raise NoData(f'{end_dt}')
+                raise NoData(
+                    f'{end_dt}',
+                    frame_size=2000,
+                )
 
             bars, bars_array, first_dt, last_dt = out

@@ -1569,7 +1587,12 @@ async def open_history_client(
 
             return bars_array, first_dt, last_dt
 
-        yield get_hist
+        # TODO: it seems like we can do async queries for ohlc
+        # but getting the order right still isn't working and I'm not
+        # quite sure why.. needs some tinkering and probably
+        # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
+        # we have to do the batch queries on the `asyncio` side?
+        yield get_hist, {'erlangs': 1, 'rate': 6}
 
 
 async def backfill_bars(

@@ -1831,6 +1854,7 @@ async def stream_quotes(
         symbol=sym,
     )
     first_quote = normalize(first_ticker)
+    # print(f'first quote: {first_quote}')
 
     def mk_init_msgs() -> dict[str, dict]:
         # pass back some symbol info like min_tick, trading_hours, etc.

@@ -2435,8 +2459,8 @@ async def data_reset_hack(
     try:
         import i3ipc
     except ImportError:
-        return False
         log.warning('IB data hack no-supported on ur platformz')
+        return False
 
     i3 = i3ipc.Connection()
     t = i3.get_tree()
@@ -20,7 +20,8 @@ Kraken backend.
 '''
 from contextlib import asynccontextmanager as acm
 from dataclasses import asdict, field
-from typing import Any, Optional, AsyncIterator, Callable
+from datetime import datetime
+from typing import Any, Optional, AsyncIterator, Callable, Union
 import time
 
 from trio_typing import TaskStatus

@@ -40,7 +41,13 @@ import base64
 
 from .. import config
 from .._cacheables import open_cached_client
-from ._util import resproc, SymbolNotFound, BrokerError
+from ._util import (
+    resproc,
+    SymbolNotFound,
+    BrokerError,
+    DataThrottle,
+    DataUnavailable,
+)
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
 from ..data._web_bs import open_autorecon_ws, NoBsWs

@@ -305,7 +312,7 @@ class Client:
         action: str,
         size: float,
         reqid: str = None,
-        validate: bool = False # set True test call without a real submission
+        validate: bool = False  # set True test call without a real submission
     ) -> dict:
         '''
         Place an order and return integer request id provided by client.

@@ -391,17 +398,26 @@ class Client:
     async def bars(
         self,
         symbol: str = 'XBTUSD',
 
         # UTC 2017-07-02 12:53:20
-        since: int = None,
+        since: Optional[Union[int, datetime]] = None,
         count: int = 720,  # <- max allowed per query
         as_np: bool = True,
 
     ) -> dict:
 
         if since is None:
             since = pendulum.now('UTC').start_of('minute').subtract(
                 minutes=count).timestamp()
 
+        elif isinstance(since, int):
+            since = pendulum.from_timestamp(since).timestamp()
+
+        else:  # presumably a pendulum datetime
+            since = since.timestamp()
+
         # UTC 2017-07-02 12:53:20 is oldest seconds value
-        since = str(max(1499000000, since))
+        since = str(max(1499000000, int(since)))
         json = await self._public(
             'OHLC',
             data={

@@ -445,7 +461,16 @@ class Client:
             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
             return array
         except KeyError:
-            raise SymbolNotFound(json['error'][0] + f': {symbol}')
+            errmsg = json['error'][0]
+
+            if 'not found' in errmsg:
+                raise SymbolNotFound(errmsg + f': {symbol}')
+
+            elif 'Too many requests' in errmsg:
+                raise DataThrottle(f'{symbol}')
+
+            else:
+                raise BrokerError(errmsg)
 
 
 @acm

@@ -668,8 +693,8 @@ async def handle_order_requests(
                                 oid=msg.oid,
                                 reqid=msg.reqid,
                                 symbol=msg.symbol,
-                                # TODO: maybe figure out if pending cancels will
-                                # eventually get cancelled
+                                # TODO: maybe figure out if pending
+                                # cancels will eventually get cancelled
                                 reason="Order cancel is still pending?",
                                 broker_details=resp
                             ).dict()

@@ -1003,7 +1028,45 @@ async def open_history_client(
 
     # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:
-        yield client
+
+        # lol, kraken won't send any more then the "last"
+        # 720 1m bars.. so we have to just ignore further
+        # requests of this type..
+        queries: int = 0
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            nonlocal queries
+            if queries > 0:
+                raise DataUnavailable
+
+            count = 0
+            while count <= 3:
+                try:
+                    array = await client.bars(
+                        symbol,
+                        since=end_dt,
+                    )
+                    count += 1
+                    queries += 1
+                    break
+                except DataThrottle:
+                    log.warning(f'kraken OHLC throttle for {symbol}')
+                    await trio.sleep(1)
+
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc, {'erlangs': 1, 'rate': 1}
 
 
 async def backfill_bars(
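The widened `since` parameter accepts an epoch int, a (pendulum) datetime, or `None`, and all three collapse to a clamped epoch-seconds string. The same normalization as a standalone, runnable function (name hypothetical):

```python
from datetime import datetime
from typing import Optional, Union

import pendulum

# kraken's oldest accepted query time: UTC 2017-07-02 12:53:20
_OLDEST = 1499000000


def normalize_since(
    since: Optional[Union[int, datetime]] = None,
    count: int = 720,  # <- max bars per query
) -> str:
    if since is None:
        # default to the last ``count`` minutes
        since = pendulum.now('UTC').start_of(
            'minute').subtract(minutes=count).timestamp()
    elif isinstance(since, int):
        since = pendulum.from_timestamp(since).timestamp()
    else:  # presumably a (pendulum) datetime
        since = since.timestamp()

    # clamp to the oldest value the API accepts
    return str(max(_OLDEST, int(since)))


assert normalize_since(1400000000) == str(_OLDEST)
```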
@@ -16,29 +16,22 @@ from .. import config
 log = get_logger('cli')
 DEFAULT_BROKER = 'questrade'
 
-_config_dir = click.get_app_dir('piker')
-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
-_context_defaults = dict(
-    default_map={
-        # Questrade specific quote poll rates
-        'monitor': {
-            'rate': 3,
-        },
-        'optschain': {
-            'rate': 1,
-        },
-    }
-)
-
 
 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
 @click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
-def pikerd(loglevel, host, tl, pdb):
-    """Spawn the piker broker-daemon.
-    """
+@click.option(
+    '--tsdb',
+    is_flag=True,
+    help='Enable local ``marketstore`` instance'
+)
+def pikerd(loglevel, host, tl, pdb, tsdb):
+    '''
+    Spawn the piker broker-daemon.
+
+    '''
     from .._daemon import open_pikerd
     log = get_console_log(loglevel)

@@ -52,13 +45,33 @@ def pikerd(loglevel, host, tl, pdb):
         ))
 
     async def main():
-        async with open_pikerd(loglevel=loglevel, debug_mode=pdb):
+
+        async with (
+            open_pikerd(
+                loglevel=loglevel,
+                debug_mode=pdb,
+            ),  # normally delivers a ``Services`` handle
+            trio.open_nursery() as n,
+        ):
+            if tsdb:
+                # TODO:
+                # async with maybe_open_marketstored():
+
+                from piker.data._ahab import start_ahab
+                log.info('Spawning `marketstore` supervisor')
+                ctn_ready = await n.start(
+                    start_ahab,
+                    'marketstored',
+                )
+                await ctn_ready.wait()
+                log.info('`marketstore` container:{uid} up')
+
             await trio.sleep_forever()
 
     trio.run(main)
 
 
-@click.group(context_settings=_context_defaults)
+@click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
     default=[DEFAULT_BROKER],

@@ -87,8 +100,8 @@ def cli(ctx, brokers, loglevel, tl, configdir):
         'loglevel': loglevel,
         'tractorloglevel': None,
         'log': get_console_log(loglevel),
-        'confdir': _config_dir,
-        'wl_path': _watchlists_data_path,
+        'confdir': config._config_dir,
+        'wl_path': config._watchlists_data_path,
     })
 
     # allow enabling same loglevel in ``tractor`` machinery
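The `--tsdb` branch shows the `nursery.start()` readiness handshake used throughout piker: the supervisor task hands back a `trio.Event` which is awaited before declaring the container up (as committed, the `container:{uid}` log line is missing its `f` prefix, so the uid is never interpolated). A minimal self-contained sketch of the same handshake pattern, with hypothetical names:

```python
import trio
from trio_typing import TaskStatus


async def start_service(
    task_status: TaskStatus[trio.Event] = trio.TASK_STATUS_IGNORED,
) -> None:
    ready = trio.Event()
    # hand the event back to the ``nursery.start()`` caller
    task_status.started(ready)
    await trio.sleep(0.1)  # stand-in for real container spin-up
    ready.set()
    await trio.sleep_forever()  # supervise until cancelled


async def main() -> None:
    async with trio.open_nursery() as n:
        ready = await n.start(start_service)
        await ready.wait()  # block until the service reports up
        n.cancel_scope.cancel()


trio.run(main)
```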
@@ -17,6 +17,8 @@
 """
 Broker configuration mgmt.
 """
+import platform
+import sys
 import os
 from os.path import dirname
 import shutil

@@ -24,14 +26,100 @@ from typing import Optional
 
 from bidict import bidict
 import toml
-import click
 
 from .log import get_logger
 
 log = get_logger('broker-config')
 
-_config_dir = click.get_app_dir('piker')
 
+# taken from ``click`` since apparently they have some
+# super weirdness with sigint and sudo..no clue
+def get_app_dir(app_name, roaming=True, force_posix=False):
+    r"""Returns the config folder for the application.  The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Win XP (roaming):
+      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
+    Win XP (not roaming):
+      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
+    Win 7 (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Win 7 (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name.  This should be properly capitalized
+                     and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+                    Has no affect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+                        folder will be stored in the home folder with a leading
+                        dot instead of the XDG config home or darwin's
+                        application support folder.
+    """
+
+    def _posixify(name):
+        return "-".join(name.split()).lower()
+
+    # if WIN:
+    if platform.system() == 'Windows':
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+_config_dir = _click_config_dir = get_app_dir('piker')
+_parent_user = os.environ.get('SUDO_USER')
+
+if _parent_user:
+    non_root_user_dir = os.path.expanduser(
+        f'~{_parent_user}'
+    )
+    root = 'root'
+    _config_dir = (
+        non_root_user_dir +
+        _click_config_dir[
+            _click_config_dir.rfind(root) + len(root):
+        ]
+    )
+
 _file_name = 'brokers.toml'
+_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+_context_defaults = dict(
+    default_map={
+        # Questrade specific quote poll rates
+        'monitor': {
+            'rate': 3,
+        },
+        'optschain': {
+            'rate': 1,
+        },
+    }
+)
 
 
 def _override_config_dir(
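The `SUDO_USER` remap splices the invoking user's home directory into whatever path `get_app_dir()` computed for root, so config written while running under `sudo` stays under (and findable for) the real user's home. A worked example of the string surgery with illustrative values:

```python
# what get_app_dir('piker') typically yields under ``sudo`` on linux
_click_config_dir = '/root/.config/piker'

# e.g. SUDO_USER=alice when invoked via ``sudo``;
# ``os.path.expanduser(f'~{user}')`` would resolve this via passwd
non_root_user_dir = '/home/alice'

root = 'root'
_config_dir = (
    non_root_user_dir
    + _click_config_dir[_click_config_dir.rfind(root) + len(root):]
)
assert _config_dir == '/home/alice/.config/piker'
```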
@@ -0,0 +1,469 @@
+# piker: trading gear for hackers
+# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Supervisor for docker with included specific-image service helpers.
+
+'''
+import os
+from typing import (
+    Optional,
+    # Any,
+)
+from contextlib import asynccontextmanager as acm
+
+import trio
+from trio_typing import TaskStatus
+import tractor
+import docker
+import json
+from docker.models.containers import Container as DockerContainer
+from docker.errors import DockerException, APIError
+from requests.exceptions import ConnectionError, ReadTimeout
+
+from ..log import get_logger, get_console_log
+from .. import config
+
+log = get_logger(__name__)
+
+
+_config = '''
+# piker's ``marketstore`` config.
+
+# mount this config using:
+# sudo docker run --mount \
+# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+# 5993:5993 alpacamarkets/marketstore:latest
+
+root_directory: data
+listen_port: 5993
+grpc_listen_port: 5995
+log_level: debug
+queryable: true
+stop_grace_period: 0
+wal_rotate_interval: 5
+stale_threshold: 5
+enable_add: true
+enable_remove: false
+
+triggers:
+  - module: ondiskagg.so
+    on: "*/1Sec/OHLCV"
+    config:
+        # filter: "nasdaq"
+        destinations:
+            - 1Min
+            - 5Min
+            - 15Min
+            - 1H
+            - 1D
+
+  - module: stream.so
+    on: '*/*/*'
+    # config:
+    #     filter: "nasdaq"
+
+'''
+
+
+class DockerNotStarted(Exception):
+    'Prolly you dint start da daemon bruh'
+
+
+@acm
+async def open_docker(
+    url: Optional[str] = None,
+    **kwargs,
+
+) -> docker.DockerClient:
+
+    client: Optional[docker.DockerClient] = None
+    try:
+        client = docker.DockerClient(
+            base_url=url,
+            **kwargs
+        ) if url else docker.from_env(**kwargs)
+
+        yield client
+
+    except (
+        DockerException,
+        APIError,
+    ) as err:
+
+        def unpack_msg(err: Exception) -> str:
+            args = getattr(err, 'args', None)
+            if args:
+                return args
+            else:
+                return str(err)
+
+        # could be more specific so let's check if it's just perms.
+        if err.args:
+            errs = err.args
+            for err in errs:
+                msg = unpack_msg(err)
+                if 'PermissionError' in msg:
+                    raise DockerException('You dint run as root yo!')
+
+                elif 'FileNotFoundError' in msg:
+                    raise DockerNotStarted('Did you start da service sister?')
+
+        # not perms?
+        raise
+
+    finally:
+        if client:
+            client.close()
+            # client.api._custom_adapter.close()
+            for c in client.containers.list():
+                c.kill()
+
+
+class Container:
+    '''
+    Wrapper around a ``docker.models.containers.Container`` to include
+    log capture and relay through our native logging system and helper
+    method(s) for cancellation/teardown.
+
+    '''
+    def __init__(
+        self,
+        cntr: DockerContainer,
+    ) -> None:
+
+        self.cntr = cntr
+        # log msg de-duplication
+        self.seen_so_far = set()
+
+    async def process_logs_until(
+        self,
+        patt: str,
+        bp_on_msg: bool = False,
+    ) -> bool:
+        '''
+        Attempt to capture container log messages and relay through our
+        native logging system.
+
+        '''
+        seen_so_far = self.seen_so_far
+
+        while True:
+            logs = self.cntr.logs()
+            entries = logs.decode().split('\n')
+            for entry in entries:
+
+                # ignore null lines
+                if not entry:
+                    continue
+
+                try:
+                    record = json.loads(entry.strip())
+                except json.JSONDecodeError:
+                    if 'Error' in entry:
+                        raise RuntimeError(entry)
+                    raise
+
+                msg = record['msg']
+                level = record['level']
+                if msg and entry not in seen_so_far:
+                    seen_so_far.add(entry)
+                    if bp_on_msg:
+                        await tractor.breakpoint()
+
+                    getattr(log, level, log.error)(f'{msg}')
+
+                if patt in msg:
+                    return True
+
+                # do a checkpoint so we don't block if cancelled B)
+                await trio.sleep(0.01)
+
+        return False
+
+    def try_signal(
+        self,
+        signal: str = 'SIGINT',
+
+    ) -> bool:
+        try:
+            # XXX: market store doesn't seem to shutdown nicely all the
+            # time with this (maybe because there are still open grpc
+            # connections?) noticably after client connections have been
+            # made or are in use/teardown. It works just fine if you
+            # just start and stop the container tho?..
+            log.cancel(f'SENDING {signal} to {self.cntr.id}')
+            self.cntr.kill(signal)
+            return True
+
+        except docker.errors.APIError as err:
+            # _err = err
+            if 'is not running' in err.explanation:
+                return False
+
+    async def cancel(
+        self,
+    ) -> None:
+
+        cid = self.cntr.id
+        self.try_signal('SIGINT')
+
+        with trio.move_on_after(0.5) as cs:
+            cs.shield = True
+            # print('PROCESSINGN LOGS')
+            await self.process_logs_until('initiating graceful shutdown')
+            # print('SHUTDOWN REPORTED BY CONTAINER')
+            await self.process_logs_until('exiting...',)
+
+        for _ in range(10):
+            with trio.move_on_after(0.5) as cs:
+                cs.shield = True
+                # print('waiting on EXITING')
+                await self.process_logs_until('exiting...',)
+                # print('got EXITING')
+                break
+
+            if cs.cancelled_caught:
+                # get out the big guns, bc apparently marketstore
+                # doesn't actually know how to terminate gracefully
+                # :eyeroll:...
+                self.try_signal('SIGKILL')
+
+                try:
+                    log.info('Waiting on container shutdown: {cid}')
+                    self.cntr.wait(
+                        timeout=0.1,
+                        condition='not-running',
+                    )
+                    break
+
+                except (
+                    ReadTimeout,
+                    ConnectionError,
+                ):
+                    log.error(f'failed to wait on container {cid}')
+                    raise
+
+        else:
+            raise RuntimeError('Failed to cancel container {cid}')
+
+        log.cancel(f'Container stopped: {cid}')
+
+
+@tractor.context
+async def open_marketstored(
+    ctx: tractor.Context,
+    **kwargs,
+
+) -> None:
+    '''
+    Start and supervise a marketstore instance with its config bind-mounted
+    in from the piker config directory on the system.
+
+    The equivalent cli cmd to this code is:
+
+        sudo docker run --mount \
+        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+        5993:5993 alpacamarkets/marketstore:latest
+
+    '''
+    log = get_console_log('info', name=__name__)
+
+    async with open_docker() as client:
+
+        # create a mount from user's local piker config dir into container
+        config_dir_mnt = docker.types.Mount(
+            target='/etc',
+            source=config._config_dir,
+            type='bind',
+        )
+
+        # create a user config subdir where the marketstore
+        # backing filesystem database can be persisted.
+        persistent_data_dir = os.path.join(
+            config._config_dir, 'data',
+        )
+        if not os.path.isdir(persistent_data_dir):
+            os.mkdir(persistent_data_dir)
+
+        data_dir_mnt = docker.types.Mount(
+            target='/data',
+            source=persistent_data_dir,
+            type='bind',
+        )
+
+        dcntr: DockerContainer = client.containers.run(
+            'alpacamarkets/marketstore:latest',
+            # do we need this for cmds?
+            # '-i',
+
+            # '-p 5993:5993',
+            ports={
+                '5993/tcp': 5993,  # jsonrpc
+                '5995/tcp': 5995,  # grpc
+            },
+            mounts=[config_dir_mnt, data_dir_mnt],
+            detach=True,
+            # stop_signal='SIGINT',
+            init=True,
+            # remove=True,
+        )
+        cntr = Container(dcntr)
+
+        with trio.move_on_after(1):
+            found = await cntr.process_logs_until(
+                "launching tcp listener for all services...",
+            )
+
+            if not found and cntr not in client.containers.list():
+                raise RuntimeError(
+                    'Failed to start `marketstore` check logs deats'
+                )
+
+        await ctx.started((cntr.cntr.id, os.getpid()))
+
+        # async with ctx.open_stream() as stream:
+
+        try:
+
+            # TODO: we might eventually want a proxy-style msg-prot here
+            # to allow remote control of containers without needing
+            # callers to have root perms?
+            await trio.sleep_forever()
+
+            # await cntr.cancel()
+            # with trio.CancelScope(shield=True):
+            #     # block for the expected "teardown log msg"..
+            #     # await cntr.process_logs_until('exiting...',)
+
+            #     # only msg should be to signal killing the
+            #     # container and this super daemon.
+            #     msg = await stream.receive()
+            #     # print("GOT CANCEL MSG")
+
+            #     cid = msg['cancel']
+            #     log.cancel(f'Cancelling container {cid}')
+
+            #     # print("CANCELLING CONTAINER")
+            #     await cntr.cancel()
+
+            #     # print("SENDING ACK")
+            #     await stream.send('ack')
+
+        except (
+            BaseException,
+            # trio.Cancelled,
+            # KeyboardInterrupt,
+        ):
+
+            with trio.CancelScope(shield=True):
+                await cntr.cancel()
+                # await stream.send('ack')
+
+            raise
+
+
+async def start_ahab(
+    service_name: str,
+    task_status: TaskStatus[trio.Event] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+    '''
+    Start a ``docker`` container supervisor with given service name.
+
+    Currently the actor calling this task should normally be started
+    with root permissions (until we decide to use something that doesn't
+    require this, like docker's rootless mode or some wrapper project) but
+    te root perms are de-escalated after the docker supervisor sub-actor
+    is started.
+
+    '''
+    cn_ready = trio.Event()
+    try:
+        async with tractor.open_nursery(
+            loglevel='runtime',
+        ) as tn:
+
+            portal = await tn.start_actor(
+                service_name,
+                enable_modules=[__name__]
+            )
+
+            # TODO: we have issues with this on teardown
+            # where ``tractor`` tries to issue ``os.kill()``
+            # and hits perms errors since the root process
+            # doesn't any longer have root perms..
+
+            # de-escalate root perms to the original user
+            # after the docker supervisor actor is spawned.
+            if config._parent_user:
+                import pwd
+                os.setuid(
+                    pwd.getpwnam(
+                        config._parent_user
+                    )[2]  # named user's uid
+                )
+
+            task_status.started(cn_ready)
+
+            async with portal.open_context(
+                open_marketstored,
+            ) as (ctx, first):
+
+                cid, pid = first
+
+                await trio.sleep_forever()
+                # async with ctx.open_stream() as stream:
+                #     try:
+                #         # run till cancelled
+                #         await trio.sleep_forever()
+                #     finally:
+                #         with trio.CancelScope(shield=True):
+                #             # print('SENDING CANCEL TO MARKETSTORED')
+                #             await stream.send({'cancel': (cid, pid)})
+                #         assert await stream.receive() == 'ack'
+
+    # since we demoted root perms in this parent
+    # we'll get a perms error on proc cleanup in
+    # ``tractor`` nursery exit. just make sure
+    # the child is terminated and don't raise the
+    # error if so.
+
+    # TODO: we could also consider adding
+    # a ``tractor.ZombieDetected`` or something that we could raise
+    # if we find the child didn't terminate.
+    # await tractor.breakpoint()
+    except PermissionError:
+        log.warning('Failed to cancel root permsed container')
+
+    except (
+        trio.MultiError,
+    ) as err:
+        for subexc in err.exceptions:
+            if isinstance(subexc, PermissionError):
+                log.warning('Failed to cancel root perms-ed container')
+                return
+        else:
+            raise
+
+
+async def main():
+    await start_ahab()
+    await trio.sleep_forever()
+
+
+if __name__ == '__main__':
+    trio.run(main)
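`Container.process_logs_until()` treats readiness as "a known log line showed up": `docker-py`'s `Container.logs()` returns the whole log buffer on each call, so lines are de-duplicated through the `seen_so_far` set, and the loop checkpoints between passes so a surrounding timeout scope can cancel it. A trimmed sketch of that poll-for-pattern idea against any `docker-py` container handle (the original's JSON-record parsing and error handling omitted):

```python
import trio
from docker.models.containers import Container as DockerContainer


async def wait_for_log_pattern(
    cntr: DockerContainer,
    patt: str,
) -> bool:
    seen: set[str] = set()
    while True:
        # the full log byte-buffer is re-fetched on every pass
        for entry in cntr.logs().decode().split('\n'):
            if not entry or entry in seen:
                continue
            seen.add(entry)
            if patt in entry:
                return True
        # checkpoint so cancellation (eg. ``trio.move_on_after()``) can land
        await trio.sleep(0.01)
```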
@@ -22,14 +22,16 @@ financial data flows.
 from __future__ import annotations
 from collections import Counter
 import time
+from typing import TYPE_CHECKING, Optional
 
 import tractor
 import trio
 from trio_typing import TaskStatus
 
-from ._sharedmem import ShmArray
 from ..log import get_logger
 
+if TYPE_CHECKING:
+    from ._sharedmem import ShmArray
+
 log = get_logger(__name__)

@@ -88,6 +90,7 @@ async def increment_ohlc_buffer(
 
     total_s = 0  # total seconds counted
     lowest = min(sampler.ohlcv_shms.keys())
+    lowest_shm = sampler.ohlcv_shms[lowest][0]
     ad = lowest - 0.001
 
     with trio.CancelScope() as cs:

@@ -131,21 +134,46 @@ async def increment_ohlc_buffer(
                     # write to the buffer
                     shm.push(last)
 
-            # broadcast the buffer index step to any subscribers for
-            # a given sample period.
-            subs = sampler.subscribers.get(delay_s, ())
-
-            for stream in subs:
-                try:
-                    await stream.send({'index': shm._last.value})
-                except (
-                    trio.BrokenResourceError,
-                    trio.ClosedResourceError
-                ):
-                    log.error(
-                        f'{stream._ctx.chan.uid} dropped connection'
-                    )
-                    subs.remove(stream)
+            await broadcast(delay_s, shm=lowest_shm)
+
+
+async def broadcast(
+    delay_s: int,
+    shm: Optional[ShmArray] = None,
+
+) -> None:
+    # broadcast the buffer index step to any subscribers for
+    # a given sample period.
+    subs = sampler.subscribers.get(delay_s, ())
+
+    last = -1
+
+    if shm is None:
+        periods = sampler.ohlcv_shms.keys()
+        # if this is an update triggered by a history update there
+        # might not actually be any sampling bus setup since there's
+        # no "live feed" active yet.
+        if periods:
+            lowest = min(periods)
+            shm = sampler.ohlcv_shms[lowest][0]
+            last = shm._last.value
+
+    for stream in subs:
+        try:
+            await stream.send({'index': last})
+        except (
+            trio.BrokenResourceError,
+            trio.ClosedResourceError
+        ):
+            log.error(
+                f'{stream._ctx.chan.uid} dropped connection'
+            )
+            try:
+                subs.remove(stream)
+            except ValueError:
+                log.warning(
+                    f'{stream._ctx.chan.uid} sub already removed!?'
+                )
 
 
 @tractor.context

@@ -365,7 +393,12 @@ async def uniform_rate_send(
 
         if left_to_sleep > 0:
             with trio.move_on_after(left_to_sleep) as cs:
-                sym, last_quote = await quote_stream.receive()
+                try:
+                    sym, last_quote = await quote_stream.receive()
+                except trio.EndOfChannel:
+                    log.exception(f"feed for {stream} ended?")
+                    break
+
                 diff = time.time() - last_send
 
                 if not first_quote:
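Factoring `broadcast()` out of the increment loop lets any code path (notably history updates when no live sampler is running) push an index-step to subscribers, and the new `ValueError` guard tolerates two broadcasters racing to prune the same dead stream. A self-contained toy of that prune-on-send pattern (it additionally iterates a copy to sidestep the mutate-while-iterating hazard):

```python
import trio


async def broadcast(subs: list, msg: dict) -> None:
    for send_chan in list(subs):  # copy: we may mutate ``subs``
        try:
            await send_chan.send(msg)
        except (trio.BrokenResourceError, trio.ClosedResourceError):
            try:
                subs.remove(send_chan)
            except ValueError:
                pass  # another broadcaster already pruned it


async def main() -> None:
    send, recv = trio.open_memory_channel(1)
    subs = [send]
    await recv.aclose()  # simulate a subscriber dropping
    await broadcast(subs, {'index': 1})
    assert not subs  # dead stream was pruned


trio.run(main)
```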
|  | @ -22,7 +22,6 @@ from __future__ import annotations | |||
| from sys import byteorder | ||||
| from typing import Optional | ||||
| from multiprocessing.shared_memory import SharedMemory, _USE_POSIX | ||||
| from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
| if _USE_POSIX: | ||||
|     from _posixshmem import shm_unlink | ||||
|  | @ -30,6 +29,7 @@ if _USE_POSIX: | |||
| import tractor | ||||
| import numpy as np | ||||
| from pydantic import BaseModel | ||||
| from numpy.lib import recfunctions as rfn | ||||
| 
 | ||||
| from ..log import get_logger | ||||
| from ._source import base_iohlc_dtype | ||||
|  | @ -40,32 +40,39 @@ log = get_logger(__name__) | |||
| 
 | ||||
| # how  much is probably dependent on lifestyle | ||||
| _secs_in_day = int(60 * 60 * 24) | ||||
| # we try for 3 times but only on a run-every-other-day kinda week. | ||||
| _default_size = 10 * _secs_in_day | ||||
| # we try for a buncha times, but only on a run-every-other-day kinda week. | ||||
| _days_worth = 16 | ||||
| _default_size = _days_worth * _secs_in_day | ||||
| # where to start the new data append index | ||||
| _rt_buffer_start = int(9*_secs_in_day) | ||||
| _rt_buffer_start = int((_days_worth - 1) * _secs_in_day) | ||||
| 
 | ||||
| 
 | ||||
| # Tell the "resource tracker" thing to fuck off. | ||||
| class ManTracker(mantracker.ResourceTracker): | ||||
|     def register(self, name, rtype): | ||||
|         pass | ||||
| def cuckoff_mantracker(): | ||||
| 
 | ||||
|     def unregister(self, name, rtype): | ||||
|         pass | ||||
|     from multiprocessing import resource_tracker as mantracker | ||||
| 
 | ||||
|     def ensure_running(self): | ||||
|         pass | ||||
|     # Tell the "resource tracker" thing to fuck off. | ||||
|     class ManTracker(mantracker.ResourceTracker): | ||||
|         def register(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def unregister(self, name, rtype): | ||||
|             pass | ||||
| 
 | ||||
|         def ensure_running(self): | ||||
|             pass | ||||
| 
 | ||||
|     # "know your land and know your prey" | ||||
|     # https://www.dailymotion.com/video/x6ozzco | ||||
|     mantracker._resource_tracker = ManTracker() | ||||
|     mantracker.register = mantracker._resource_tracker.register | ||||
|     mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     # ensure_running = mantracker._resource_tracker.ensure_running | ||||
|     mantracker.unregister = mantracker._resource_tracker.unregister | ||||
|     mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| 
 | ||||
| 
 | ||||
| # "know your land and know your prey" | ||||
| # https://www.dailymotion.com/video/x6ozzco | ||||
| mantracker._resource_tracker = ManTracker() | ||||
| mantracker.register = mantracker._resource_tracker.register | ||||
| mantracker.ensure_running = mantracker._resource_tracker.ensure_running | ||||
| ensure_running = mantracker._resource_tracker.ensure_running | ||||
| mantracker.unregister = mantracker._resource_tracker.unregister | ||||
| mantracker.getfd = mantracker._resource_tracker.getfd | ||||
| cuckoff_mantracker() | ||||
| 
 | ||||
| 
 | ||||
| class SharedInt: | ||||
|  | @ -191,7 +198,11 @@ class ShmArray: | |||
|         self._post_init: bool = False | ||||
| 
 | ||||
|         # pushing data does not write the index (aka primary key) | ||||
|         self._write_fields = list(shmarr.dtype.fields.keys())[1:] | ||||
|         dtype = shmarr.dtype | ||||
|         if dtype.fields: | ||||
|             self._write_fields = list(shmarr.dtype.fields.keys())[1:] | ||||
|         else: | ||||
|             self._write_fields = None | ||||
| 
 | ||||
|     # TODO: ringbuf api? | ||||
| 
 | ||||
|  | @ -237,6 +248,48 @@ class ShmArray: | |||
| 
 | ||||
|         return a | ||||
| 
 | ||||
|     def ustruct( | ||||
|         self, | ||||
|         fields: Optional[list[str]] = None, | ||||
| 
 | ||||
|         # type that all field values will be cast to | ||||
|         # in the returned view. | ||||
|         common_dtype: np.dtype = np.float64, | ||||
| 
 | ||||
|     ) -> np.ndarray: | ||||
| 
 | ||||
|         array = self._array | ||||
| 
 | ||||
|         if fields: | ||||
|             selection = array[fields] | ||||
|             # fcount = len(fields) | ||||
|         else: | ||||
|             selection = array | ||||
|             # fcount = len(array.dtype.fields) | ||||
| 
 | ||||
|         # XXX: manual ``.view()`` attempt that also doesn't work. | ||||
|         # uview = selection.view( | ||||
|         #     dtype='<f16', | ||||
|         # ).reshape(-1, 4, order='A') | ||||
| 
 | ||||
|         # assert len(selection) == len(uview) | ||||
| 
 | ||||
|         u = rfn.structured_to_unstructured( | ||||
|             selection, | ||||
|             # dtype=float, | ||||
|             copy=True, | ||||
|         ) | ||||
| 
 | ||||
|         # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf) | ||||
|         # array[:] = a[:] | ||||
|         return u | ||||
|         # return ShmArray( | ||||
|         #     shmarr=u, | ||||
|         #     first=self._first, | ||||
|         #     last=self._last, | ||||
|         #     shm=self._shm | ||||
|         # ) | ||||
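|         # a minimal sketch of the conversion above (hypothetical | ||||
|         # 2-field array, not from this module's dtypes): | ||||
|         # | ||||
|         # >>> a = np.array( | ||||
|         #     [(1., 2.), (3., 4.)], | ||||
|         #     dtype=[('open', 'f8'), ('close', 'f8')], | ||||
|         # ) | ||||
|         # >>> rfn.structured_to_unstructured(a, copy=True) | ||||
|         # array([[1., 2.], | ||||
|         #        [3., 4.]]) | ||||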
| 
 | ||||
|     def last( | ||||
|         self, | ||||
|         length: int = 1, | ||||
|  | @ -255,6 +308,7 @@ class ShmArray: | |||
| 
 | ||||
|         field_map: Optional[dict[str, str]] = None, | ||||
|         prepend: bool = False, | ||||
|         update_first: bool = True, | ||||
|         start: Optional[int] = None, | ||||
| 
 | ||||
|     ) -> int: | ||||
|  | @ -267,10 +321,9 @@ class ShmArray: | |||
| 
 | ||||
|         ''' | ||||
|         length = len(data) | ||||
|         index = start if start is not None else self._last.value | ||||
| 
 | ||||
|         if prepend: | ||||
|             index = self._first.value - length | ||||
|             index = (start or self._first.value) - length | ||||
| 
 | ||||
|             if index < 0: | ||||
|                 raise ValueError( | ||||
|  | @ -278,6 +331,9 @@ class ShmArray: | |||
|                     f'You have passed {abs(index)} too many datums.' | ||||
|                 ) | ||||
| 
 | ||||
|         else: | ||||
|             index = start if start is not None else self._last.value | ||||
| 
 | ||||
|         end = index + length | ||||
| 
 | ||||
|         if field_map: | ||||
|  | @ -295,12 +351,17 @@ class ShmArray: | |||
|             # tries to access ``.array`` (which due to the index | ||||
|             # overlap will be empty). Pretty sure we've fixed it now | ||||
|             # but leaving this here as a reminder. | ||||
|             if prepend: | ||||
|             if prepend and update_first and length: | ||||
|                 assert index < self._first.value | ||||
| 
 | ||||
|             if index < self._first.value: | ||||
|             if ( | ||||
|                 index < self._first.value | ||||
|                 and update_first | ||||
|             ): | ||||
|                 assert prepend, 'prepend=True not passed but index decreased?' | ||||
|                 self._first.value = index | ||||
|             else: | ||||
| 
 | ||||
|             elif not prepend: | ||||
|                 self._last.value = end | ||||
| 
 | ||||
|             self._post_init = True | ||||
|  | @ -336,6 +397,7 @@ class ShmArray: | |||
|                 f"Input array has unknown field(s): {only_in_theirs}" | ||||
|             ) | ||||
| 
 | ||||
|     # TODO: support "silent" prepends that don't update ._first.value? | ||||
|     def prepend( | ||||
|         self, | ||||
|         data: np.ndarray, | ||||
|  | @ -386,7 +448,11 @@ def open_shm_array( | |||
|         create=True, | ||||
|         size=a.nbytes | ||||
|     ) | ||||
|     array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf) | ||||
|     array = np.ndarray( | ||||
|         a.shape, | ||||
|         dtype=a.dtype, | ||||
|         buffer=shm.buf | ||||
|     ) | ||||
|     array[:] = a[:] | ||||
|     array.setflags(write=int(not readonly)) | ||||
| 
 | ||||
|  |  | |||
|  | @ -21,9 +21,9 @@ from __future__ import annotations | |||
| from typing import Any | ||||
| import decimal | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import numpy as np | ||||
| import pandas as pd | ||||
| from pydantic import BaseModel, validate_arguments | ||||
| from pydantic import BaseModel | ||||
| # from numba import from_dtype | ||||
| 
 | ||||
| 
 | ||||
|  | @ -48,16 +48,16 @@ base_ohlc_dtype = np.dtype(ohlc_fields) | |||
| # https://github.com/numba/numba/issues/4511 | ||||
| # numba_ohlc_dtype = from_dtype(base_ohlc_dtype) | ||||
| 
 | ||||
| # map time frame "keys" to minutes values | ||||
| tf_in_1m = { | ||||
|     '1m': 1, | ||||
|     '5m':  5, | ||||
|     '15m': 15, | ||||
|     '30m':  30, | ||||
|     '1h': 60, | ||||
|     '4h': 240, | ||||
|     '1d': 1440, | ||||
| } | ||||
| # map time frame "keys" to seconds values | ||||
| tf_in_1s = bidict({ | ||||
|     1: '1s', | ||||
|     60: '1m', | ||||
|     60*5: '5m', | ||||
|     60*15: '15m', | ||||
|     60*30: '30m', | ||||
|     60*60: '1h', | ||||
|     60*60*24: '1d', | ||||
| }) | ||||
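| # eg. the two-way lookup this ``bidict`` buys us: | ||||
| #   tf_in_1s[60] == '1m' | ||||
| #   tf_in_1s.inverse['1m'] == 60 | ||||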
| 
 | ||||
| 
 | ||||
| def mk_fqsn( | ||||
|  | @ -127,11 +127,11 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]: | |||
| 
 | ||||
| 
 | ||||
| class Symbol(BaseModel): | ||||
|     """I guess this is some kinda container thing for dealing with | ||||
|     ''' | ||||
|     I guess this is some kinda container thing for dealing with | ||||
|     all the different meta-data formats from brokers? | ||||
| 
 | ||||
|     Yah, i guess dats what it izz. | ||||
|     """ | ||||
|     ''' | ||||
|     key: str | ||||
|     tick_size: float = 0.01 | ||||
|     lot_tick_size: float = 0.0  # "volume" precision as min step value | ||||
|  | @ -254,61 +254,6 @@ class Symbol(BaseModel): | |||
|         return keys | ||||
| 
 | ||||
| 
 | ||||
| def from_df( | ||||
| 
 | ||||
|     df: pd.DataFrame, | ||||
|     source=None, | ||||
|     default_tf=None | ||||
| 
 | ||||
| ) -> np.recarray: | ||||
|     """Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``. | ||||
| 
 | ||||
|     """ | ||||
|     df.reset_index(inplace=True) | ||||
| 
 | ||||
|     # hackery to convert field names | ||||
|     date = 'Date' | ||||
|     if 'date' in df.columns: | ||||
|         date = 'date' | ||||
| 
 | ||||
|     # convert to POSIX time | ||||
|     df[date] = [d.timestamp() for d in df[date]] | ||||
| 
 | ||||
|     # try to rename from some camel case | ||||
|     columns = { | ||||
|         'Date': 'time', | ||||
|         'date': 'time', | ||||
|         'Open': 'open', | ||||
|         'High': 'high', | ||||
|         'Low': 'low', | ||||
|         'Close': 'close', | ||||
|         'Volume': 'volume', | ||||
| 
 | ||||
|         # most feeds provide this as a session-anchored value | ||||
|         'vwap': 'bar_wap', | ||||
| 
 | ||||
|         # XXX: ib_insync calls this the "wap of the bar" | ||||
|         # but no clue what it actually is... | ||||
|         # https://github.com/pikers/piker/issues/119#issuecomment-729120988 | ||||
|         'average': 'bar_wap', | ||||
|     } | ||||
| 
 | ||||
|     df = df.rename(columns=columns) | ||||
| 
 | ||||
|     for name in df.columns: | ||||
|         # if name not in base_ohlc_dtype.names[1:]: | ||||
|         if name not in base_ohlc_dtype.names: | ||||
|             del df[name] | ||||
| 
 | ||||
|     # TODO: it turns out column access on recarrays is actually slower: | ||||
|     # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist | ||||
|     # it might make sense to make these structured arrays? | ||||
|     array = df.to_records(index=False) | ||||
|     _nan_to_closest_num(array) | ||||
| 
 | ||||
|     return array | ||||
| 
 | ||||
| 
 | ||||
| def _nan_to_closest_num(array: np.ndarray): | ||||
|     """Return interpolated values instead of NaN. | ||||
| 
 | ||||
|  |  | |||
|  | @ -16,26 +16,34 @@ | |||
| 
 | ||||
| """ | ||||
| marketstore cli. | ||||
| 
 | ||||
| """ | ||||
| from typing import List | ||||
| from functools import partial | ||||
| from pprint import pformat | ||||
| 
 | ||||
| from anyio_marketstore import open_marketstore_client | ||||
| import trio | ||||
| import tractor | ||||
| import click | ||||
| import numpy as np | ||||
| 
 | ||||
| from .marketstore import ( | ||||
|     get_client, | ||||
|     stream_quotes, | ||||
|     # stream_quotes, | ||||
|     ingest_quote_stream, | ||||
|     _url, | ||||
|     # _url, | ||||
|     _tick_tbk_ids, | ||||
|     mk_tbk, | ||||
| ) | ||||
| from ..cli import cli | ||||
| from .. import watchlists as wl | ||||
| from ..log import get_logger | ||||
| from ._sharedmem import ( | ||||
|     maybe_open_shm_array, | ||||
| ) | ||||
| from ._source import ( | ||||
|     base_iohlc_dtype, | ||||
| ) | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -49,51 +57,58 @@ log = get_logger(__name__) | |||
| ) | ||||
| @click.argument('names', nargs=-1) | ||||
| @click.pass_obj | ||||
| def ms_stream(config: dict, names: List[str], url: str): | ||||
|     """Connect to a marketstore time bucket stream for (a set of) symbols(s) | ||||
| def ms_stream( | ||||
|     config: dict, | ||||
|     names: list[str], | ||||
|     url: str, | ||||
| ) -> None: | ||||
|     ''' | ||||
|     Connect to a marketstore time bucket stream for (a set of) symbols(s) | ||||
|     and print to console. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     async def main(): | ||||
|         async for quote in stream_quotes(symbols=names): | ||||
|             log.info(f"Received quote:\n{quote}") | ||||
|         # async for quote in stream_quotes(symbols=names): | ||||
|         #    log.info(f"Received quote:\n{quote}") | ||||
|         ... | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option( | ||||
|     '--url', | ||||
|     default=_url, | ||||
|     help='HTTP URL of marketstore instance' | ||||
| ) | ||||
| @click.argument('names', nargs=-1) | ||||
| @click.pass_obj | ||||
| def ms_destroy(config: dict, names: List[str], url: str) -> None: | ||||
|     """Destroy symbol entries in the local marketstore instance. | ||||
|     """ | ||||
|     async def main(): | ||||
|         nonlocal names | ||||
|         async with get_client(url) as client: | ||||
| 
 | ||||
|             if not names: | ||||
|                 names = await client.list_symbols() | ||||
| 
 | ||||
|             # default is to wipe db entirely. | ||||
|             answer = input( | ||||
|                 "This will entirely wipe you local marketstore db @ " | ||||
|                 f"{url} of the following symbols:\n {pformat(names)}" | ||||
|                 "\n\nDelete [N/y]?\n") | ||||
| 
 | ||||
|             if answer == 'y': | ||||
|                 for sym in names: | ||||
|                     # tbk = _tick_tbk.format(sym) | ||||
|                     tbk = (sym, *_tick_tbk_ids) | ||||
|                     print(f"Destroying {tbk}..") | ||||
|                     await client.destroy(mk_tbk(tbk)) | ||||
|             else: | ||||
|                 print("Nothing deleted.") | ||||
| 
 | ||||
|     tractor.run(main) | ||||
| # @cli.command() | ||||
| # @click.option( | ||||
| #     '--url', | ||||
| #     default=_url, | ||||
| #     help='HTTP URL of marketstore instance' | ||||
| # ) | ||||
| # @click.argument('names', nargs=-1) | ||||
| # @click.pass_obj | ||||
| # def ms_destroy(config: dict, names: list[str], url: str) -> None: | ||||
| #     """Destroy symbol entries in the local marketstore instance. | ||||
| #     """ | ||||
| #     async def main(): | ||||
| #         nonlocal names | ||||
| #         async with get_client(url) as client: | ||||
| #  | ||||
| #             if not names: | ||||
| #                 names = await client.list_symbols() | ||||
| #  | ||||
| #             # default is to wipe db entirely. | ||||
| #             answer = input( | ||||
| #                 "This will entirely wipe you local marketstore db @ " | ||||
| #                 f"{url} of the following symbols:\n {pformat(names)}" | ||||
| #                 "\n\nDelete [N/y]?\n") | ||||
| #  | ||||
| #             if answer == 'y': | ||||
| #                 for sym in names: | ||||
| #                     # tbk = _tick_tbk.format(sym) | ||||
| #                     tbk = (sym, *_tick_tbk_ids) | ||||
| #                     print(f"Destroying {tbk}..") | ||||
| #                     await client.destroy(mk_tbk(tbk)) | ||||
| #             else: | ||||
| #                 print("Nothing deleted.") | ||||
| #  | ||||
| #     tractor.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
|  | @ -102,41 +117,53 @@ def ms_destroy(config: dict, names: List[str], url: str) -> None: | |||
|     is_flag=True, | ||||
|     help='Enable tractor logging') | ||||
| @click.option( | ||||
|     '--url', | ||||
|     default=_url, | ||||
|     help='HTTP URL of marketstore instance' | ||||
|     '--host', | ||||
|     default='localhost' | ||||
| ) | ||||
| @click.argument('name', nargs=1, required=True) | ||||
| @click.option( | ||||
|     '--port', | ||||
|     default=5993 | ||||
| ) | ||||
| @click.argument('symbols', nargs=-1) | ||||
| @click.pass_obj | ||||
| def ms_shell(config, name, tl, url): | ||||
|     """Start an IPython shell ready to query the local marketstore db. | ||||
|     """ | ||||
|     async def main(): | ||||
|         async with get_client(url) as client: | ||||
|             query = client.query  # noqa | ||||
|             # TODO: write magics to query marketstore | ||||
|             from IPython import embed | ||||
|             embed() | ||||
| def storesh( | ||||
|     config, | ||||
|     tl, | ||||
|     host, | ||||
|     port, | ||||
|     symbols: list[str], | ||||
| ): | ||||
|     ''' | ||||
|     Start an IPython shell ready to query the local marketstore db. | ||||
| 
 | ||||
|     tractor.run(main) | ||||
|     ''' | ||||
|     from piker.data.marketstore import tsdb_history_update | ||||
|     from piker._daemon import open_piker_runtime | ||||
| 
 | ||||
|     async def main(): | ||||
|         nonlocal symbols | ||||
| 
 | ||||
|         async with open_piker_runtime( | ||||
|             'storesh', | ||||
|             enable_modules=['piker.data._ahab'], | ||||
|         ): | ||||
|             symbol = symbols[0] | ||||
|             await tsdb_history_update(symbol) | ||||
| 
 | ||||
|     trio.run(main) | ||||
| 
 | ||||
| 
 | ||||
| @cli.command() | ||||
| @click.option('--test-file', '-t', help='Test quote stream file') | ||||
| @click.option('--tl', is_flag=True, help='Enable tractor logging') | ||||
| @click.option( | ||||
|     '--url', | ||||
|     default=_url, | ||||
|     help='HTTP URL of marketstore instance' | ||||
| ) | ||||
| @click.argument('name', nargs=1, required=True) | ||||
| @click.pass_obj | ||||
| def ingest(config, name, test_file, tl, url): | ||||
|     """Ingest real-time broker quotes and ticks to a marketstore instance. | ||||
|     """ | ||||
| def ingest(config, name, test_file, tl): | ||||
|     ''' | ||||
|     Ingest real-time broker quotes and ticks to a marketstore instance. | ||||
| 
 | ||||
|     ''' | ||||
|     # global opts | ||||
|     brokermod = config['brokermod'] | ||||
|     loglevel = config['loglevel'] | ||||
|     tractorloglevel = config['tractorloglevel'] | ||||
|     # log = config['log'] | ||||
|  | @ -145,15 +172,25 @@ def ingest(config, name, test_file, tl, url): | |||
|     watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins) | ||||
|     symbols = watchlists[name] | ||||
| 
 | ||||
|     tractor.run( | ||||
|         partial( | ||||
|             ingest_quote_stream, | ||||
|             symbols, | ||||
|             brokermod.name, | ||||
|             tries=1, | ||||
|             loglevel=loglevel, | ||||
|         ), | ||||
|         name='ingest_marketstore', | ||||
|         loglevel=tractorloglevel, | ||||
|         debug_mode=True, | ||||
|     ) | ||||
|     grouped_syms = {} | ||||
|     for sym in symbols: | ||||
|         symbol, _, provider = sym.rpartition('.') | ||||
|         if provider not in grouped_syms: | ||||
|             grouped_syms[provider] = [] | ||||
| 
 | ||||
|         grouped_syms[provider].append(symbol) | ||||
| 
 | ||||
|     async def entry_point(): | ||||
|         async with tractor.open_nursery() as n: | ||||
|             for provider, symbols in grouped_syms.items():  | ||||
|                 await n.run_in_actor( | ||||
|                     ingest_quote_stream, | ||||
|                     name='ingest_marketstore', | ||||
|                     symbols=symbols, | ||||
|                     brokername=provider, | ||||
|                     tries=1, | ||||
|                     actorloglevel=loglevel, | ||||
|                     loglevel=tractorloglevel | ||||
|                 ) | ||||
| 
 | ||||
|     tractor.run(entry_point) | ||||
|  |  | |||
|  | @ -20,27 +20,35 @@ Data feed apis and infra. | |||
| This module is enabled for ``brokerd`` daemons. | ||||
| 
 | ||||
| """ | ||||
| from __future__ import annotations | ||||
| from dataclasses import dataclass, field | ||||
| from datetime import datetime | ||||
| from contextlib import asynccontextmanager | ||||
| from functools import partial | ||||
| from types import ModuleType | ||||
| from typing import ( | ||||
|     Any, | ||||
|     AsyncIterator, Optional, | ||||
|     Generator, | ||||
|     Awaitable, | ||||
|     TYPE_CHECKING, | ||||
| ) | ||||
| 
 | ||||
| import trio | ||||
| from trio.abc import ReceiveChannel | ||||
| from trio_typing import TaskStatus | ||||
| import trimeter | ||||
| import tractor | ||||
| from pydantic import BaseModel | ||||
| import pendulum | ||||
| import numpy as np | ||||
| 
 | ||||
| from ..brokers import get_brokermod | ||||
| from .._cacheables import maybe_open_context | ||||
| from ..log import get_logger, get_console_log | ||||
| from .._daemon import ( | ||||
|     maybe_spawn_brokerd, | ||||
|     check_for_service, | ||||
| ) | ||||
| from ._sharedmem import ( | ||||
|     maybe_open_shm_array, | ||||
|  | @ -56,12 +64,19 @@ from ._source import ( | |||
| from ..ui import _search | ||||
| from ._sampling import ( | ||||
|     sampler, | ||||
|     broadcast, | ||||
|     increment_ohlc_buffer, | ||||
|     iter_ohlc_periods, | ||||
|     sample_and_broadcast, | ||||
|     uniform_rate_send, | ||||
| ) | ||||
| from ..brokers._util import ( | ||||
|     NoData, | ||||
|     DataUnavailable, | ||||
| ) | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from .marketstore import Storage | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
|  | @ -124,7 +139,7 @@ class _FeedsBus(BaseModel): | |||
| 
 | ||||
|     # def cancel_task( | ||||
|     #     self, | ||||
|     #     task: trio.lowlevel.Task | ||||
|     #     task: trio.lowlevel.Task, | ||||
|     # ) -> bool: | ||||
|     #     ... | ||||
| 
 | ||||
|  | @ -188,6 +203,388 @@ async def _setup_persistent_brokerd( | |||
|         await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| def diff_history( | ||||
|     array, | ||||
|     start_dt, | ||||
|     end_dt, | ||||
|     last_tsdb_dt: Optional[datetime] = None | ||||
| 
 | ||||
| ) -> np.ndarray: | ||||
| 
 | ||||
|     to_push = array | ||||
| 
 | ||||
|     if last_tsdb_dt: | ||||
|         s_diff = (start_dt - last_tsdb_dt).seconds | ||||
| 
 | ||||
|         # if we detect a partial frame's worth of data | ||||
|         # that is new, slice out only that history and | ||||
|         # write to shm. | ||||
|         if ( | ||||
|             s_diff < 0 | ||||
|             and abs(s_diff) < len(array) | ||||
|         ): | ||||
|             # the + 1 is because ``last_tsdb_dt`` is pulled from | ||||
|             # the last row entry for the ``'time'`` field retrieved | ||||
|             # from the tsdb. | ||||
|             to_push = array[abs(s_diff)+1:] | ||||
|             log.info( | ||||
|                 f'Pushing partial frame {to_push.size} to shm' | ||||
|             ) | ||||
| 
 | ||||
|     return to_push | ||||
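| # worked sketch (hypothetical numbers): given a 1000-row frame of 1s | ||||
| # bars whose ``start_dt`` lands ~100s *before* ``last_tsdb_dt``, | ||||
| # ``s_diff`` comes out negative with magnitude ~100, so only | ||||
| # ``array[101:]`` (the rows strictly newer than the tsdb's last datum) | ||||
| # is pushed; a frame entirely newer than the tsdb is pushed whole. | ||||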
| 
 | ||||
| 
 | ||||
| async def start_backfill( | ||||
|     mod: ModuleType, | ||||
|     bfqsn: str, | ||||
|     shm: ShmArray, | ||||
| 
 | ||||
|     last_tsdb_dt: Optional[datetime] = None, | ||||
|     storage: Optional[Storage] = None, | ||||
|     write_tsdb: bool = False, | ||||
| 
 | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
| ) -> int: | ||||
| 
 | ||||
|     async with mod.open_history_client(bfqsn) as (hist, config): | ||||
| 
 | ||||
|         # get latest query's worth of history all the way | ||||
|         # back to what is recorded in the tsdb | ||||
|         array, start_dt, end_dt = await hist(end_dt=None) | ||||
| 
 | ||||
|         times = array['time'] | ||||
| 
 | ||||
|         # sample period step size in seconds | ||||
|         step_size_s = ( | ||||
|             pendulum.from_timestamp(times[-1]) - | ||||
|             pendulum.from_timestamp(times[-2]) | ||||
|         ).seconds | ||||
| 
 | ||||
|         # "frame"'s worth of sample period steps in seconds | ||||
|         frame_size_s = len(array) * step_size_s | ||||
| 
 | ||||
|         to_push = diff_history( | ||||
|             array, | ||||
|             start_dt, | ||||
|             end_dt, | ||||
|             last_tsdb_dt=last_tsdb_dt, | ||||
|         ) | ||||
| 
 | ||||
|         log.info(f'Pushing {to_push.size} to shm!') | ||||
|         shm.push(to_push) | ||||
| 
 | ||||
|         for delay_s in sampler.subscribers: | ||||
|             await broadcast(delay_s) | ||||
| 
 | ||||
|         # signal that backfilling to tsdb's end datum is complete | ||||
|         bf_done = trio.Event() | ||||
| 
 | ||||
|         # let caller unblock and deliver latest history frame | ||||
|         task_status.started((shm, start_dt, end_dt, bf_done)) | ||||
| 
 | ||||
|         if last_tsdb_dt is None: | ||||
|             # maybe a better default (they don't seem to define epoch?!) | ||||
| 
 | ||||
|             # based on the sample step size load a certain amount | ||||
|             # of history | ||||
|             if step_size_s == 1: | ||||
|                 last_tsdb_dt = pendulum.now().subtract(days=2) | ||||
| 
 | ||||
|             elif step_size_s == 60: | ||||
|                 last_tsdb_dt = pendulum.now().subtract(years=2) | ||||
| 
 | ||||
|             else: | ||||
|                 raise ValueError( | ||||
|                     '`piker` only needs to support 1m and 1s sampling ' | ||||
|                     'but ur api is trying to deliver a longer ' | ||||
|                     f'timeframe of {step_size_s} ' 'seconds.. so ye, dun ' | ||||
|                     'do dat bruh.' | ||||
|                 ) | ||||
| 
 | ||||
|         # configure async query throttling | ||||
|         erlangs = config.get('erlangs', 1) | ||||
|         rate = config.get('rate', 1) | ||||
|         frames = {} | ||||
| 
 | ||||
|         def iter_dts(start: datetime): | ||||
|             while True: | ||||
| 
 | ||||
|                 hist_period = pendulum.period( | ||||
|                     start.subtract(seconds=step_size_s), | ||||
|                     last_tsdb_dt, | ||||
|                 ) | ||||
|                 dtrange = hist_period.range('seconds', frame_size_s) | ||||
| 
 | ||||
|                 for end_dt in dtrange: | ||||
|                     log.warning(f'Yielding next frame start {end_dt}') | ||||
|                     start = yield end_dt | ||||
| 
 | ||||
|                     # if caller sends a new start date, reset to that | ||||
|                     if start is not None: | ||||
|                         log.warning(f'Resetting date range: {start}') | ||||
|                         # import pdbpp | ||||
|                         # pdbpp.set_trace() | ||||
|                         break | ||||
|                 else: | ||||
|                     # from while | ||||
|                     return | ||||
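|         # the reset-via-``.send()`` pattern above, in miniature | ||||
|         # (standalone sketch, names hypothetical): | ||||
|         # | ||||
|         #   def counter(start: int): | ||||
|         #       while True: | ||||
|         #           for i in range(start, start + 3): | ||||
|         #               start = yield i | ||||
|         #               if start is not None: | ||||
|         #                   break  # restart at the sent value | ||||
|         #           else: | ||||
|         #               return | ||||
|         # | ||||
|         #   gen = counter(0) | ||||
|         #   assert next(gen) == 0 | ||||
|         #   assert gen.send(10) == 10  # sequence jumps to new start | ||||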
| 
 | ||||
|         # pull new history frames until we hit latest | ||||
|         # already in the tsdb or a max count. | ||||
|         count = 0 | ||||
| 
 | ||||
|         # NOTE: when gaps are detected in the retrieved history (by | ||||
|         # comparison of end - start versus the expected "frame size" | ||||
|         # in seconds) we need a way to alert the async request code not | ||||
|         # to continue to query for data "within the gap". This var is | ||||
|         # set in such cases so that further requests in that period are | ||||
|         # discarded; we also reset the "datetime query frame index" to | ||||
|         # avoid needless noop requests. | ||||
|         earliest_end_dt: Optional[datetime] = start_dt | ||||
| 
 | ||||
|         async def get_ohlc_frame( | ||||
|             input_end_dt: datetime, | ||||
|             iter_dts_gen: Generator[datetime, datetime, None], | ||||
| 
 | ||||
|         ) -> np.ndarray | DataUnavailable | None: | ||||
| 
 | ||||
|             nonlocal count, frames, earliest_end_dt, frame_size_s | ||||
|             count += 1 | ||||
| 
 | ||||
|             if input_end_dt > earliest_end_dt: | ||||
|                 # if a request comes in for an inter-gap frame we | ||||
|                 # discard it since likely this request is still | ||||
|                 # lingering from before the reset of ``iter_dts()`` via | ||||
|                 # ``.send()`` below. | ||||
|                 log.info(f'Discarding request history ending @ {input_end_dt}') | ||||
| 
 | ||||
|                 # signals to the ``trimeter`` loop to discard this | ||||
|                 # result and ``continue`` in its schedule loop. | ||||
|                 return None | ||||
| 
 | ||||
|             try: | ||||
|                 log.info( | ||||
|                     f'Requesting {step_size_s}s frame ending in {input_end_dt}' | ||||
|                 ) | ||||
|                 array, start_dt, end_dt = await hist(end_dt=input_end_dt) | ||||
|                 assert array['time'][0] == start_dt.timestamp() | ||||
| 
 | ||||
|             except NoData: | ||||
|                 log.warning( | ||||
|                     f'NO DATA for {frame_size_s}s frame @ {input_end_dt} ?!?' | ||||
|                 ) | ||||
|                 return None  # discard signal | ||||
| 
 | ||||
|             except DataUnavailable as duerr: | ||||
|                 # broker is being a bish and we can't pull | ||||
|                 # any more.. | ||||
|                 log.warning('backend halted on data deliver !?!?') | ||||
| 
 | ||||
|                 # ugh, what's a better way? | ||||
|                 # TODO: fwiw, we probably want a way to signal a throttle | ||||
|                 # condition (eg. with ib) so that we can halt the | ||||
|                 # request loop until the condition is resolved? | ||||
|                 return duerr | ||||
| 
 | ||||
|             diff = end_dt - start_dt | ||||
|             frame_time_diff_s = diff.seconds | ||||
|             expected_frame_size_s = frame_size_s + step_size_s | ||||
| 
 | ||||
|             if frame_time_diff_s > expected_frame_size_s: | ||||
| 
 | ||||
|                 # XXX: query result includes a start point prior to our | ||||
|                 # expected "frame size" and thus is likely some kind of | ||||
|                 # history gap (eg. market closed period, outage, etc.) | ||||
|                 # so indicate to the request loop that this gap is | ||||
|                 # expected by both, | ||||
|                 # - resetting the ``iter_dts()`` generator to start at | ||||
|                 #   the new start point delivered in this result | ||||
|                 # - setting the non-locally scoped ``earliest_end_dt`` | ||||
|                 #   to this new value so that the request loop doesn't | ||||
|                 #   get tripped up thinking there's an out of order | ||||
|                 #   request-result condition. | ||||
| 
 | ||||
|                 log.warning( | ||||
|                     f'History frame ending @ {end_dt} appears to have a gap:\n' | ||||
|                     f'{diff} ~= {frame_time_diff_s} seconds' | ||||
|                 ) | ||||
| 
 | ||||
|                 # reset dtrange gen to new start point | ||||
|                 try: | ||||
|                     next_end = iter_dts_gen.send(start_dt) | ||||
|                     log.info( | ||||
|                         f'Reset frame index to start at {start_dt}\n' | ||||
|                         f'Was at {next_end}' | ||||
|                     ) | ||||
| 
 | ||||
|                     # NOTE: manually set "earliest end datetime" index-value | ||||
|                     # to avoid the request loop getting confused about | ||||
|                     # new frames that are earlier in history - i.e. this | ||||
|                     # **is not** the case of out-of-order frames from | ||||
|                     # an async batch request. | ||||
|                     earliest_end_dt = start_dt | ||||
| 
 | ||||
|                 except StopIteration: | ||||
|                     # gen already terminated meaning we probably already | ||||
|                     # exhausted it via frame requests. | ||||
|                     log.info( | ||||
|                         "Datetime index already exhausted, can't reset.." | ||||
|                     ) | ||||
| 
 | ||||
|             to_push = diff_history( | ||||
|                 array, | ||||
|                 start_dt, | ||||
|                 end_dt, | ||||
|                 last_tsdb_dt=last_tsdb_dt, | ||||
|             ) | ||||
|             ln = len(to_push) | ||||
|             if ln: | ||||
|                 log.info(f'{ln} bars for {start_dt} -> {end_dt}') | ||||
|                 frames[input_end_dt.timestamp()] = (to_push, start_dt, end_dt) | ||||
|                 return to_push, start_dt, end_dt | ||||
| 
 | ||||
|             else: | ||||
|                 log.warning( | ||||
|                     f'{ln} BARS TO PUSH after diff?!: {start_dt} -> {end_dt}' | ||||
|                 ) | ||||
|                 return None | ||||
| 
 | ||||
|         # initial dt index starts at the start of the first query result | ||||
|         idts = iter_dts(start_dt) | ||||
| 
 | ||||
|         async with trimeter.amap( | ||||
|             partial( | ||||
|                 get_ohlc_frame, | ||||
|                 # we close in the ``iter_dts()`` gen so we can send | ||||
|                 # reset signals as needed for gap detection in the | ||||
|                 # history. | ||||
|                 iter_dts_gen=idts, | ||||
|             ), | ||||
|             idts, | ||||
| 
 | ||||
|             capture_outcome=True, | ||||
|             include_value=True, | ||||
| 
 | ||||
|             # better technical names bruv... | ||||
|             max_at_once=erlangs, | ||||
|             max_per_second=rate, | ||||
| 
 | ||||
|         ) as outcomes: | ||||
| 
 | ||||
|             # Then iterate over the return values, as they become available | ||||
|             # (i.e., not necessarily in the original order) | ||||
|             async for input_end_dt, outcome in outcomes: | ||||
| 
 | ||||
|                 try: | ||||
|                     out = outcome.unwrap() | ||||
| 
 | ||||
|                     if out is None: | ||||
|                         # skip signal | ||||
|                         continue | ||||
| 
 | ||||
|                     elif isinstance(out, DataUnavailable): | ||||
|                         # no data available case signal.. so just kill | ||||
|                         # further requests and basically just stop | ||||
|                         # trying... | ||||
|                         break | ||||
| 
 | ||||
|                 except Exception: | ||||
|                     log.exception('uhh trimeter bail') | ||||
|                     raise | ||||
|                 else: | ||||
|                     to_push, start_dt, end_dt = out | ||||
| 
 | ||||
|                 if not len(to_push): | ||||
|                     # diff returned no new data (i.e. we probably hit | ||||
|                     # the ``last_tsdb_dt`` point). | ||||
|                     # TODO: raise instead? | ||||
|                     log.warning(f'No history for range {start_dt} -> {end_dt}') | ||||
|                     continue | ||||
| 
 | ||||
|                 # pipeline-style pull frames until we need to wait for | ||||
|                 # the next in order to arrive. | ||||
|                 # i = end_dts.index(input_end_dt) | ||||
|                 # print(f'latest end_dt {end_dt} found at index {i}') | ||||
| 
 | ||||
|                 epochs = list(reversed(sorted(frames))) | ||||
|                 for epoch in epochs: | ||||
|                     start = shm.array['time'][0] | ||||
| 
 | ||||
|                     diff = epoch - start | ||||
|                     if abs(diff) > step_size_s: | ||||
| 
 | ||||
|                         if earliest_end_dt < end_dt: | ||||
|                             # XXX: an expected gap was encountered (see | ||||
|                             # logic in ``get_ohlc_frame()``, so allow | ||||
|                             # this frame through to the storage layer. | ||||
|                             log.warning( | ||||
|                                 f'there is an expected history gap of {diff}s:' | ||||
|                             ) | ||||
| 
 | ||||
|                         elif ( | ||||
|                             erlangs > 1 | ||||
|                             and len(epochs) < erlangs | ||||
|                         ): | ||||
|                             # we don't yet have the next frame to push | ||||
|                             # so break back to the async request loop | ||||
|                             # while we wait for more async frame-results | ||||
|                             # to arrive. | ||||
|                             expect_end = pendulum.from_timestamp(start) | ||||
|                             expect_start = expect_end.subtract( | ||||
|                                 seconds=frame_size_s) | ||||
|                             log.warning( | ||||
|                                 'waiting on out-of-order history frame:\n' | ||||
|                                 f'{expect_end - expect_start}' | ||||
|                             ) | ||||
|                             break | ||||
| 
 | ||||
|                     to_push, start_dt, end_dt = frames.pop(epoch) | ||||
|                     ln = len(to_push) | ||||
| 
 | ||||
|                     # bail gracefully on shm allocation overrun/full condition | ||||
|                     try: | ||||
|                         shm.push(to_push, prepend=True) | ||||
|                     except ValueError: | ||||
|                         log.info( | ||||
|                             f'Shm buffer overrun on: {start_dt} -> {end_dt}?' | ||||
|                         ) | ||||
|                         break | ||||
| 
 | ||||
|                     log.info( | ||||
|                         f'Shm pushed {ln} frame:\n' | ||||
|                         f'{start_dt} -> {end_dt}' | ||||
|                     ) | ||||
|                     # keep track of most recent "prepended" ``start_dt`` | ||||
|                     # both for detecting gaps and ensuring async | ||||
|                     # frame-result order. | ||||
|                     earliest_end_dt = start_dt | ||||
| 
 | ||||
|                     if ( | ||||
|                         storage is not None | ||||
|                         and write_tsdb | ||||
|                     ): | ||||
|                         log.info( | ||||
|                             f'Writing {ln} frame to storage:\n' | ||||
|                             f'{start_dt} -> {end_dt}' | ||||
|                         ) | ||||
|                         await storage.write_ohlcv( | ||||
|                             f'{bfqsn}.{mod.name}',  # lul.. | ||||
|                             to_push, | ||||
|                         ) | ||||
| 
 | ||||
|                 # TODO: can we only trigger this if the respective | ||||
|                 # history is "in view"?!? | ||||
|                 # XXX: extremely important, there can be no checkpoints | ||||
|                 # in the block above to avoid entering new ``frames`` | ||||
|                 # values while we're pipelining the current ones to | ||||
|                 # memory... | ||||
|                 for delay_s in sampler.subscribers: | ||||
|                     await broadcast(delay_s) | ||||
| 
 | ||||
|         bf_done.set() | ||||
| 
 | ||||
| 
 | ||||
| async def manage_history( | ||||
|     mod: ModuleType, | ||||
|     bus: _FeedsBus, | ||||
|  | @ -216,50 +613,180 @@ async def manage_history( | |||
|         # we expect the sub-actor to write | ||||
|         readonly=False, | ||||
|     ) | ||||
|     # TODO: history validation | ||||
|     if not opened: | ||||
|         raise RuntimeError( | ||||
|             "Persistent shm for sym was already open?!" | ||||
|         ) | ||||
| 
 | ||||
|     if opened: | ||||
|     log.info('Scanning for existing `marketstored`') | ||||
| 
 | ||||
|     is_up = await check_for_service('marketstored') | ||||
| 
 | ||||
|     # for now only do backfilling if no tsdb can be found | ||||
|     do_legacy_backfill = not is_up and opened | ||||
| 
 | ||||
|     bfqsn = fqsn.replace('.' + mod.name, '') | ||||
|     open_history_client = getattr(mod, 'open_history_client', None) | ||||
| 
 | ||||
|     if is_up and opened and open_history_client: | ||||
| 
 | ||||
|         log.info('Found existing `marketstored`') | ||||
|         from . import marketstore | ||||
|         async with marketstore.open_storage_client( | ||||
|             fqsn, | ||||
|         ) as storage: | ||||
| 
 | ||||
|             # TODO: this should be used verbatim for the pure | ||||
|             # shm backfiller approach below. | ||||
| 
 | ||||
|             # start history anal and load missing new data via backend. | ||||
|             series, _, last_tsdb_dt = await storage.load(fqsn) | ||||
| 
 | ||||
|             broker, symbol, expiry = unpack_fqsn(fqsn) | ||||
|             ( | ||||
|                 shm, | ||||
|                 latest_start_dt, | ||||
|                 latest_end_dt, | ||||
|                 bf_done, | ||||
|             ) = await bus.nursery.start( | ||||
|                 partial( | ||||
|                     start_backfill, | ||||
|                     mod, | ||||
|                     bfqsn, | ||||
|                     shm, | ||||
|                     last_tsdb_dt=last_tsdb_dt, | ||||
|                     storage=storage, | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             # if len(shm.array) < 2: | ||||
|             # TODO: there's an edge case here to solve where if the last | ||||
|             # frame before market close (at least on ib) was pushed and | ||||
|             # there was only "1 new" row pushed from the first backfill | ||||
|             # query-iteration, then the sample step sizing calcs will | ||||
|             # break upstream from here since you can't diff on at least | ||||
|             # 2 steps... probably should also add logic to compute from | ||||
|             # the tsdb series and stash that somewhere as meta data on | ||||
|             # the shm buffer?.. no se. | ||||
| 
 | ||||
|             task_status.started(shm) | ||||
|             some_data_ready.set() | ||||
| 
 | ||||
|             await bf_done.wait() | ||||
|             # do diff against last start frame of history and only fill | ||||
|             # in from the tsdb an allotment that allows for most recent | ||||
|             # to be loaded into mem *before* tsdb data. | ||||
|             if last_tsdb_dt: | ||||
|                 dt_diff_s = ( | ||||
|                     latest_start_dt - last_tsdb_dt | ||||
|                 ).seconds | ||||
|             else: | ||||
|                 dt_diff_s = 0 | ||||
| 
 | ||||
|             # await trio.sleep_forever() | ||||
|             # TODO: see if there's faster multi-field reads: | ||||
|             # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields | ||||
|             # re-index with a `time` and index field | ||||
|             prepend_start = shm._first.value | ||||
| 
 | ||||
|             # sanity check on most-recent-data loading | ||||
|             assert prepend_start > dt_diff_s | ||||
| 
 | ||||
|             history = list(series.values()) | ||||
|             if history: | ||||
|                 fastest = history[0] | ||||
|                 to_push = fastest[:prepend_start] | ||||
| 
 | ||||
|                 shm.push( | ||||
|                     to_push, | ||||
| 
 | ||||
|                     # insert the history prior to a "days worth" of samples | ||||
|                     # to leave some real-time buffer space at the end. | ||||
|                     prepend=True, | ||||
|                     # update_first=False, | ||||
|                     # start=prepend_start, | ||||
|                     field_map=marketstore.ohlc_key_map, | ||||
|                 ) | ||||
| 
 | ||||
|                 # load as much from storage into shm as space will | ||||
|                 # allow according to user's shm size settings. | ||||
|                 count = 0 | ||||
|                 end = fastest['Epoch'][0] | ||||
| 
 | ||||
|                 while shm._first.value > 0: | ||||
|                     count += 1 | ||||
|                     series = await storage.read_ohlcv( | ||||
|                         fqsn, | ||||
|                         end=end, | ||||
|                     ) | ||||
|                     history = list(series.values()) | ||||
|                     fastest = history[0] | ||||
|                     end = fastest['Epoch'][0] | ||||
|                     prepend_start -= len(to_push) | ||||
|                     to_push = fastest[:prepend_start] | ||||
| 
 | ||||
|                     shm.push( | ||||
|                         to_push, | ||||
| 
 | ||||
|                         # insert the history prior to a "days worth" of samples | ||||
|                         # to leave some real-time buffer space at the end. | ||||
|                         prepend=True, | ||||
|                         # update_first=False, | ||||
|                         # start=prepend_start, | ||||
|                         field_map=marketstore.ohlc_key_map, | ||||
|                     ) | ||||
| 
 | ||||
|                     # manually trigger step update to update charts/fsps | ||||
|                     # which need an incremental update. | ||||
|                     for delay_s in sampler.subscribers: | ||||
|                         await broadcast(delay_s) | ||||
| 
 | ||||
|                     if count > 6: | ||||
|                         break | ||||
| 
 | ||||
|                 log.info(f'Loaded {to_push.shape} datums from storage') | ||||
| 
 | ||||
|                 # TODO: write new data to tsdb to be ready for the next read. | ||||
| 
 | ||||
|     if do_legacy_backfill: | ||||
|         # do a legacy incremental backfill from the provider. | ||||
|         log.info('No existing `marketstored` found..') | ||||
| 
 | ||||
|         # start the history backfill task: ``backfill_bars()`` is | ||||
|         # a required backend func and this must block until shm is | ||||
|         # filled with the first set of ohlc bars | ||||
|         _ = await bus.nursery.start(mod.backfill_bars, fqsn, shm) | ||||
| 
 | ||||
|     # yield back after client connect with filled shm | ||||
|     task_status.started(shm) | ||||
| 
 | ||||
|     # indicate to caller that feed can be delivered to | ||||
|     # remote requesting client since we've loaded history | ||||
|     # data that can be used. | ||||
|     some_data_ready.set() | ||||
| 
 | ||||
|     # detect sample step size for sampled historical data | ||||
|     times = shm.array['time'] | ||||
|     delay_s = times[-1] - times[times != times[-1]][-1] | ||||
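|     # eg. times == [.., 58., 59., 60., 60.] when a fresh bar was just | ||||
|     # appended: masking out the duplicated last stamp and diffing | ||||
|     # against it recovers the true sample period (here 1.0s). | ||||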
| 
 | ||||
|     # begin real-time updates of shm and tsb once the feed | ||||
|     # goes live. | ||||
|     await feed_is_live.wait() | ||||
| 
 | ||||
|     if opened: | ||||
|         sampler.ohlcv_shms.setdefault(delay_s, []).append(shm) | ||||
| 
 | ||||
|         # start shm incrementing for OHLC sampling at the current | ||||
|         # detected sampling period if one dne. | ||||
|         if sampler.incrementers.get(delay_s) is None: | ||||
|             await bus.start_task( | ||||
|                 increment_ohlc_buffer, | ||||
|                 delay_s, | ||||
|         await bus.nursery.start( | ||||
|             partial( | ||||
|                 start_backfill, | ||||
|                 mod, | ||||
|                 bfqsn, | ||||
|                 shm, | ||||
|             ) | ||||
|         ) | ||||
| 
 | ||||
|         # yield back after client connect with filled shm | ||||
|         task_status.started(shm) | ||||
| 
 | ||||
|         # indicate to caller that feed can be delivered to | ||||
|         # remote requesting client since we've loaded history | ||||
|         # data that can be used. | ||||
|         some_data_ready.set() | ||||
| 
 | ||||
|     # history retrieval loop depending on user interaction and thus | ||||
|     # a small RPC-protocol for remotely controlling what data is loaded | ||||
|     # for viewing. | ||||
|     await trio.sleep_forever() | ||||
| 
 | ||||
| 
 | ||||
| async def allocate_persistent_feed( | ||||
|     bus: _FeedsBus, | ||||
| 
 | ||||
|     brokername: str, | ||||
|     symbol: str, | ||||
| 
 | ||||
|     loglevel: str, | ||||
|     start_stream: bool = True, | ||||
| 
 | ||||
|     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||
| 
 | ||||
|  | @ -277,6 +804,7 @@ async def allocate_persistent_feed( | |||
|     - a real-time streaming task which connects to the backend quote stream | ||||
| 
 | ||||
|     ''' | ||||
|     # load backend module | ||||
|     try: | ||||
|         mod = get_brokermod(brokername) | ||||
|     except ImportError: | ||||
|  | @ -319,7 +847,7 @@ async def allocate_persistent_feed( | |||
|         manage_history, | ||||
|         mod, | ||||
|         bus, | ||||
|         bfqsn, | ||||
|         '.'.join((bfqsn, brokername)), | ||||
|         some_data_ready, | ||||
|         feed_is_live, | ||||
|     ) | ||||
|  | @ -333,7 +861,10 @@ async def allocate_persistent_feed( | |||
|     # true fqsn | ||||
|     fqsn = '.'.join((bfqsn, brokername)) | ||||
|     # add a fqsn entry that includes the ``.<broker>`` suffix | ||||
|     # and an entry that includes the broker-specific fqsn (including | ||||
|     # any new suffixes or elements as injected by the backend). | ||||
|     init_msg[fqsn] = msg | ||||
|     init_msg[bfqsn] = msg | ||||
| 
 | ||||
|     # TODO: pretty sure we don't need this? why not just leave 1s as | ||||
|     # the fastest "sample period" since we'll probably always want that | ||||
|  | @ -347,13 +878,14 @@ async def allocate_persistent_feed( | |||
|     await some_data_ready.wait() | ||||
| 
 | ||||
|     # append ``.<broker>`` suffix to each quote symbol | ||||
|     bsym = symbol + f'.{brokername}' | ||||
|     acceptable_not_fqsn_with_broker_suffix = symbol + f'.{brokername}' | ||||
| 
 | ||||
|     generic_first_quotes = { | ||||
|         bsym: first_quote, | ||||
|         acceptable_not_fqsn_with_broker_suffix: first_quote, | ||||
|         fqsn: first_quote, | ||||
|     } | ||||
| 
 | ||||
|     bus.feeds[symbol] = bus.feeds[fqsn] = ( | ||||
|     bus.feeds[symbol] = bus.feeds[bfqsn] = ( | ||||
|         init_msg, | ||||
|         generic_first_quotes, | ||||
|     ) | ||||
|  | @ -363,9 +895,25 @@ async def allocate_persistent_feed( | |||
|     # task_status.started((init_msg,  generic_first_quotes)) | ||||
|     task_status.started() | ||||
| 
 | ||||
|     # backend will indicate when real-time quotes have begun. | ||||
|     if not start_stream: | ||||
|         await trio.sleep_forever() | ||||
| 
 | ||||
|     # begin real-time updates of shm and tsb once the feed goes live and | ||||
|     # the backend will indicate when real-time quotes have begun. | ||||
|     await feed_is_live.wait() | ||||
| 
 | ||||
|     # start shm incrementer task for OHLC style sampling | ||||
|     # at the current detected step period. | ||||
|     times = shm.array['time'] | ||||
|     delay_s = times[-1] - times[times != times[-1]][-1] | ||||
| 
 | ||||
|     sampler.ohlcv_shms.setdefault(delay_s, []).append(shm) | ||||
|     if sampler.incrementers.get(delay_s) is None: | ||||
|         await bus.start_task( | ||||
|             increment_ohlc_buffer, | ||||
|             delay_s, | ||||
|         ) | ||||
| 
 | ||||
|     sum_tick_vlm: bool = init_msg.get( | ||||
|         'shm_write_opts', {} | ||||
|     ).get('sum_tick_vlm', True) | ||||
|  | @ -388,7 +936,7 @@ async def open_feed_bus( | |||
| 
 | ||||
|     ctx: tractor.Context, | ||||
|     brokername: str, | ||||
|     symbol: str, | ||||
|     symbol: str,  # normally expected to be the broker-specific fqsn | ||||
|     loglevel: str, | ||||
|     tick_throttle:  Optional[float] = None, | ||||
|     start_stream: bool = True, | ||||
|  | @ -410,7 +958,9 @@ async def open_feed_bus( | |||
|     # TODO: check for any stale shm entries for this symbol | ||||
|     # (after we also group them in a nice `/dev/shm/piker/` subdir). | ||||
|     # ensure we are who we think we are | ||||
|     assert 'brokerd' in tractor.current_actor().name | ||||
|     servicename = tractor.current_actor().name | ||||
|     assert 'brokerd' in servicename | ||||
|     assert brokername in servicename | ||||
| 
 | ||||
|     bus = get_feed_bus(brokername) | ||||
| 
 | ||||
|  | @ -420,7 +970,7 @@ async def open_feed_bus( | |||
|     entry = bus.feeds.get(symbol) | ||||
|     if entry is None: | ||||
|         # allocate a new actor-local stream bus which | ||||
|         # will persist for this `brokerd`. | ||||
|         # will persist for this `brokerd`'s service lifetime. | ||||
|         async with bus.task_lock: | ||||
|             await bus.nursery.start( | ||||
|                 partial( | ||||
|  | @ -428,13 +978,12 @@ async def open_feed_bus( | |||
| 
 | ||||
|                     bus=bus, | ||||
|                     brokername=brokername, | ||||
| 
 | ||||
|                     # here we pass through the selected symbol in native | ||||
|                     # "format" (i.e. upper vs. lowercase depending on | ||||
|                     # provider). | ||||
|                     symbol=symbol, | ||||
| 
 | ||||
|                     loglevel=loglevel, | ||||
|                     start_stream=start_stream, | ||||
|                 ) | ||||
|             ) | ||||
|             # TODO: we can remove this? | ||||
|  | @ -450,7 +999,7 @@ async def open_feed_bus( | |||
|     # true fqsn | ||||
|     fqsn = '.'.join([bfqsn, brokername]) | ||||
|     assert fqsn in first_quotes | ||||
|     assert bus.feeds[fqsn] | ||||
|     assert bus.feeds[bfqsn] | ||||
| 
 | ||||
|     # broker-ambiguous symbol (provided on cli - eg. mnq.globex.ib) | ||||
|     bsym = symbol + f'.{brokername}' | ||||
|  |  | |||
|  | @ -14,36 +14,60 @@ | |||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| """ | ||||
| ''' | ||||
| ``marketstore`` integration. | ||||
| 
 | ||||
| - client management routines | ||||
| - tick data ingest routines | ||||
| - websocket client for subscribing to write triggers | ||||
| - todo: tick sequence stream-cloning for testing | ||||
| - todo: docker container management automation | ||||
| """ | ||||
| from contextlib import asynccontextmanager | ||||
| from typing import Dict, Any, List, Callable, Tuple | ||||
| 
 | ||||
| ''' | ||||
| from contextlib import asynccontextmanager as acm | ||||
| from datetime import datetime | ||||
| from pprint import pformat | ||||
| from typing import ( | ||||
|     Any, | ||||
|     Optional, | ||||
|     Union, | ||||
| ) | ||||
| import time | ||||
| from math import isnan | ||||
| 
 | ||||
| from bidict import bidict | ||||
| import msgpack | ||||
| import pyqtgraph as pg | ||||
| import numpy as np | ||||
| import pandas as pd | ||||
| import pymarketstore as pymkts | ||||
| import tractor | ||||
| from trio_websocket import open_websocket_url | ||||
| from anyio_marketstore import ( | ||||
|     open_marketstore_client, | ||||
|     MarketstoreClient, | ||||
|     Params, | ||||
| ) | ||||
| import pendulum | ||||
| import purerpc | ||||
| 
 | ||||
| from .feed import maybe_open_feed | ||||
| from ..log import get_logger, get_console_log | ||||
| from ..data import open_feed | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| _tick_tbk_ids: Tuple[str, str] = ('1Sec', 'TICK') | ||||
| _tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK') | ||||
| _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids) | ||||
| _url: str = 'http://localhost:5993/rpc' | ||||
| 
 | ||||
| _tick_dt = [ | ||||
|     # these two are required as a "primary key" | ||||
|     ('Epoch', 'i8'), | ||||
|     ('Nanoseconds', 'i4'), | ||||
|     ('IsTrade', 'i1'), | ||||
|     ('IsBid', 'i1'), | ||||
|     ('Price', 'f4'), | ||||
|     ('Size', 'f4') | ||||
| ] | ||||
| 
 | ||||
| _quote_dt = [ | ||||
|     # these two are required as a "primary key" | ||||
|     ('Epoch', 'i8'), | ||||
|  | @ -61,6 +85,7 @@ _quote_dt = [ | |||
|     # ('brokerd_ts', 'i64'), | ||||
|     # ('VWAP', 'f4') | ||||
| ] | ||||
| 
 | ||||
| _quote_tmp = {}.fromkeys(dict(_quote_dt).keys(), np.nan) | ||||
| _tick_map = { | ||||
|     'Up': 1, | ||||
|  | @ -69,28 +94,49 @@ _tick_map = { | |||
|     None: np.nan, | ||||
| } | ||||
| 
 | ||||
| _ohlcv_dt = [ | ||||
|     # these two are required as a "primary key" | ||||
|     ('Epoch', 'i8'), | ||||
|     # ('Nanoseconds', 'i4'), | ||||
| 
 | ||||
| class MarketStoreError(Exception): | ||||
|     "Generic marketstore client error" | ||||
|     # ohlcv sampling | ||||
|     ('Open', 'f4'), | ||||
|     ('High', 'f4'), | ||||
|     ('Low', 'f4'), | ||||
|     ('Close', 'f4'), | ||||
|     ('Volume', 'f4'), | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def err_on_resp(response: dict) -> None: | ||||
|     """Raise any errors found in responses from client request. | ||||
|     """ | ||||
|     responses = response['responses'] | ||||
|     if responses is not None: | ||||
|         for r in responses: | ||||
|             err = r['error'] | ||||
|             if err: | ||||
|                 raise MarketStoreError(err) | ||||
| ohlc_key_map = bidict({ | ||||
|     'Epoch': 'time', | ||||
|     'Open': 'open', | ||||
|     'High': 'high', | ||||
|     'Low': 'low', | ||||
|     'Close': 'close', | ||||
|     'Volume': 'volume', | ||||
| }) | ||||
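| # sketch: this map is what gets passed as ``field_map`` to | ||||
| # ``ShmArray.push()`` so tsdb-named columns land in the shm buffer's | ||||
| # dtype, roughly: | ||||
| # | ||||
| #   for tsdb_name, shm_name in ohlc_key_map.items(): | ||||
| #       dest[shm_name] = src[tsdb_name] | ||||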
| 
 | ||||
| 
 | ||||
| def mk_tbk(keys: tuple[str, str, str]) -> str: | ||||
|     ''' | ||||
|     Generate a marketstore table key from a tuple. | ||||
|     Converts, | ||||
|         ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"`` | ||||
| 
 | ||||
|     ''' | ||||
|     return '/'.join(keys) | ||||
| 
 | ||||
| 
 | ||||
| def quote_to_marketstore_structarray( | ||||
|     quote: Dict[str, Any], | ||||
|     last_fill: str, | ||||
|     quote: dict[str, Any], | ||||
|     last_fill: Optional[float] | ||||
| 
 | ||||
| ) -> np.ndarray: | ||||
|     """Return marketstore writeable structarray from quote ``dict``. | ||||
|     """ | ||||
|     ''' | ||||
|     Return marketstore writeable structarray from quote ``dict``. | ||||
| 
 | ||||
|     ''' | ||||
|     if last_fill: | ||||
|         # new fill bby | ||||
|         now = timestamp(last_fill) | ||||
|  | @ -101,7 +147,7 @@ def quote_to_marketstore_structarray( | |||
| 
 | ||||
|     secs, ns = now / 10**9, now % 10**9 | ||||
| 
 | ||||
|     # pack into list[tuple[str, Any]] | ||||
|     array_input = [] | ||||
| 
 | ||||
|     # insert 'Epoch' entry first and then 'Nanoseconds'. | ||||
|  | @ -123,146 +169,456 @@ def quote_to_marketstore_structarray( | |||
|     return np.array([tuple(array_input)], dtype=_quote_dt) | ||||
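|     # NB: a quick sketch of the secs/ns split above for a hypothetical | ||||
|     # ns-resolution timestamp: | ||||
|     #   now = 1_640_995_200_500_000_000 | ||||
|     #   secs, ns = now / 10**9, now % 10**9  # -> (1640995200.5, 500000000) | ||||
|     # nb: ``divmod(now, 10**9)`` would yield the integral split instead. | ||||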
| 
 | ||||
| 
 | ||||
| def timestamp(date, **kwargs) -> int: | ||||
|     ''' | ||||
|     Return marketstore compatible 'Epoch' integer in nanoseconds | ||||
|     from a date formatted str. | ||||
| 
 | ||||
|     ''' | ||||
|     return int(pd.Timestamp(date, **kwargs).value) | ||||
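|     # e.g. (sketch): | ||||
|     #   >>> timestamp('2022-01-01') | ||||
|     #   1640995200000000000 | ||||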
| 
 | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| class Client: | ||||
|     """Async wrapper around the alpaca ``pymarketstore`` sync client. | ||||
| 
 | ||||
|     This will serve as the shell for building out a proper async client | ||||
|     that isn't horribly documented and un-tested.. | ||||
|     """ | ||||
|     def __init__(self, url: str): | ||||
|         self._client = pymkts.Client(url) | ||||
| 
 | ||||
|     async def _invoke( | ||||
|         self, | ||||
|         meth: Callable, | ||||
|         *args, | ||||
|         **kwargs, | ||||
|     ) -> Any: | ||||
|         return err_on_resp(meth(*args, **kwargs)) | ||||
| 
 | ||||
|     async def destroy( | ||||
|         self, | ||||
|         tbk: Tuple[str, str, str], | ||||
|     ) -> None: | ||||
|         return await self._invoke(self._client.destroy, mk_tbk(tbk)) | ||||
| 
 | ||||
|     async def list_symbols( | ||||
|         self, | ||||
|         tbk: str, | ||||
|     ) -> List[str]: | ||||
|         return await self._invoke(self._client.list_symbols, mk_tbk(tbk)) | ||||
| 
 | ||||
|     async def write( | ||||
|         self, | ||||
|         symbol: str, | ||||
|         array: np.ndarray, | ||||
|     ) -> None: | ||||
|         start = time.time() | ||||
|         await self._invoke( | ||||
|             self._client.write, | ||||
|             array, | ||||
|             _tick_tbk.format(symbol), | ||||
|             isvariablelength=True | ||||
|         ) | ||||
|         log.debug(f"{symbol} write time (s): {time.time() - start}") | ||||
| 
 | ||||
|     def query( | ||||
|         self, | ||||
|         symbol, | ||||
|         tbk: Tuple[str, str] = _tick_tbk_ids, | ||||
|     ) -> pd.DataFrame: | ||||
|         # XXX: causes crash | ||||
|         # client.query(pymkts.Params(symbol, '*', 'OHLCV')) | ||||
|         result = self._client.query( | ||||
|             pymkts.Params(symbol, *tbk), | ||||
|         ) | ||||
|         return result.first().df() | ||||
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def get_client( | ||||
|     host: str = 'localhost', | ||||
|     port: int = 5995 | ||||
| 
 | ||||
| ) -> MarketstoreClient: | ||||
|     ''' | ||||
|     Load a ``anyio_marketstore`` grpc client connected | ||||
|     to an existing ``marketstore`` server. | ||||
| 
 | ||||
|     ''' | ||||
|     async with open_marketstore_client( | ||||
|         host, | ||||
|         port | ||||
|     ) as client: | ||||
|         yield client | ||||
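|     # a minimal usage sketch (assumes a locally running ``marketstored``): | ||||
|     # | ||||
|     #   async with get_client() as client: | ||||
|     #       syms = await client.list_symbols() | ||||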
| 
 | ||||
| 
 | ||||
| class MarketStoreError(Exception): | ||||
|     "Generic marketstore client error" | ||||
| 
 | ||||
| 
 | ||||
| # def err_on_resp(response: dict) -> None: | ||||
| #     """Raise any errors found in responses from client request. | ||||
| #     """ | ||||
| #     responses = response['responses'] | ||||
| #     if responses is not None: | ||||
| #         for r in responses: | ||||
| #             err = r['error'] | ||||
| #             if err: | ||||
| #                 raise MarketStoreError(err) | ||||
| 
 | ||||
| 
 | ||||
| # map of seconds ints to "time frame" accepted keys | ||||
| tf_in_1s = bidict({ | ||||
|     1: '1Sec', | ||||
|     60: '1Min', | ||||
|     60*5: '5Min', | ||||
|     60*15: '15Min', | ||||
|     60*30: '30Min', | ||||
|     60*60: '1H', | ||||
|     60*60*24: '1D', | ||||
| }) | ||||
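| # e.g. both directions of the mapping (sketch): | ||||
| #   tf_in_1s[60]              # -> '1Min' | ||||
| #   tf_in_1s.inverse['1Min']  # -> 60 | ||||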
| 
 | ||||
| 
 | ||||
| class Storage: | ||||
|     ''' | ||||
|     High level storage api for both real-time and historical ingest. | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|         self, | ||||
|         client: MarketstoreClient, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # TODO: eventually this should be an api/interface type that | ||||
|         # ensures we can support multiple tsdb backends. | ||||
|         self.client = client | ||||
| 
 | ||||
|         # series' cache from tsdb reads | ||||
|         self._arrays: dict[str, np.ndarray] = {} | ||||
| 
 | ||||
|     async def list_keys(self) -> list[str]: | ||||
|         return await self.client.list_symbols() | ||||
| 
 | ||||
|     async def search_keys(self, pattern: str) -> list[str]: | ||||
|         ''' | ||||
|         Search for time series key in the storage backend. | ||||
| 
 | ||||
|         ''' | ||||
|         ... | ||||
| 
 | ||||
|     async def write_ticks(self, ticks: list) -> None: | ||||
|         ... | ||||
| 
 | ||||
|     async def load( | ||||
|         self, | ||||
|         fqsn: str, | ||||
| 
 | ||||
|     ) -> tuple[ | ||||
|         dict[int, np.ndarray],  # timeframe (in secs) to series | ||||
|         Optional[datetime],  # first dt | ||||
|         Optional[datetime],  # last dt | ||||
|     ]: | ||||
| 
 | ||||
|         first_tsdb_dt, last_tsdb_dt = None, None | ||||
|         tsdb_arrays = await self.read_ohlcv(fqsn) | ||||
|         log.info(f'Loaded tsdb history {tsdb_arrays}') | ||||
| 
 | ||||
|         if tsdb_arrays: | ||||
|             fastest = list(tsdb_arrays.values())[0] | ||||
|             times = fastest['Epoch'] | ||||
|             first, last = times[0], times[-1] | ||||
|             first_tsdb_dt, last_tsdb_dt = map( | ||||
|                 pendulum.from_timestamp, [first, last] | ||||
|             ) | ||||
| 
 | ||||
|         return tsdb_arrays, first_tsdb_dt, last_tsdb_dt | ||||
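|         # NB: a sketch of the epoch -> datetime mapping used above: | ||||
|         #   pendulum.from_timestamp(1640995200) | ||||
|         #   # -> DateTime(2022, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC')) | ||||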
| 
 | ||||
|     async def read_ohlcv( | ||||
|         self, | ||||
|         fqsn: str, | ||||
|         timeframe: Optional[Union[int, str]] = None, | ||||
|         end: Optional[int] = None, | ||||
| 
 | ||||
|     ) -> Union[dict, np.ndarray]: | ||||
|         client = self.client | ||||
|         syms = await client.list_symbols() | ||||
| 
 | ||||
|         if fqsn not in syms: | ||||
|             return {} | ||||
| 
 | ||||
|         tfstr = tf_in_1s[1] | ||||
| 
 | ||||
|         params = Params( | ||||
|             symbols=fqsn, | ||||
|             timeframe=tfstr, | ||||
|             attrgroup='OHLCV', | ||||
|             end=end, | ||||
|             # limit_from_start=True, | ||||
| 
 | ||||
|             # TODO: figure the max limit here given the | ||||
|             # ``purerpc`` msg size limit: 33554432 | ||||
|             limit=int(800e3), | ||||
|         ) | ||||
| 
 | ||||
|         if timeframe is None: | ||||
|             log.info(f'starting {fqsn} tsdb granularity scan..') | ||||
|             # loop through and try to find highest granularity | ||||
|             for tfstr in tf_in_1s.values(): | ||||
|                 try: | ||||
|                     log.info(f'querying for {tfstr}@{fqsn}') | ||||
|                     params.set('timeframe', tfstr) | ||||
|                     result = await client.query(params) | ||||
|                     break | ||||
| 
 | ||||
|                 except purerpc.grpclib.exceptions.UnknownError: | ||||
|                     # XXX: this is already logged by the container and | ||||
|                     # thus shows up through `marketstored` logs relay. | ||||
|                     # log.warning(f'{tfstr}@{fqsn} not found') | ||||
|                     continue | ||||
|             else: | ||||
|                 return {} | ||||
| 
 | ||||
|         else: | ||||
|             result = await client.query(params) | ||||
| 
 | ||||
|         # TODO: it turns out column access on recarrays is actually slower: | ||||
|         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist | ||||
|         # it might make sense to make these structured arrays? | ||||
|         # Fill out a `numpy` array-results map | ||||
|         arrays = {} | ||||
|         for fqsn, data_set in result.by_symbols().items(): | ||||
|             arrays.setdefault(fqsn, {})[ | ||||
|                 tf_in_1s.inverse[data_set.timeframe] | ||||
|             ] = data_set.array | ||||
| 
 | ||||
|         return arrays[fqsn][timeframe] if timeframe else arrays[fqsn] | ||||
| 
 | ||||
|     async def delete_ts( | ||||
|         self, | ||||
|         key: str, | ||||
|         timeframe: Optional[Union[int, str]] = None, | ||||
| 
 | ||||
|     ) -> bool: | ||||
| 
 | ||||
|         client = self.client | ||||
|         syms = await client.list_symbols() | ||||
|         log.info(f'existing symbol set: {syms}') | ||||
|         # if key not in syms: | ||||
|         #     raise KeyError(f'`{fqsn}` table key not found?') | ||||
| 
 | ||||
|         return await client.destroy(tbk=key) | ||||
| 
 | ||||
|     async def write_ohlcv( | ||||
|         self, | ||||
|         fqsn: str, | ||||
|         ohlcv: np.ndarray, | ||||
|         append_and_duplicate: bool = True, | ||||
|         limit: int = int(800e3), | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # build mkts schema compat array for writing | ||||
|         mkts_dt = np.dtype(_ohlcv_dt) | ||||
|         mkts_array = np.zeros( | ||||
|             len(ohlcv), | ||||
|             dtype=mkts_dt, | ||||
|         ) | ||||
|         # copy from shm array (yes it's this easy): | ||||
|         # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays | ||||
|         mkts_array[:] = ohlcv[[ | ||||
|             'time', | ||||
|             'open', | ||||
|             'high', | ||||
|             'low', | ||||
|             'close', | ||||
|             'volume', | ||||
|         ]] | ||||
| 
 | ||||
|         m, r = divmod(len(mkts_array), limit) | ||||
| 
 | ||||
|         for i in range(1, m + 1): | ||||
|             to_push = mkts_array[(i - 1) * limit:i * limit] | ||||
| 
 | ||||
|             # write to db | ||||
|             resp = await self.client.write( | ||||
|                 to_push, | ||||
|                 tbk=f'{fqsn}/1Sec/OHLCV', | ||||
| 
 | ||||
|                 # NOTE: this will append duplicates | ||||
|                 # for the same timestamp-index. | ||||
|                 # TODO: pre deduplicate? | ||||
|                 isvariablelength=append_and_duplicate, | ||||
|             ) | ||||
| 
 | ||||
|             log.info( | ||||
|                 f'Wrote {to_push.size} datums to tsdb\n' | ||||
|             ) | ||||
| 
 | ||||
|             for resp in resp.responses: | ||||
|                 err = resp.error | ||||
|                 if err: | ||||
|                     raise MarketStoreError(err) | ||||
| 
 | ||||
|         if r: | ||||
|             to_push = mkts_array[m*limit:] | ||||
| 
 | ||||
|             # write to db | ||||
|             resp = await self.client.write( | ||||
|                 to_push, | ||||
|                 tbk=f'{fqsn}/1Sec/OHLCV', | ||||
| 
 | ||||
|                 # NOTE: this will append duplicates | ||||
|                 # for the same timestamp-index. | ||||
|                 # TODO: pre deduplicate? | ||||
|                 isvariablelength=append_and_duplicate, | ||||
|             ) | ||||
| 
 | ||||
|             log.info( | ||||
|                 f'Wrote {to_push.size} datums to tsdb\n' | ||||
|             ) | ||||
| 
 | ||||
|             for resp in resp.responses: | ||||
|                 err = resp.error | ||||
|                 if err: | ||||
|                     raise MarketStoreError(err) | ||||
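|         # NB: a worked sketch of the chunking math above with | ||||
|         # hypothetical sizes: len(ohlcv) == 2_000_000, limit == 800_000, | ||||
|         #   m, r = divmod(2_000_000, 800_000)  # -> (2, 400_000) | ||||
|         # -> two full 800k-row writes then one 400k-row remainder write. | ||||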
| 
 | ||||
| 
 | ||||
| @acm | ||||
| async def open_storage_client( | ||||
|     fqsn: str, | ||||
|     period: Optional[Union[int, str]] = None,  # in seconds | ||||
| 
 | ||||
| ) -> Storage: | ||||
|     ''' | ||||
|     Load a storage client which reads and writes series, keyed by fqsn, | ||||
|     in ``numpy`` struct array format. | ||||
| 
 | ||||
|     ''' | ||||
|     async with ( | ||||
|         # eventually a storage backend endpoint | ||||
|         get_client() as client, | ||||
|     ): | ||||
|         # slap on our wrapper api | ||||
|         yield Storage(client) | ||||
| 
 | ||||
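| # a minimal usage sketch (hypothetical fqsn): | ||||
| # | ||||
| #   async with open_storage_client('eurusd.fxcm') as storage: | ||||
| #       tsdb_arrays = await storage.read_ohlcv('eurusd.fxcm') | ||||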
| 
 | ||||
| async def tsdb_history_update( | ||||
|     fqsn: Optional[str] = None, | ||||
| 
 | ||||
| ) -> list[str]: | ||||
| 
 | ||||
|     # TODO: real-time dedicated task for ensuring | ||||
|     # history consistency between the tsdb, shm and real-time feed.. | ||||
| 
 | ||||
|     # update sequence design notes: | ||||
| 
 | ||||
|     # - load existing highest frequency data from mkts | ||||
|     #   * how do we want to offer this to the UI? | ||||
|     #    - lazy loading? | ||||
|     #    - try to load it all and expect graphics caching/diffing | ||||
|     #      to  hide extra bits that aren't in view? | ||||
| 
 | ||||
|     # - compute the diff between latest data from broker and shm | ||||
|     #   * use sql api in mkts to determine where the backend should | ||||
|     #     start querying for data? | ||||
|     #   * append any diff with new shm length | ||||
|     #   * determine missing (gapped) history by scanning | ||||
|     #   * how far back do we look? | ||||
| 
 | ||||
|     # - begin rt update ingest and aggregation | ||||
|     #   * could start by always writing ticks to mkts instead of | ||||
|     #     worrying about a shm queue for now. | ||||
|     #   * we have a short list of shm queues worth groking: | ||||
|     #     - https://github.com/pikers/piker/issues/107 | ||||
|     #   * the original data feed arch blurb: | ||||
|     #     - https://github.com/pikers/piker/issues/98 | ||||
|     # | ||||
|     profiler = pg.debug.Profiler( | ||||
|         disabled=False,  # not pg_profile_enabled(), | ||||
|         delayed=False, | ||||
|     ) | ||||
| 
 | ||||
|     async with ( | ||||
|         open_storage_client(fqsn) as storage, | ||||
| 
 | ||||
|         maybe_open_feed( | ||||
|             [fqsn], | ||||
|             start_stream=False, | ||||
| 
 | ||||
|         ) as (feed, stream), | ||||
|     ): | ||||
|         profiler(f'opened feed for {fqsn}') | ||||
| 
 | ||||
| 
 | ||||
|         to_append = feed.shm.array | ||||
|         to_prepend = None | ||||
| 
 | ||||
|         if fqsn: | ||||
|             symbol = feed.symbols.get(fqsn) | ||||
|             if symbol: | ||||
|                 fqsn = symbol.front_fqsn() | ||||
| 
 | ||||
|             # diff db history with shm and only write the missing portions | ||||
|             ohlcv = feed.shm.array | ||||
| 
 | ||||
|             # TODO: use pg profiler | ||||
|             tsdb_arrays = await storage.read_ohlcv(fqsn) | ||||
|             # hist diffing | ||||
|             if tsdb_arrays: | ||||
|                 onesec = tsdb_arrays[1] | ||||
|                 to_append = ohlcv[ohlcv['time'] > onesec['Epoch'][-1]] | ||||
|                 to_prepend = ohlcv[ohlcv['time'] < onesec['Epoch'][0]] | ||||
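|                 # NB: a toy sketch of the masks above: with shm times | ||||
|                 # [8, 9, 10, 11] and tsdb 1s Epochs [9, 10], | ||||
|                 #   to_append  -> the time == 11 row (newer than tsdb) | ||||
|                 #   to_prepend -> the time == 8 row (older than tsdb) | ||||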
| 
 | ||||
|             profiler('Finished db arrays diffs') | ||||
| 
 | ||||
|         syms = await storage.client.list_symbols() | ||||
|         log.info(f'Existing tsdb symbol set:\n{pformat(syms)}') | ||||
|         profiler(f'listed symbols {syms}') | ||||
| 
 | ||||
|         # TODO: ask if user wants to write history for detected | ||||
|         # available shm buffers? | ||||
|         from tractor.trionics import ipython_embed | ||||
|         await ipython_embed() | ||||
| 
 | ||||
|         # for array in [to_append, to_prepend]: | ||||
|         #     if array is None: | ||||
|         #         continue | ||||
| 
 | ||||
|         #     log.info( | ||||
|         #         f'Writing datums {array.size} -> to tsdb from shm\n' | ||||
|         #     ) | ||||
|         #     await storage.write_ohlcv(fqsn, array) | ||||
| 
 | ||||
|         # profiler('Finished db writes') | ||||
| 
 | ||||
| 
 | ||||
| async def ingest_quote_stream( | ||||
|     symbols: list[str], | ||||
|     brokername: str, | ||||
|     tries: int = 1, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     """Ingest a broker quote stream into marketstore in (sampled) tick format. | ||||
|     """ | ||||
|     async with open_feed( | ||||
|         brokername, | ||||
|         symbols, | ||||
|         loglevel=loglevel, | ||||
|     ) as (first_quotes, qstream): | ||||
|     ''' | ||||
|     Ingest a broker quote stream into a ``marketstore`` tsdb. | ||||
| 
 | ||||
|         quote_cache = first_quotes.copy() | ||||
|     ''' | ||||
|     async with ( | ||||
|         maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed, | ||||
|         get_client() as ms_client, | ||||
|     ): | ||||
|         async for quotes in feed.stream: | ||||
|             log.info(quotes) | ||||
|             for symbol, quote in quotes.items(): | ||||
|                 for tick in quote.get('ticks', ()): | ||||
|                     ticktype = tick.get('type', 'n/a') | ||||
| 
 | ||||
|         async with get_client() as ms_client: | ||||
|             # techtonic tick write | ||||
|             array = quote_to_marketstore_structarray({ | ||||
|                 'IsTrade': 1 if ticktype == 'trade' else 0, | ||||
|                 'IsBid': 1 if ticktype in ('bid', 'bsize') else 0, | ||||
|                 'Price': tick.get('price'), | ||||
|                 'Size': tick.get('size') | ||||
|             }, last_fill=quote.get('broker_ts', None)) | ||||
| 
 | ||||
|             # start ingest to marketstore | ||||
|             async for quotes in qstream: | ||||
|                 log.info(quotes) | ||||
|                 for symbol, quote in quotes.items(): | ||||
|             await ms_client.write(array, _tick_tbk) | ||||
| 
 | ||||
|                     # remap tick strs to ints | ||||
|                     quote['tick'] = _tick_map[quote.get('tick', 'Equal')] | ||||
|             # LEGACY WRITE LOOP (using old tick dt) | ||||
|             # quote_cache = { | ||||
|             #     'size': 0, | ||||
|             #     'tick': 0 | ||||
|             # } | ||||
| 
 | ||||
|                     # check for volume update (i.e. did trades happen | ||||
|                     # since last quote) | ||||
|                     new_vol = quote.get('volume', None) | ||||
|                     if new_vol is None: | ||||
|                         log.debug(f"No fills for {symbol}") | ||||
|                         if new_vol == quote_cache.get('volume'): | ||||
|                             # should never happen due to field diffing | ||||
|                             # on sender side | ||||
|                             log.error( | ||||
|                                 f"{symbol}: got same volume as last quote?") | ||||
|             # async for quotes in qstream: | ||||
|             #     log.info(quotes) | ||||
|             #     for symbol, quote in quotes.items(): | ||||
| 
 | ||||
|                     quote_cache.update(quote) | ||||
|             #         # remap tick strs to ints | ||||
|             #         quote['tick'] = _tick_map[quote.get('tick', 'Equal')] | ||||
| 
 | ||||
|                     a = quote_to_marketstore_structarray( | ||||
|                         quote, | ||||
|                         # TODO: check this closer to the broker query api | ||||
|                         last_fill=quote.get('fill_time', '') | ||||
|                     ) | ||||
|                     await ms_client.write(symbol, a) | ||||
|             #         # check for volume update (i.e. did trades happen | ||||
|             #         # since last quote) | ||||
|             #         new_vol = quote.get('volume', None) | ||||
|             #         if new_vol is None: | ||||
|             #             log.debug(f"No fills for {symbol}") | ||||
|             #             if new_vol == quote_cache.get('volume'): | ||||
|             #                 # should never happen due to field diffing | ||||
|             #                 # on sender side | ||||
|             #                 log.error( | ||||
|             #                     f"{symbol}: got same volume as last quote?") | ||||
| 
 | ||||
|             #         quote_cache.update(quote) | ||||
| 
 | ||||
|             #         a = quote_to_marketstore_structarray( | ||||
|             #             quote, | ||||
|             #             # TODO: check this closer to the broker query api | ||||
|             #             last_fill=quote.get('fill_time', '') | ||||
|             #         ) | ||||
|             #         await ms_client.write(symbol, a) | ||||
| 
 | ||||
| 
 | ||||
| async def stream_quotes( | ||||
|     symbols: list[str], | ||||
|     host: str = 'localhost', | ||||
|     port: int = 5993, | ||||
|     diff_cached: bool = True, | ||||
|     loglevel: str = None, | ||||
| 
 | ||||
| ) -> None: | ||||
|     """Open a symbol stream from a running instance of marketstore and | ||||
|     ''' | ||||
|     Open a symbol stream from a running instance of marketstore and | ||||
|     log to console. | ||||
|     """ | ||||
| 
 | ||||
|     ''' | ||||
|     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||
|     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||
| 
 | ||||
|     tbks: dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols} | ||||
| 
 | ||||
|     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws: | ||||
|         # send subs topics to server | ||||
|  | @ -271,7 +627,7 @@ async def stream_quotes( | |||
|         ) | ||||
|         log.info(resp) | ||||
| 
 | ||||
|         async def recv() -> dict[str, Any]: | ||||
|             return msgpack.loads((await ws.get_message()), encoding='utf-8') | ||||
| 
 | ||||
|         streams = (await recv())['streams'] | ||||
|  |  | |||
|  | @ -167,6 +167,7 @@ def _wma( | |||
| 
 | ||||
|     assert length == len(weights) | ||||
| 
 | ||||
|     # lol, for long sequences this is nutso slow and expensive.. | ||||
|     return np.convolve(signal, weights, 'valid') | ||||
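|     # NB: 'valid' mode yields len(signal) - len(weights) + 1 samples, | ||||
|     # e.g. (sketch): | ||||
|     #   np.convolve([1, 2, 3, 4], [.5, .5], 'valid') -> [1.5, 2.5, 3.5] | ||||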
| 
 | ||||
| 
 | ||||
|  |  | |||
|  | @ -309,7 +309,7 @@ async def flow_rates( | |||
| 
 | ||||
|         if period > 1: | ||||
|             trade_rate_wma = _wma( | ||||
|                 dvlm_shm.array['trade_count'], | ||||
|                 dvlm_shm.array['trade_count'][-period:], | ||||
|                 period, | ||||
|                 weights=weights, | ||||
|             ) | ||||
|  | @ -332,7 +332,7 @@ async def flow_rates( | |||
| 
 | ||||
|         if period > 1: | ||||
|             dark_trade_rate_wma = _wma( | ||||
|                 dvlm_shm.array['dark_trade_count'], | ||||
|                 dvlm_shm.array['dark_trade_count'][-period:], | ||||
|                 period, | ||||
|                 weights=weights, | ||||
|             ) | ||||
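| 
 | ||||
|             # NB: since the input is sliced to exactly ``period`` samples, | ||||
|             # the 'valid' convolution in ``_wma()`` yields a single point. | ||||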
|  |  | |||
|  | @ -223,8 +223,9 @@ class DynamicDateAxis(Axis): | |||
|     ) -> List[str]: | ||||
| 
 | ||||
|         chart = self.linkedsplits.chart | ||||
|         flow = chart._flows[chart.name] | ||||
|         shm = flow.shm | ||||
|         bars = shm.array | ||||
|         first = shm._first.value | ||||
| 
 | ||||
|         bars_len = len(bars) | ||||
|  |  | |||
|  | @ -34,9 +34,7 @@ from PyQt5.QtWidgets import ( | |||
|     QVBoxLayout, | ||||
|     QSplitter, | ||||
| ) | ||||
| import msgspec | ||||
| import numpy as np | ||||
| # from pydantic import BaseModel | ||||
| import pyqtgraph as pg | ||||
| import trio | ||||
| 
 | ||||
|  | @ -49,6 +47,7 @@ from ._cursor import ( | |||
|     Cursor, | ||||
|     ContentsLabel, | ||||
| ) | ||||
| from ..data._sharedmem import ShmArray | ||||
| from ._l1 import L1Labels | ||||
| from ._ohlc import BarItems | ||||
| from ._curve import FastAppendCurve | ||||
|  | @ -60,15 +59,12 @@ from ._style import ( | |||
| ) | ||||
| from ..data.feed import Feed | ||||
| from ..data._source import Symbol | ||||
| from ..log import get_logger | ||||
| from ._interaction import ChartView | ||||
| from ._forms import FieldsForm | ||||
| from .._profile import pg_profile_enabled, ms_slower_then | ||||
| from ._overlay import PlotItemOverlay | ||||
| from ._flows import Flow | ||||
| 
 | ||||
| if TYPE_CHECKING: | ||||
|     from ._display import DisplayState | ||||
|  | @ -419,7 +415,7 @@ class LinkedSplits(QWidget): | |||
|         self, | ||||
| 
 | ||||
|         symbol: Symbol, | ||||
|         shm: ShmArray, | ||||
|         sidepane: FieldsForm, | ||||
| 
 | ||||
|         style: str = 'bar', | ||||
|  | @ -444,7 +440,7 @@ class LinkedSplits(QWidget): | |||
|         self.chart = self.add_plot( | ||||
| 
 | ||||
|             name=symbol.key, | ||||
|             shm=shm, | ||||
|             style=style, | ||||
|             _is_main=True, | ||||
| 
 | ||||
|  | @ -472,7 +468,7 @@ class LinkedSplits(QWidget): | |||
|         self, | ||||
| 
 | ||||
|         name: str, | ||||
|         shm: ShmArray, | ||||
| 
 | ||||
|         array_key: Optional[str] = None, | ||||
|         style: str = 'line', | ||||
|  | @ -516,7 +512,6 @@ class LinkedSplits(QWidget): | |||
|             name=name, | ||||
|             data_key=array_key or name, | ||||
| 
 | ||||
|             parent=qframe, | ||||
|             linkedsplits=self, | ||||
|             axisItems=axes, | ||||
|  | @ -580,7 +575,7 @@ class LinkedSplits(QWidget): | |||
| 
 | ||||
|             graphics, data_key = cpw.draw_ohlc( | ||||
|                 name, | ||||
|                 shm, | ||||
|                 array_key=array_key | ||||
|             ) | ||||
|             self.cursor.contents_labels.add_label( | ||||
|  | @ -594,7 +589,7 @@ class LinkedSplits(QWidget): | |||
|             add_label = True | ||||
|             graphics, data_key = cpw.draw_curve( | ||||
|                 name, | ||||
|                 shm, | ||||
|                 array_key=array_key, | ||||
|                 color='default_light', | ||||
|             ) | ||||
|  | @ -603,7 +598,7 @@ class LinkedSplits(QWidget): | |||
|             add_label = True | ||||
|             graphics, data_key = cpw.draw_curve( | ||||
|                 name, | ||||
|                 shm, | ||||
|                 array_key=array_key, | ||||
|                 step_mode=True, | ||||
|                 color='davies', | ||||
|  | @ -691,7 +686,6 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|         # the "data view" we generate graphics from | ||||
|         name: str, | ||||
|         data_key: str, | ||||
|         linkedsplits: LinkedSplits, | ||||
| 
 | ||||
|  | @ -744,14 +738,6 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         self._max_l1_line_len: float = 0 | ||||
| 
 | ||||
|         # self.setViewportMargins(0, 0, 0, 0) | ||||
|         self._graphics = {}  # registry of underlying graphics | ||||
| 
 | ||||
|         # registry of overlay curve names | ||||
|         self._flows: dict[str, Flow] = {} | ||||
|  | @ -767,7 +753,6 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         # show background grid | ||||
|         self.showGrid(x=False, y=True, alpha=0.3) | ||||
| 
 | ||||
|         self.cv.enable_auto_yrange() | ||||
| 
 | ||||
|         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem) | ||||
|  | @ -816,14 +801,8 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         Return a range tuple for the bars present in view. | ||||
| 
 | ||||
|         ''' | ||||
|         main_flow = self._flows[self.name] | ||||
|         ifirst, l, lbar, rbar, r, ilast = main_flow.datums_range() | ||||
|         return l, lbar, rbar, r | ||||
| 
 | ||||
|     def curve_width_pxs( | ||||
|  | @ -877,40 +856,51 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|     def default_view( | ||||
|         self, | ||||
|         bars_from_y: int = 3000, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Set the view box to the "default" startup view of the scene. | ||||
| 
 | ||||
|         ''' | ||||
|         flow = self._flows.get(self.name) | ||||
|         if not flow: | ||||
|             log.warning(f'`Flow` for {self.name} not loaded yet?') | ||||
|             return | ||||
| 
 | ||||
|         index = flow.shm.array['index'] | ||||
|         xfirst, xlast = index[0], index[-1] | ||||
|         l, lbar, rbar, r = self.bars_range() | ||||
| 
 | ||||
|         view = self.view | ||||
| 
 | ||||
|         if ( | ||||
|             rbar < 0 | ||||
|             or l < xfirst | ||||
|             or l < 0 | ||||
|             or (rbar - lbar) < 6 | ||||
|         ): | ||||
|             # TODO: set fixed bars count on screen that approx includes as | ||||
|             # many bars as possible before a downsample line is shown. | ||||
|             begin = xlast - bars_from_y | ||||
|             view.setXRange( | ||||
|                 min=begin, | ||||
|                 max=xlast, | ||||
|                 padding=0, | ||||
|             ) | ||||
|             # re-get range | ||||
|             l, lbar, rbar, r = self.bars_range() | ||||
| 
 | ||||
|         # we get the L1 spread label "length" in view coords | ||||
|         # terms now that we've scaled either by user control | ||||
|         # or to the default set of bars as per the immediate block | ||||
|         # above. | ||||
|         marker_pos, l1_len = self.pre_l1_xs() | ||||
|         end = xlast + l1_len + 1 | ||||
|         begin = end - (r - l) | ||||
| 
 | ||||
|         # for debugging | ||||
|         # print( | ||||
|         #     f'bars range: {brange}\n' | ||||
|         #     # f'bars range: {brange}\n' | ||||
|         #     f'xlast: {xlast}\n' | ||||
|         #     f'marker pos: {marker_pos}\n' | ||||
|         #     f'l1 len: {l1_len}\n' | ||||
|  | @ -922,14 +912,13 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         if self._static_yrange == 'axis': | ||||
|             self._static_yrange = None | ||||
| 
 | ||||
|         view.setXRange( | ||||
|             min=begin, | ||||
|             max=end, | ||||
|             padding=0, | ||||
|         ) | ||||
|         self.view.maybe_downsample_graphics() | ||||
|         view._set_yrange() | ||||
|         try: | ||||
|             self.linked.graphics_cycle() | ||||
|         except IndexError: | ||||
|  | @ -960,7 +949,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|     def draw_ohlc( | ||||
|         self, | ||||
|         name: str, | ||||
|         shm: ShmArray, | ||||
| 
 | ||||
|         array_key: Optional[str] = None, | ||||
| 
 | ||||
|  | @ -980,19 +969,21 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         # the np array buffer to be drawn on next render cycle | ||||
|         self.plotItem.addItem(graphics) | ||||
| 
 | ||||
| 
 | ||||
|         data_key = array_key or name | ||||
|         self._graphics[data_key] = graphics | ||||
| 
 | ||||
|         self._flows[data_key] = Flow( | ||||
|             name=name, | ||||
|             plot=self.plotItem, | ||||
|             _shm=shm, | ||||
|             is_ohlc=True, | ||||
|             graphics=graphics, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: i think we can eventually remove this if | ||||
|         # we write the ``Flow.update_graphics()`` method right? | ||||
|         # draw after to allow self.scene() to work... | ||||
|         graphics.draw_from_data(shm.array) | ||||
| 
 | ||||
|         self._add_sticky(name, bg_color='davies') | ||||
| 
 | ||||
|         return graphics, data_key | ||||
|  | @ -1058,7 +1049,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         self, | ||||
| 
 | ||||
|         name: str, | ||||
|         shm: ShmArray, | ||||
| 
 | ||||
|         array_key: Optional[str] = None, | ||||
|         overlay: bool = False, | ||||
|  | @ -1071,7 +1062,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|     ) -> (pg.PlotDataItem, str): | ||||
|         ''' | ||||
|         Draw a "curve" (line plot graphics) for the provided data in | ||||
|         the input shm array ``shm``. | ||||
| 
 | ||||
|         ''' | ||||
|         color = color or self.pen_color or 'default_light' | ||||
|  | @ -1082,6 +1073,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         data_key = array_key or name | ||||
| 
 | ||||
|         # yah, we wrote our own B) | ||||
|         data = shm.array | ||||
|         curve = FastAppendCurve( | ||||
|             y=data[data_key], | ||||
|             x=data['index'], | ||||
|  | @ -1105,16 +1097,14 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         # and is disastrous for performance. | ||||
|         # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache) | ||||
| 
 | ||||
| 
 | ||||
|         pi = pi or self.plotItem | ||||
| 
 | ||||
|         self._flows[data_key] = Flow( | ||||
|             name=name, | ||||
|             plot=pi, | ||||
|             _shm=shm, | ||||
|             is_ohlc=False, | ||||
|             # register curve graphics with this flow | ||||
|             graphics=curve, | ||||
|         ) | ||||
| 
 | ||||
|  | @ -1175,16 +1165,11 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         ) | ||||
|         return last | ||||
| 
 | ||||
|     def update_graphics_from_flow( | ||||
|         self, | ||||
|         graphics_name: str, | ||||
| 
 | ||||
|         array_key: Optional[str] = None, | ||||
| 
 | ||||
| 
 | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> pg.GraphicsObject: | ||||
|  | @ -1192,63 +1177,11 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|         Update the named internal graphics from ``array``. | ||||
| 
 | ||||
|         ''' | ||||
|         flow = self._flows[array_key or graphics_name] | ||||
|         return flow.update_graphics( | ||||
|             array_key=array_key, | ||||
|             **kwargs, | ||||
|         ) | ||||
| 
 | ||||
|     # def _label_h(self, yhigh: float, ylow: float) -> float: | ||||
|     #     # compute contents label "height" in view terms | ||||
|  | @ -1295,7 +1228,7 @@ class ChartPlotWidget(pg.PlotWidget): | |||
| 
 | ||||
|         # TODO: this should go onto some sort of | ||||
|         # data-view thinger..right? | ||||
|         ohlc = self._flows[self.name].shm.array | ||||
| 
 | ||||
|         # XXX: not sure why the time is so off here | ||||
|         # looks like we're gonna have to do some fixing.. | ||||
|  | @ -1341,9 +1274,6 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|             delayed=True, | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
|         # TODO: here we should instead look up the ``Flow.shm.array`` | ||||
|         # and read directly from shm to avoid copying to memory first | ||||
|         # and then reading it again here. | ||||
|  | @ -1353,112 +1283,19 @@ class ChartPlotWidget(pg.PlotWidget): | |||
|             flow is None | ||||
|         ): | ||||
|             log.error(f"flow {flow_key} doesn't exist in chart {self.name} !?") | ||||
|             key = res = 0, 0 | ||||
| 
 | ||||
|         else: | ||||
|             first, l, lbar, rbar, r, last = bars_range or flow.datums_range() | ||||
|             profiler(f'{self.name} got bars range') | ||||
| 
 | ||||
|             key = round(lbar), round(rbar) | ||||
|             res = flow.maxmin(*key) | ||||
|             if res == (None, None): | ||||
|                 log.error( | ||||
|                     f"{flow_key} no mxmn for bars_range => {key} !?" | ||||
|                 ) | ||||
|                 res = 0, 0 | ||||
| 
 | ||||
|         profiler(f'yrange mxmn: {key} -> {res}') | ||||
|         return res | ||||
| 
 | ||||
| 
 | ||||
| # class FlowsTable(pydantic.BaseModel): | ||||
| #     ''' | ||||
| #     Data-AGGRegate: high level API onto multiple (categorized) | ||||
| #     ``Flow``s with high level processing routines for | ||||
| #     multi-graphics computations and display. | ||||
| 
 | ||||
| #     ''' | ||||
| #     flows: dict[str, np.ndarray] = {} | ||||
| 
 | ||||
| 
 | ||||
| class Flow(msgspec.Struct):  # , frozen=True): | ||||
|     ''' | ||||
|     (FinancialSignal-)Flow compound type which wraps a real-time | ||||
|     graphics (curve) and its backing data stream together for high level | ||||
|     access and control. | ||||
| 
 | ||||
|     The intention is for this type to eventually be capable of shm-passing | ||||
|     of incrementally updated graphics stream data between actors. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str | ||||
|     plot: pg.PlotItem | ||||
|     is_ohlc: bool = False | ||||
|     graphics: pg.GraphicsObject | ||||
| 
 | ||||
|     # TODO: hackery to be able to set a shm later | ||||
|     # but whilst also allowing this type to be hashable, | ||||
|     # likely will require serializable token that is used to attach | ||||
|     # to the underlying shm ref after startup? | ||||
|     _shm: Optional[ShmArray] = None  # currently, may be filled in "later" | ||||
| 
 | ||||
|     # cache of y-range values per x-range input. | ||||
|     _mxmns: dict[tuple[int, int], tuple[float, float]] = {} | ||||
| 
 | ||||
|     @property | ||||
|     def shm(self) -> ShmArray: | ||||
|         return self._shm | ||||
| 
 | ||||
|     @shm.setter | ||||
|     def shm(self, shm: ShmArray) -> ShmArray: | ||||
|         self._shm = shm | ||||
| 
 | ||||
|     def maxmin( | ||||
|         self, | ||||
|         lbar, | ||||
|         rbar, | ||||
| 
 | ||||
|     ) -> tuple[float, float]: | ||||
|         ''' | ||||
|         Compute the cached max and min y-range values for a given | ||||
|         x-range determined by ``lbar`` and ``rbar``. | ||||
| 
 | ||||
|         ''' | ||||
|         rkey = (lbar, rbar) | ||||
|         cached_result = self._mxmns.get(rkey) | ||||
|         if cached_result: | ||||
|             return cached_result | ||||
| 
 | ||||
|         shm = self.shm | ||||
|         if shm is None: | ||||
|             mxmn = None | ||||
| 
 | ||||
|         else:  # new block for profiling?.. | ||||
|             arr = shm.array | ||||
| 
 | ||||
|             # build relative indexes into shm array | ||||
|             # TODO: should we just add/use a method | ||||
|             # on the shm to do this? | ||||
|             ifirst = arr[0]['index'] | ||||
|             slice_view = arr[ | ||||
|                 lbar - ifirst: | ||||
|                 (rbar - ifirst) + 1 | ||||
|             ] | ||||
| 
 | ||||
|             if not slice_view.size: | ||||
|                 mxmn = None | ||||
| 
 | ||||
|             else: | ||||
|                 if self.is_ohlc: | ||||
|                     ylow = np.min(slice_view['low']) | ||||
|                     yhigh = np.max(slice_view['high']) | ||||
| 
 | ||||
|                 else: | ||||
|                     view = slice_view[self.name] | ||||
|                     ylow = np.min(view) | ||||
|                     yhigh = np.max(view) | ||||
| 
 | ||||
|                 mxmn = ylow, yhigh | ||||
| 
 | ||||
|             if mxmn is not None: | ||||
|                 # cache new mxmn result | ||||
|                 self._mxmns[rkey] = mxmn | ||||
| 
 | ||||
|             return mxmn | ||||
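| 
 | ||||
|     # NB: a sketch of the caching behaviour above: repeated y-range | ||||
|     # queries over the same x-range hit ``_mxmns`` instead of | ||||
|     # re-scanning shm, e.g. | ||||
|     #   flow.maxmin(0, 100)  # scans the shm slice, caches under (0, 100) | ||||
|     #   flow.maxmin(0, 100)  # returns the cached (ylow, yhigh) tuple | ||||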
|  |  | |||
|  | @ -162,7 +162,7 @@ def ohlc_to_m4_line( | |||
|             flat, | ||||
|             px_width=px_width, | ||||
|             uppx=uppx, | ||||
|             log_scale=bool(uppx) | ||||
|             # log_scale=bool(uppx) | ||||
|         ) | ||||
|         x = np.broadcast_to(x[:, None], y.shape) | ||||
|         x = (x + np.array([-0.43, 0, 0, 0.43])).flatten() | ||||
|  | @ -181,7 +181,8 @@ def ds_m4( | |||
|     # in display-device-local pixel units. | ||||
|     px_width: int, | ||||
|     uppx: Optional[float] = None, | ||||
|     log_scale: bool = True, | ||||
|     xrange: Optional[float] = None, | ||||
|     # log_scale: bool = True, | ||||
| 
 | ||||
| ) -> tuple[int, np.ndarray, np.ndarray]: | ||||
|     ''' | ||||
|  | @ -210,50 +211,77 @@ def ds_m4( | |||
| 
 | ||||
|     # optionally log-scale down the "supposed pxs on screen" | ||||
|     # as the units-per-px (uppx) get's large. | ||||
|     if log_scale: | ||||
|         assert uppx, 'You must provide a `uppx` value to use log scaling!' | ||||
|     # if log_scale: | ||||
|     #     assert uppx, 'You must provide a `uppx` value to use log scaling!' | ||||
|     #     # uppx = uppx * math.log(uppx, 2) | ||||
| 
 | ||||
|         # scaler = 2**7 / (1 + math.log(uppx, 2)) | ||||
|         scaler = round( | ||||
|             max( | ||||
|                 # NOTE: found that a 16x px width brought greater | ||||
|                 # detail, likely due to dpi scaling? | ||||
|                 # px_width=px_width * 16, | ||||
|                 2**7 / (1 + math.log(uppx, 2)), | ||||
|                 1 | ||||
|             ) | ||||
|         ) | ||||
|         px_width *= scaler | ||||
|     #     # scaler = 2**7 / (1 + math.log(uppx, 2)) | ||||
|     #     scaler = round( | ||||
|     #         max( | ||||
|     #             # NOTE: found that a 16x px width brought greater | ||||
|     #             # detail, likely due to dpi scaling? | ||||
|     #             # px_width=px_width * 16, | ||||
|     #             2**7 / (1 + math.log(uppx, 2)), | ||||
|     #             1 | ||||
|     #         ) | ||||
|     #     ) | ||||
|     #     px_width *= scaler | ||||
| 
 | ||||
|     assert px_width > 1  # width of screen in pxs? | ||||
|     # else: | ||||
|     #     px_width *= 16 | ||||
| 
 | ||||
|     # should never get called unless actually needed | ||||
|     assert px_width > 1 and uppx > 0 | ||||
| 
 | ||||
|     # NOTE: if we didn't pre-slice the data to downsample | ||||
|     # you could in theory pass these as the slicing params, | ||||
|     # do we care though since we can always just pre-slice the | ||||
|     # input? | ||||
|     x_start = x[0]  # x value start/lowest in domain | ||||
| 
 | ||||
|     if xrange is None: | ||||
|         x_end = x[-1]  # x end value/highest in domain | ||||
|         xrange = (x_end - x_start) | ||||
| 
 | ||||
|     # XXX: always round up on the input pixels | ||||
|     # lnx = len(x) | ||||
|     # uppx *= max(4 / (1 + math.log(uppx, 2)), 1) | ||||
| 
 | ||||
|     pxw = math.ceil(xrange / uppx) | ||||
|     # px_width = math.ceil(px_width) | ||||
| 
 | ||||
|     # ratio of indexed x-value to width of raster in pixels. | ||||
|     # this is more or less, uppx: units-per-pixel. | ||||
|     w = x_range / float(px_width) | ||||
|     # w = xrange / float(px_width) | ||||
|     # uppx = uppx * math.log(uppx, 2) | ||||
|     # w2 = px_width / uppx | ||||
| 
 | ||||
|     # scale up the width as the uppx get's large | ||||
|     w = uppx  # * math.log(uppx, 666) | ||||
| 
 | ||||
|     # ensure we make more then enough | ||||
|     # frames (windows) for the output pixel | ||||
|     frames = pxw | ||||
| 
 | ||||
|     # if we have more and then exact integer's | ||||
|     # (uniform quotient output) worth of datum-domain-points | ||||
|     # per windows-frame, add one more window to ensure | ||||
|     # we have room for all output down-samples. | ||||
|     pts_per_pixel, r = divmod(xrange, frames) | ||||
|     if r: | ||||
|         # while r: | ||||
|         frames += 1 | ||||
|         pts_per_pixel, r = divmod(xrange, frames) | ||||
| 
 | ||||
|     # print( | ||||
|     #     f'uppx: {uppx}\n' | ||||
|     #     f'xrange: {xrange}\n' | ||||
|     #     f'px_width: {px_width}\n' | ||||
|     #     f'pxw: {pxw}\n' | ||||
|     #     f'WTF w:{w}, w2:{w2}\n' | ||||
|     #     f'frames: {frames}\n' | ||||
|     # ) | ||||
|     assert frames >= (xrange / uppx) | ||||
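| 
 | ||||
|     # NB: a worked sketch with hypothetical inputs: xrange == 1000 | ||||
|     # datums at uppx == 4 units-per-pixel gives | ||||
|     #   pxw = math.ceil(1000 / 4) -> 250 frames, and | ||||
|     #   divmod(1000, 250) -> (4, 0): 4 datums per frame, no remainder, | ||||
|     # so the assert above holds exactly. | ||||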
| 
 | ||||
|     # call into ``numba`` | ||||
|     nb, i_win, y_out = _m4( | ||||
|  |  | |||
|  | @ -43,8 +43,8 @@ log = get_logger(__name__) | |||
| # latency (in terms of perceived lag in cross hair) so really be sure | ||||
| # there's an improvement if you want to change it! | ||||
| 
 | ||||
| _mouse_rate_limit = 60  # TODO; should we calc current screen refresh rate? | ||||
| _debounce_delay = 0 | ||||
| _ch_label_opac = 1 | ||||
| 
 | ||||
| 
 | ||||
|  | @ -191,6 +191,9 @@ class ContentsLabel(pg.LabelItem): | |||
| 
 | ||||
|         self.setText( | ||||
|             "<b>i</b>:{index}<br/>" | ||||
|             # NB: these fields must be indexed in the correct order via | ||||
|             # the slice syntax below. | ||||
|             "<b>epoch</b>:{}<br/>" | ||||
|             "<b>O</b>:{}<br/>" | ||||
|             "<b>H</b>:{}<br/>" | ||||
|             "<b>L</b>:{}<br/>" | ||||
|  | @ -198,7 +201,15 @@ class ContentsLabel(pg.LabelItem): | |||
|             "<b>V</b>:{}<br/>" | ||||
|             "<b>wap</b>:{}".format( | ||||
|                 *array[index - first][ | ||||
|                     ['open', 'high', 'low', 'close', 'volume', 'bar_wap'] | ||||
|                     [ | ||||
|                         'time', | ||||
|                         'open', | ||||
|                         'high', | ||||
|                         'low', | ||||
|                         'close', | ||||
|                         'volume', | ||||
|                         'bar_wap', | ||||
|                     ] | ||||
|                 ], | ||||
|                 name=name, | ||||
|                 index=index, | ||||
|  | @ -243,13 +254,13 @@ class ContentsLabels: | |||
|     def update_labels( | ||||
|         self, | ||||
|         index: int, | ||||
|         # array_name: str, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # for name, (label, update) in self._labels.items(): | ||||
|         for chart, name, label, update in self._labels: | ||||
| 
 | ||||
|             flow = chart._flows[name] | ||||
|             array = flow.shm.array | ||||
| 
 | ||||
|             if not ( | ||||
|                 index >= 0 | ||||
|                 and index < array[-1]['index'] | ||||
|  | @ -258,8 +269,6 @@ class ContentsLabels: | |||
|                 print('WTF out of range?') | ||||
|                 continue | ||||
| 
 | ||||
| 
 | ||||
|             # call provided update func with data point | ||||
|             try: | ||||
|                 label.show() | ||||
|  | @ -295,7 +304,8 @@ class ContentsLabels: | |||
| 
 | ||||
| 
 | ||||
| class Cursor(pg.GraphicsObject): | ||||
|     ''' | ||||
|     Multi-plot cursor for use on a ``LinkedSplits`` chart (set). | ||||
| 
 | ||||
|     ''' | ||||
|     def __init__( | ||||
|  | @ -310,7 +320,7 @@ class Cursor(pg.GraphicsObject): | |||
| 
 | ||||
|         self.linked = linkedsplits | ||||
|         self.graphics: dict[str, pg.GraphicsObject] = {} | ||||
|         self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa | ||||
|         self.active_plot = None | ||||
|         self.digits: int = digits | ||||
|         self._datum_xy: tuple[int, float] = (0, 0) | ||||
|  | @ -439,7 +449,10 @@ class Cursor(pg.GraphicsObject): | |||
|         if plot.linked.xaxis_chart is plot: | ||||
|             xlabel = self.xaxis_label = XAxisLabel( | ||||
|                 parent=self.plots[plot_index].getAxis('bottom'), | ||||
|                 # parent=self.plots[plot_index].pi_overlay.get_axis( | ||||
|                 #     plot.plotItem, 'bottom' | ||||
|                 # ), | ||||
| 
 | ||||
|                 opacity=_ch_label_opac, | ||||
|                 bg_color=self.label_color, | ||||
|             ) | ||||
|  | @ -457,9 +470,12 @@ class Cursor(pg.GraphicsObject): | |||
|     ) -> LineDot: | ||||
|         # if this plot contains curves add line dot "cursors" to denote | ||||
|         # the current sample under the mouse | ||||
|         main_flow = plot._flows[plot.name] | ||||
|         # read out last index | ||||
|         i = main_flow.shm.array[-1]['index'] | ||||
|         cursor = LineDot( | ||||
|             curve, | ||||
|             index=i, | ||||
|             plot=plot | ||||
|         ) | ||||
|         plot.addItem(cursor) | ||||
|  |  | |||
|  | @ -44,55 +44,78 @@ from ..log import get_logger | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| # TODO: numba this instead.. | ||||
| # def step_path_arrays_from_1d( | ||||
| #     x: np.ndarray, | ||||
| #     y: np.ndarray, | ||||
| #     include_endpoints: bool = True, | ||||
| 
 | ||||
| # ) -> (np.ndarray, np.ndarray): | ||||
| #     ''' | ||||
| #     Generate a "step mode" curve aligned with OHLC style bars | ||||
| #     such that each segment spans each bar (aka "centered" style). | ||||
| 
 | ||||
| #     ''' | ||||
| #     # y_out = y.copy() | ||||
| #     # x_out = x.copy() | ||||
| 
 | ||||
| #     # x2 = np.empty( | ||||
| #     #     # the data + 2 endpoints on either end for | ||||
| #     #     # "termination of the path". | ||||
| #     #     (len(x) + 1, 2), | ||||
| #     #     # we want to align with OHLC or other sampling style | ||||
| #     #     # bars likely so we need fractional values | ||||
| #     #     dtype=float, | ||||
| #     # ) | ||||
| 
 | ||||
| #     x2 = np.broadcast_to( | ||||
| #         x[:, None], | ||||
| #         ( | ||||
| #             x.size + 1, | ||||
| #             # 4,  # only ohlc | ||||
| #             2, | ||||
| #         ), | ||||
| #     ) + np.array([-0.5, 0.5]) | ||||
| 
 | ||||
| #     # x2[0] = x[0] - 0.5 | ||||
| #     # x2[1] = x[0] + 0.5 | ||||
| #     # x2[0, 0] = x[0] - 0.5 | ||||
| #     # x2[0, 1] = x[0] + 0.5 | ||||
| #     # x2[1:] = x[:, np.newaxis] + 0.5 | ||||
| #     # import pdbpp | ||||
| #     # pdbpp.set_trace() | ||||
| 
 | ||||
| #     # flatten to 1-d | ||||
| #     # x_out = x2.reshape(x2.size) | ||||
| #     # x_out = x2 | ||||
| 
 | ||||
| #     # we create a 1d with 2 extra indexes to | ||||
| #     # hold the start and (current) end value for the steps | ||||
| #     # on either end | ||||
| #     y2 = np.empty( | ||||
| #         (len(y) + 1, 2), | ||||
| #         dtype=y.dtype, | ||||
| #     ) | ||||
| #     y2[:] = y[:, np.newaxis] | ||||
| #     # y2[-1] = 0 | ||||
| 
 | ||||
| #     # y_out = y2 | ||||
| 
 | ||||
| # #     y_out = np.empty( | ||||
| # #         2*len(y) + 2, | ||||
| # #         dtype=y.dtype | ||||
| # #     ) | ||||
| 
 | ||||
| #     # flatten and set 0 endpoints | ||||
| #     # y_out[1:-1] = y2.reshape(y2.size) | ||||
| #     # y_out[0] = 0 | ||||
| #     # y_out[-1] = 0 | ||||
| 
 | ||||
| #     if not include_endpoints: | ||||
| #         return x2[:-1], y2[:-1] | ||||
| 
 | ||||
| #     else: | ||||
| #         return x2, y2 | ||||
| 
 | ||||
| 
 | ||||
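With the original helper now fully commented out above, a minimal sketch of the "centered step" transform it describes may help: each (x, y) sample becomes a flat segment spanning x - 0.5 to x + 0.5, built with the same broadcasting trick as the commented `x2` block (endpoint handling omitted; not the repo's implementation):

    import numpy as np

    def step_arrays(
        x: np.ndarray,
        y: np.ndarray,
    ) -> tuple[np.ndarray, np.ndarray]:
        # each sample becomes a flat segment from x - 0.5 to x + 0.5
        x2 = np.broadcast_to(x[:, None], (x.size, 2)) + np.array([-0.5, 0.5])
        y2 = np.broadcast_to(y[:, None], (y.size, 2))
        return x2.reshape(-1), y2.reshape(-1)

    sx, sy = step_arrays(np.arange(3.), np.array([1., 2., 3.]))
    # sx -> [-0.5, 0.5, 0.5, 1.5, 1.5, 2.5]; sy -> [1, 1, 2, 2, 3, 3]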
| _line_styles: dict[str, int] = { | ||||
|  | @ -119,8 +142,8 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|     def __init__( | ||||
|         self, | ||||
| 
 | ||||
|         x: np.ndarray, | ||||
|         y: np.ndarray, | ||||
|         x: np.ndarray = None, | ||||
|         y: np.ndarray = None, | ||||
|         *args, | ||||
| 
 | ||||
|         step_mode: bool = False, | ||||
|  | @ -137,6 +160,9 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|         # brutaaalll, see comments within.. | ||||
|         self._y = self.yData = y | ||||
|         self._x = self.xData = x | ||||
|         self._vr: Optional[tuple] = None | ||||
|         self._avr: Optional[tuple] = None | ||||
|         self._br = None | ||||
| 
 | ||||
|         self._name = name | ||||
|         self.path: Optional[QtGui.QPainterPath] = None | ||||
|  | @ -150,6 +176,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
| 
 | ||||
|         # self._xrange: tuple[int, int] = self.dataBounds(ax=0) | ||||
|         self._xrange: Optional[tuple[int, int]] = None | ||||
|         # self._x_iv_range = None | ||||
| 
 | ||||
|         # self._last_draw = time.time() | ||||
|         self._in_ds: bool = False | ||||
|  | @ -181,16 +208,12 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|         # interactions slower (such as zooming) and if so maybe if/when | ||||
|         # we implement a "history" mode for the view we disable this in | ||||
|         # that mode? | ||||
|         if step_mode: | ||||
|             # don't enable caching by default for the case where the | ||||
|             # only thing drawn is the "last" line segment which can | ||||
|             # have a weird artifact where it won't be fully drawn to its | ||||
|             # endpoint (something we saw on trade rate curves) | ||||
|             self.setCacheMode( | ||||
|                 QGraphicsItem.DeviceCoordinateCache | ||||
|             ) | ||||
| 
 | ||||
|         self.update() | ||||
|         # if step_mode: | ||||
|         # don't enable caching by default for the case where the | ||||
|         # only thing drawn is the "last" line segment which can | ||||
|         # have a weird artifact where it won't be fully drawn to its | ||||
|         # endpoint (something we saw on trade rate curves) | ||||
|         self.setCacheMode(QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|     # TODO: probably stick this in a new parent | ||||
|     # type which will contain our own version of | ||||
|  | @ -242,7 +265,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             y, | ||||
|             px_width=px_width, | ||||
|             uppx=uppx, | ||||
|             log_scale=bool(uppx) | ||||
|             # log_scale=bool(uppx) | ||||
|         ) | ||||
|         x = np.broadcast_to(x[:, None], y.shape) | ||||
|         # x = (x + np.array([-0.43, 0, 0, 0.43])).flatten() | ||||
|  | @ -266,6 +289,10 @@ class FastAppendCurve(pg.GraphicsObject): | |||
| 
 | ||||
|         view_range: Optional[tuple[int, int]] = None, | ||||
|         profiler: Optional[pg.debug.Profiler] = None, | ||||
|         draw_last: bool = True, | ||||
|         slice_to_head: int = -1, | ||||
|         do_append: bool = True, | ||||
|         should_redraw: bool = False, | ||||
| 
 | ||||
|     ) -> QtGui.QPainterPath: | ||||
|         ''' | ||||
|  | @ -280,78 +307,137 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=ms_slower_then, | ||||
|         ) | ||||
|         # flip_cache = False | ||||
|         flip_cache = False | ||||
| 
 | ||||
|         if self._xrange: | ||||
|             istart, istop = self._xrange | ||||
|         else: | ||||
|             self._xrange = istart, istop = x[0], x[-1] | ||||
| 
 | ||||
|         # compute the length diffs between the first/last index entry in | ||||
|         # the input data and the last indexes we have on record from the | ||||
|         # last time we updated the curve index. | ||||
|         prepend_length = int(istart - x[0]) | ||||
|         append_length = int(x[-1] - istop) | ||||
| 
 | ||||
|         # this is the diff-mode, "data"-rendered index | ||||
|         # tracking var.. | ||||
|         self._xrange = x[0], x[-1] | ||||
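A quick worked example of the length diffs computed above (illustrative values): with the last-recorded range (istart, istop) = (10, 99) and fresh input x spanning 5..102, 5 datums were prepended and 3 appended since the previous render:

    istart, istop = 10, 99
    x_first, x_last = 5, 102
    prepend_length = int(istart - x_first)  # 5
    append_length = int(x_last - istop)     # 3
    assert (prepend_length, append_length) == (5, 3)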
| 
 | ||||
|         # print(f"xrange: {self._xrange}") | ||||
| 
 | ||||
|         # XXX: lol brutal, the internals of `CurvePoint` (inherited by | ||||
|         # our `LineDot`) required ``.getData()`` to work.. | ||||
|         self.xData = x | ||||
|         self.yData = y | ||||
|         self._x, self._y = x, y | ||||
| 
 | ||||
|         if view_range: | ||||
|             profiler(f'view range slice {view_range}') | ||||
|         # self.xData = x | ||||
|         # self.yData = y | ||||
|         # self._x, self._y = x, y | ||||
| 
 | ||||
|         # downsampling incremental state checking | ||||
|         uppx = self.x_uppx() | ||||
|         px_width = self.px_width() | ||||
|         uppx_diff = (uppx - self._last_uppx) | ||||
| 
 | ||||
|         should_ds = False | ||||
|         should_redraw = False | ||||
|         new_sample_rate = False | ||||
|         should_ds = self._in_ds | ||||
|         showing_src_data = self._in_ds | ||||
|         # should_redraw = False | ||||
| 
 | ||||
|         # if a view range is passed, plan to draw the | ||||
|         # source output that's "in view" of the chart. | ||||
|         if view_range and not self._in_ds: | ||||
|         if ( | ||||
|             view_range | ||||
|             # and not self._in_ds | ||||
|             # and not prepend_length > 0 | ||||
|         ): | ||||
|             # print(f'{self._name} vr: {view_range}') | ||||
| 
 | ||||
|             # by default we only pull data up to the last (current) index | ||||
|             x_out, y_out = x_iv[:-1], y_iv[:-1] | ||||
|             x_out, y_out = x_iv[:slice_to_head], y_iv[:slice_to_head] | ||||
|             profiler(f'view range slice {view_range}') | ||||
| 
 | ||||
|             # step mode: draw flat top discrete "step" | ||||
|             # over the index space for each datum. | ||||
|             if self._step_mode: | ||||
|                 # TODO: numba this bish | ||||
|                 x_out, y_out = step_path_arrays_from_1d( | ||||
|                     x_out, | ||||
|                     y_out | ||||
|             vl, vr = view_range | ||||
| 
 | ||||
|             # last_ivr = self._x_iv_range | ||||
|             # ix_iv, iy_iv = self._x_iv_range = (x_iv[0], x_iv[-1]) | ||||
| 
 | ||||
|             zoom_or_append = False | ||||
|             last_vr = self._vr | ||||
|             last_ivr = self._avr | ||||
| 
 | ||||
|             if last_vr: | ||||
|                 # relative slice indices | ||||
|                 lvl, lvr = last_vr | ||||
|                 # abs slice indices | ||||
|                 al, ar = last_ivr | ||||
| 
 | ||||
|                 # append_length = int(x[-1] - istop) | ||||
|                 # append_length = int(x_iv[-1] - ar) | ||||
| 
 | ||||
|                 # left_change = abs(x_iv[0] - al) >= 1 | ||||
|                 # right_change = abs(x_iv[-1] - ar) >= 1 | ||||
| 
 | ||||
|                 if ( | ||||
|                     # likely a zoom view change | ||||
|                     (vr - lvr) > 2 or vl < lvl | ||||
|                     # append / prepend update | ||||
|                     # we had an append update where the view range | ||||
|                     # didn't change but the data-viewed (shifted) | ||||
|                     # underneath, so we need to redraw. | ||||
|                     # or left_change and right_change and last_vr == view_range | ||||
| 
 | ||||
|                         # not (left_change and right_change) and ivr | ||||
|                     # ( | ||||
|                     # or abs(x_iv[ivr] - livr) > 1 | ||||
|                 ): | ||||
|                     zoom_or_append = True | ||||
| 
 | ||||
|             # if last_ivr: | ||||
|             #     liivl, liivr = last_ivr | ||||
| 
 | ||||
|             if ( | ||||
|                 view_range != last_vr | ||||
|                 and ( | ||||
|                     append_length > 1 | ||||
|                     or zoom_or_append | ||||
|                 ) | ||||
|                 profiler('generated step arrays') | ||||
|             ): | ||||
|                 should_redraw = True | ||||
|                 # print("REDRAWING BRUH") | ||||
| 
 | ||||
|             should_redraw = True | ||||
|             profiler('sliced in-view array history') | ||||
|             self._vr = view_range | ||||
|             self._avr = x_iv[0], x_iv[slice_to_head] | ||||
| 
 | ||||
|             # x_last = x_iv[-1] | ||||
|             # y_last = y_iv[-1] | ||||
|             self._last_vr = view_range | ||||
|             # self._last_vr = view_range | ||||
| 
 | ||||
|             # self.disable_cache() | ||||
|             # flip_cache = True | ||||
| 
 | ||||
|         else: | ||||
|             self._xrange = x[0], x[-1] | ||||
|             # if ( | ||||
|             #     not view_range | ||||
|             #     or self._in_ds | ||||
|             # ): | ||||
|             # by default we only pull data up to the last (current) index | ||||
|             x_out, y_out = x[:slice_to_head], y[:slice_to_head] | ||||
| 
 | ||||
|         x_last = x[-1] | ||||
|         y_last = y[-1] | ||||
|             if prepend_length > 0: | ||||
|                 should_redraw = True | ||||
| 
 | ||||
|         # check for downsampling conditions | ||||
|         if ( | ||||
|             # std m4 downsample conditions | ||||
|             px_width | ||||
|             and uppx_diff >= 4 | ||||
|             or uppx_diff <= -3 | ||||
|             or self._step_mode and abs(uppx_diff) >= 4 | ||||
| 
 | ||||
|             and abs(uppx_diff) >= 1 | ||||
|         ): | ||||
|             log.info( | ||||
|                 f'{self._name} sampler change: {self._last_uppx} -> {uppx}' | ||||
|             ) | ||||
|             self._last_uppx = uppx | ||||
|             new_sample_rate = True | ||||
|             showing_src_data = False | ||||
|             should_redraw = True | ||||
|             should_ds = True | ||||
| 
 | ||||
|         elif ( | ||||
|  | @ -362,53 +448,54 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             # source data so we clear our path data in prep | ||||
|             # to generate a new one from original source data. | ||||
|             should_redraw = True | ||||
|             new_sample_rate = True | ||||
|             should_ds = False | ||||
| 
 | ||||
|         # compute the length diffs between the first/last index entry in | ||||
|         # the input data and the last indexes we have on record from the | ||||
|         # last time we updated the curve index. | ||||
|         prepend_length = int(istart - x[0]) | ||||
|         append_length = int(x[-1] - istop) | ||||
|             showing_src_data = True | ||||
| 
 | ||||
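Reduced to a pure function, the simplified resample trigger above behaves roughly like this (a sketch; the px_width gating is condensed):

    def needs_resample(
        uppx: float,
        last_uppx: float,
        px_width: int,
    ) -> bool:
        # redraw at a new sample rate once the x-units-per-pixel
        # drift since the last downsample reaches a full unit
        return bool(px_width) and abs(uppx - last_uppx) >= 1

    assert needs_resample(uppx=6.0, last_uppx=2.0, px_width=1920)
    assert not needs_resample(uppx=2.4, last_uppx=2.0, px_width=1920)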
|         # no_path_yet = self.path is None | ||||
|         if ( | ||||
|             self.path is None | ||||
|             or should_redraw | ||||
|             or should_ds | ||||
|             or new_sample_rate | ||||
|             or prepend_length > 0 | ||||
|         ): | ||||
|             if ( | ||||
|                 not view_range | ||||
|                 or self._in_ds | ||||
|             ): | ||||
|                 # by default we only pull data up to the last (current) index | ||||
|                 x_out, y_out = x[:-1], y[:-1] | ||||
|             # if ( | ||||
|             #     not view_range | ||||
|             #     or self._in_ds | ||||
|             # ): | ||||
|             #     # by default we only pull data up to the last (current) index | ||||
|             #     x_out, y_out = x[:-1], y[:-1] | ||||
| 
 | ||||
|                 # step mode: draw flat top discrete "step" | ||||
|                 # over the index space for each datum. | ||||
|                 if self._step_mode: | ||||
|                     x_out, y_out = step_path_arrays_from_1d( | ||||
|                         x_out, | ||||
|                         y_out, | ||||
|                     ) | ||||
|                     # TODO: numba this bish | ||||
|                     profiler('generated step arrays') | ||||
|             # step mode: draw flat top discrete "step" | ||||
|             # over the index space for each datum. | ||||
|             # if self._step_mode: | ||||
|             #     self.disable_cache() | ||||
|             #     flip_cache = True | ||||
|             #     x_out, y_out = step_path_arrays_from_1d( | ||||
|             #         x_out, | ||||
|             #         y_out, | ||||
|             #     ) | ||||
| 
 | ||||
|             #     # TODO: numba this bish | ||||
|             #     profiler('generated step arrays') | ||||
| 
 | ||||
|             if should_redraw: | ||||
|                 profiler('path reversion to non-ds') | ||||
|                 if self.path: | ||||
|                     # print(f'CLEARING PATH {self._name}') | ||||
|                     self.path.clear() | ||||
| 
 | ||||
|                 if self.fast_path: | ||||
|                     self.fast_path.clear() | ||||
| 
 | ||||
|             if should_redraw and not should_ds: | ||||
|                 if self._in_ds: | ||||
|                     log.info(f'DEDOWN -> {self._name}') | ||||
|                 profiler('cleared paths due to `should_redraw` set') | ||||
| 
 | ||||
|             if new_sample_rate and showing_src_data: | ||||
|                 # if self._in_ds: | ||||
|                 log.info(f'DEDOWN -> {self._name}') | ||||
| 
 | ||||
|                 self._in_ds = False | ||||
| 
 | ||||
|             elif should_ds and px_width: | ||||
|             elif should_ds and uppx and px_width > 1: | ||||
|                 x_out, y_out = self.downsample( | ||||
|                     x_out, | ||||
|                     y_out, | ||||
|  | @ -425,7 +512,10 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|                 finiteCheck=False, | ||||
|                 path=self.path, | ||||
|             ) | ||||
|             profiler('generated fresh path') | ||||
|             self.prepareGeometryChange() | ||||
|             profiler( | ||||
|                 f'generated fresh path. (should_redraw: {should_redraw} ' | ||||
|                 f'should_ds: {should_ds} new_sample_rate: {new_sample_rate})' | ||||
|             ) | ||||
|             # profiler(f'DRAW PATH IN VIEW -> {self._name}') | ||||
| 
 | ||||
|             # reserve mem allocs see: | ||||
|  | @ -457,32 +547,44 @@ class FastAppendCurve(pg.GraphicsObject): | |||
| 
 | ||||
|         elif ( | ||||
|             append_length > 0 | ||||
|             and not view_range | ||||
|             and do_append | ||||
|             and not should_redraw | ||||
|             # and not view_range | ||||
|         ): | ||||
|             new_x = x[-append_length - 2:-1] | ||||
|             new_y = y[-append_length - 2:-1] | ||||
|             print(f'{self._name} append len: {append_length}') | ||||
|             new_x = x[-append_length - 2:slice_to_head] | ||||
|             new_y = y[-append_length - 2:slice_to_head] | ||||
|             profiler('sliced append path') | ||||
| 
 | ||||
|             if self._step_mode: | ||||
|                 new_x, new_y = step_path_arrays_from_1d( | ||||
|                     new_x, | ||||
|                     new_y, | ||||
|                 ) | ||||
|                 # [1:] since we don't need the vertical line normally at | ||||
|                 # the beginning of the step curve taking the first (x, | ||||
|                 # y) point down to the x-axis **because** this is an | ||||
|                 # appended path graphic. | ||||
|                 new_x = new_x[1:] | ||||
|                 new_y = new_y[1:] | ||||
|             # if self._step_mode: | ||||
|             # #     new_x, new_y = step_path_arrays_from_1d( | ||||
|             # #         new_x, | ||||
|             # #         new_y, | ||||
|             # #     ) | ||||
|             # #     # [1:] since we don't need the vertical line normally at | ||||
|             # #     # the beginning of the step curve taking the first (x, | ||||
|             # #     # y) point down to the x-axis **because** this is an | ||||
|             # #     # appended path graphic. | ||||
|             # #     new_x = new_x[1:] | ||||
|             # #     new_y = new_y[1:] | ||||
| 
 | ||||
|             profiler('diffed append arrays') | ||||
|             #     self.disable_cache() | ||||
|             #     flip_cache = True | ||||
| 
 | ||||
|             if should_ds: | ||||
|                 new_x, new_y = self.downsample( | ||||
|                     new_x, | ||||
|                     new_y, | ||||
|                     **should_ds, | ||||
|                 ) | ||||
|                 profiler(f'fast path downsample redraw={should_ds}') | ||||
|             #     profiler('generated step data') | ||||
| 
 | ||||
|             profiler( | ||||
|                 f'diffed array input, append_length={append_length}' | ||||
|             ) | ||||
| 
 | ||||
|             # if should_ds: | ||||
|             #     new_x, new_y = self.downsample( | ||||
|             #         new_x, | ||||
|             #         new_y, | ||||
|             #         px_width, | ||||
|             #         uppx, | ||||
|             #     ) | ||||
|             #     profiler(f'fast path downsample redraw={should_ds}') | ||||
| 
 | ||||
|             append_path = pg.functions.arrayToQPath( | ||||
|                 new_x, | ||||
|  | @ -491,12 +593,13 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|                 finiteCheck=False, | ||||
|                 path=self.fast_path, | ||||
|             ) | ||||
|             profiler('generated append qpath') | ||||
| 
 | ||||
|             if self.use_fpath: | ||||
|                 # an attempt at trying to make append-updates faster.. | ||||
|                 if self.fast_path is None: | ||||
|                     self.fast_path = append_path | ||||
|                     self.fast_path.reserve(int(6e3)) | ||||
|                     # self.fast_path.reserve(int(6e3)) | ||||
|                 else: | ||||
|                     self.fast_path.connectPath(append_path) | ||||
|                     size = self.fast_path.capacity() | ||||
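For context on the append branch above: a small QPainterPath is generated for only the new datums and then spliced onto the persistent path. A stripped-down sketch (the `path=` and `finiteCheck=` kwargs seen in the diff assume the repo's pinned pyqtgraph build):

    import numpy as np
    import pyqtgraph as pg

    x = np.arange(10, dtype=float)
    y = np.sin(x)
    append_path = pg.functions.arrayToQPath(x, y, connect='all')
    # fast_path.connectPath(append_path)  # splice onto the existing path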
|  | @ -529,16 +632,43 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             # self.disable_cache() | ||||
|             # flip_cache = True | ||||
| 
 | ||||
|         if draw_last: | ||||
|             self.draw_last(x, y) | ||||
|             profiler('draw last segment') | ||||
| 
 | ||||
| 
 | ||||
|         # if flip_cache: | ||||
|         # #     # XXX: seems to be needed to avoid artifacts (see above). | ||||
|         #     self.setCacheMode(QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|         # trigger redraw of path | ||||
|         # do update before reverting to cache mode | ||||
|         self.update() | ||||
|         profiler('.update()') | ||||
| 
 | ||||
|     def draw_last( | ||||
|         self, | ||||
|         x: np.ndarray, | ||||
|         y: np.ndarray, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         x_last = x[-1] | ||||
|         y_last = y[-1] | ||||
| 
 | ||||
|         # draw the "current" step graphic segment so it lines up with | ||||
|         # the "middle" of the current (OHLC) sample. | ||||
|         if self._step_mode: | ||||
|             self._last_line = QLineF( | ||||
|                 x_last - 0.5, 0, | ||||
|                 x_last + 0.5, 0, | ||||
|                 # x_last, 0, | ||||
|                 # x_last, 0, | ||||
|             ) | ||||
|             self._last_step_rect = QRectF( | ||||
|                 x_last - 0.5, 0, | ||||
|                 x_last + 0.5, y_last | ||||
|                 # x_last, 0, | ||||
|                 # x_last, y_last | ||||
|             ) | ||||
|             # print( | ||||
|             #     f"path br: {self.path.boundingRect()}", | ||||
|  | @ -548,20 +678,10 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|         else: | ||||
|             self._last_line = QLineF( | ||||
|                 x[-2], y[-2], | ||||
|                 x[-1], y_last | ||||
|                 x_last, y_last | ||||
|             ) | ||||
| 
 | ||||
|         profiler('draw last segment') | ||||
| 
 | ||||
|         # trigger redraw of path | ||||
|         # do update before reverting to cache mode | ||||
|         # self.prepareGeometryChange() | ||||
|         self.update() | ||||
|         profiler('.update()') | ||||
| 
 | ||||
|         # if flip_cache: | ||||
|         #     # XXX: seems to be needed to avoid artifacts (see above). | ||||
|         #     self.setCacheMode(QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|     # XXX: lol brutal, the internals of `CurvePoint` (inherited by | ||||
|     # our `LineDot`) required ``.getData()`` to work.. | ||||
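The `draw_last()` split above is the heart of the incremental scheme: only the most recent segment is rebuilt on every tick while the long history path changes far less often. A sketch with made-up coordinates (note `QRectF` takes x, y, width, height):

    from PyQt5.QtCore import QLineF, QRectF

    x_last, y_last = 100.0, 42.5
    # step mode: flat cap plus the filled "current bar" rect
    last_line = QLineF(x_last - 0.5, 0.0, x_last + 0.5, 0.0)
    last_step_rect = QRectF(x_last - 0.5, 0.0, 1.0, y_last)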
|  | @ -596,6 +716,10 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|         # self.disable_cache() | ||||
|         # self.setCacheMode(QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|     def reset_cache(self) -> None: | ||||
|         self.disable_cache() | ||||
|         self.setCacheMode(QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|     def disable_cache(self) -> None: | ||||
|         ''' | ||||
|         Disable the use of the pixel coordinate cache and trigger a geo event. | ||||
|  | @ -604,7 +728,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|         # XXX: pretty annoying but, without this there's little | ||||
|         # artefacts on the append updates to the curve... | ||||
|         self.setCacheMode(QtWidgets.QGraphicsItem.NoCache) | ||||
|         self.prepareGeometryChange() | ||||
|         # self.prepareGeometryChange() | ||||
| 
 | ||||
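The new `reset_cache()` above is simply the two Qt cache-mode flips in sequence; standalone for clarity:

    from PyQt5.QtWidgets import QGraphicsItem

    def reset_cache(item: QGraphicsItem) -> None:
        # NoCache forces a full repaint (clearing stale pixels), then
        # device-coordinate caching is restored for cheap blit redraws
        item.setCacheMode(QGraphicsItem.NoCache)
        item.setCacheMode(QGraphicsItem.DeviceCoordinateCache)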
|     def boundingRect(self): | ||||
|         ''' | ||||
|  | @ -624,6 +748,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
| 
 | ||||
|         ''' | ||||
|         hb = self.path.controlPointRect() | ||||
|         # hb = self.path.boundingRect() | ||||
|         hb_size = hb.size() | ||||
| 
 | ||||
|         fp = self.fast_path | ||||
|  | @ -632,17 +757,47 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             hb_size = fhb.size() + hb_size | ||||
|         # print(f'hb_size: {hb_size}') | ||||
| 
 | ||||
|         # if self._last_step_rect: | ||||
|         #     hb_size += self._last_step_rect.size() | ||||
| 
 | ||||
|         # if self._line: | ||||
|         #     br = self._last_step_rect.bottomRight() | ||||
| 
 | ||||
|         # tl = QPointF( | ||||
|         #     # self._vr[0], | ||||
|         #     # hb.topLeft().y(), | ||||
|         #     # 0, | ||||
|         #     # hb_size.height() + 1 | ||||
|         # ) | ||||
| 
 | ||||
|         # if self._last_step_rect: | ||||
|         #     br = self._last_step_rect.bottomRight() | ||||
| 
 | ||||
|         # else: | ||||
|         # hb_size += QSizeF(1, 1) | ||||
|         w = hb_size.width() + 1 | ||||
|         h = hb_size.height() + 1 | ||||
| 
 | ||||
|         # br = QPointF( | ||||
|         #     self._vr[-1], | ||||
|         #     # tl.x() + w, | ||||
|         #     tl.y() + h, | ||||
|         # ) | ||||
| 
 | ||||
|         br = QRectF( | ||||
| 
 | ||||
|             # top left | ||||
|             # hb.topLeft() | ||||
|             # tl, | ||||
|             QPointF(hb.topLeft()), | ||||
| 
 | ||||
|             # br, | ||||
|             # total size | ||||
|             # QSizeF(hb_size) | ||||
|             # hb_size, | ||||
|             QSizeF(w, h) | ||||
|         ) | ||||
|         self._br = br | ||||
|         # print(f'bounding rect: {br}') | ||||
|         return br | ||||
| 
 | ||||
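Stripped of the commented experiments, the bounding-rect logic above reduces to the path's control-point rect plus a 1px pad; a sketch that ignores the fast-path size merge:

    from PyQt5.QtCore import QPointF, QRectF, QSizeF
    from PyQt5.QtGui import QPainterPath

    def padded_bounds(path: QPainterPath) -> QRectF:
        # controlPointRect() is cheaper than boundingRect() for paths
        # with curves; pad by 1px so the stroked edge is never clipped
        hb = path.controlPointRect()
        return QRectF(
            QPointF(hb.topLeft()),
            QSizeF(hb.size().width() + 1, hb.size().height() + 1),
        )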
|  | @ -659,6 +814,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=ms_slower_then, | ||||
|         ) | ||||
|         self.prepareGeometryChange() | ||||
| 
 | ||||
|         if ( | ||||
|             self._step_mode | ||||
|  | @ -681,7 +837,7 @@ class FastAppendCurve(pg.GraphicsObject): | |||
| 
 | ||||
|         if path: | ||||
|             p.drawPath(path) | ||||
|             profiler('.drawPath(path)') | ||||
|             profiler(f'.drawPath(path): {path.capacity()}') | ||||
| 
 | ||||
|         fp = self.fast_path | ||||
|         if fp: | ||||
|  |  | |||
|  | @ -29,6 +29,7 @@ from typing import Optional, Any, Callable | |||
| import numpy as np | ||||
| import tractor | ||||
| import trio | ||||
| import pendulum | ||||
| import pyqtgraph as pg | ||||
| 
 | ||||
| from .. import brokers | ||||
|  | @ -47,6 +48,7 @@ from ._fsp import ( | |||
|     open_vlm_displays, | ||||
| ) | ||||
| from ..data._sharedmem import ShmArray | ||||
| from ..data._source import tf_in_1s | ||||
| from ._forms import ( | ||||
|     FieldsForm, | ||||
|     mk_order_pane_layout, | ||||
|  | @ -61,7 +63,7 @@ from ..log import get_logger | |||
| log = get_logger(__name__) | ||||
| 
 | ||||
| # TODO: load this from a config.toml! | ||||
| _quote_throttle_rate: int = 12  # Hz | ||||
| _quote_throttle_rate: int = 22  # Hz | ||||
| 
 | ||||
| 
 | ||||
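For a sense of scale, the throttle bump above shrinks the per-update time budget:

    assert round(1000 / 22) == 45  # ms per quote-throttled cycle now
    assert round(1000 / 12) == 83  # vs the previous 12 Hz setting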
| # a working tick-type-classes template | ||||
|  | @ -94,28 +96,19 @@ def chart_maxmin( | |||
|     Compute max and min datums "in view" for range limits. | ||||
| 
 | ||||
|     ''' | ||||
|     array = ohlcv_shm.array | ||||
|     ifirst = array[0]['index'] | ||||
| 
 | ||||
|     last_bars_range = chart.bars_range() | ||||
|     l, lbar, rbar, r = last_bars_range | ||||
|     in_view = array[lbar - ifirst:rbar - ifirst + 1] | ||||
|     out = chart.maxmin() | ||||
| 
 | ||||
|     if not in_view.size: | ||||
|         log.warning('Resetting chart to data') | ||||
|         chart.default_view() | ||||
|     if out is None: | ||||
|         return (last_bars_range, 0, 0, 0) | ||||
| 
 | ||||
|     mx, mn = ( | ||||
|         np.nanmax(in_view['high']), | ||||
|         np.nanmin(in_view['low'],) | ||||
|     ) | ||||
|     mn, mx = out | ||||
| 
 | ||||
|     mx_vlm_in_view = 0 | ||||
|     if vlm_chart: | ||||
|         mx_vlm_in_view = np.max( | ||||
|             in_view['volume'] | ||||
|         ) | ||||
|         out = vlm_chart.maxmin() | ||||
|         if out: | ||||
|             _, mx_vlm_in_view = out | ||||
| 
 | ||||
|     return ( | ||||
|         last_bars_range, | ||||
|  | @ -338,7 +331,7 @@ def graphics_update_cycle( | |||
|     vars = ds.vars | ||||
|     tick_margin = vars['tick_margin'] | ||||
| 
 | ||||
|     update_uppx = 6 | ||||
|     update_uppx = 16 | ||||
| 
 | ||||
|     for sym, quote in ds.quotes.items(): | ||||
| 
 | ||||
|  | @ -373,7 +366,7 @@ def graphics_update_cycle( | |||
|         mx = mx_in_view + tick_margin | ||||
|         mn = mn_in_view - tick_margin | ||||
|         profiler('maxmin call') | ||||
|         liv = r > i_step  # the last datum is in view | ||||
|         liv = r >= i_step  # the last datum is in view | ||||
| 
 | ||||
|         # don't real-time "shift" the curve to the | ||||
|         # left unless we get one of the following: | ||||
|  | @ -381,7 +374,6 @@ def graphics_update_cycle( | |||
|             ( | ||||
|                 i_diff > 0  # a new sample step arrived | ||||
|                 and xpx < 4  # chart isn't zoomed out very far | ||||
|                 and r >= i_step  # the last datum isn't in view | ||||
|                 and liv | ||||
|             ) | ||||
|             or trigger_all | ||||
|  | @ -398,16 +390,17 @@ def graphics_update_cycle( | |||
|             ) | ||||
| 
 | ||||
|             if ( | ||||
|                 (xpx < update_uppx or i_diff > 0) | ||||
|                 ( | ||||
|                     xpx < update_uppx | ||||
|                     or i_diff > 0 | ||||
|                     and liv | ||||
|                 ) | ||||
|                 or trigger_all | ||||
|                 and liv | ||||
|             ): | ||||
|                 # TODO: make it so this doesn't have to be called | ||||
|                 # once the $vlm is up? | ||||
|                 vlm_chart.update_graphics_from_array( | ||||
|                 vlm_chart.update_graphics_from_flow( | ||||
|                     'volume', | ||||
|                     array, | ||||
| 
 | ||||
|                     # UGGGh, see ``maxmin()`` impl in `._fsp` for | ||||
|                     # the overlayed plotitems... we need a better | ||||
|                     # way to invoke a maxmin per overlay.. | ||||
|  | @ -432,11 +425,16 @@ def graphics_update_cycle( | |||
|                     vars['last_mx_vlm'] = mx_vlm_in_view | ||||
| 
 | ||||
|                 for curve_name, flow in vlm_chart._flows.items(): | ||||
| 
 | ||||
|                     if not flow.render: | ||||
|                         continue | ||||
| 
 | ||||
|                     update_fsp_chart( | ||||
|                         vlm_chart, | ||||
|                         flow, | ||||
|                         curve_name, | ||||
|                         array_key=curve_name, | ||||
|                         do_append=xpx < update_uppx, | ||||
|                     ) | ||||
|                     # is this even doing anything? | ||||
|                     # (pretty sure it's the real-time | ||||
|  | @ -494,10 +492,11 @@ def graphics_update_cycle( | |||
|         if ( | ||||
|             xpx < update_uppx | ||||
|             or i_diff > 0 | ||||
|             or trigger_all | ||||
|         ): | ||||
|             chart.update_graphics_from_array( | ||||
|             chart.update_graphics_from_flow( | ||||
|                 chart.name, | ||||
|                 array, | ||||
|                 do_append=xpx < update_uppx, | ||||
|             ) | ||||
| 
 | ||||
|         # iterate in FIFO order per tick-frame | ||||
|  | @ -510,8 +509,9 @@ def graphics_update_cycle( | |||
|             # tick frames to determine the y-range for chart | ||||
|             # auto-scaling. | ||||
|             # TODO: we need a streaming minmax algo here, see def above. | ||||
|             mx = max(price + tick_margin, mx) | ||||
|             mn = min(price - tick_margin, mn) | ||||
|             if liv: | ||||
|                 mx = max(price + tick_margin, mx) | ||||
|                 mn = min(price - tick_margin, mn) | ||||
| 
 | ||||
|             if typ in clear_types: | ||||
| 
 | ||||
|  | @ -534,9 +534,8 @@ def graphics_update_cycle( | |||
| 
 | ||||
|                 if wap_in_history: | ||||
|                     # update vwap overlay line | ||||
|                     chart.update_graphics_from_array( | ||||
|                     chart.update_graphics_from_flow( | ||||
|                         'bar_wap', | ||||
|                         array, | ||||
|                     ) | ||||
| 
 | ||||
|             # L1 book label-line updates | ||||
|  | @ -552,7 +551,7 @@ def graphics_update_cycle( | |||
| 
 | ||||
|                 if ( | ||||
|                     label is not None | ||||
|                     # and liv | ||||
|                     and liv | ||||
|                 ): | ||||
|                     label.update_fields( | ||||
|                         {'level': price, 'size': size} | ||||
|  | @ -566,7 +565,7 @@ def graphics_update_cycle( | |||
|                 typ in _tick_groups['asks'] | ||||
|                 # TODO: instead we could check if the price is in the | ||||
|                 # y-view-range? | ||||
|                 # and liv | ||||
|                 and liv | ||||
|             ): | ||||
|                 l1.ask_label.update_fields({'level': price, 'size': size}) | ||||
| 
 | ||||
|  | @ -574,7 +573,7 @@ def graphics_update_cycle( | |||
|                 typ in _tick_groups['bids'] | ||||
|                 # TODO: instead we could check if the price is in the | ||||
|                 # y-view-range? | ||||
|                 # and liv | ||||
|                 and liv | ||||
|             ): | ||||
|                 l1.bid_label.update_fields({'level': price, 'size': size}) | ||||
| 
 | ||||
|  | @ -589,6 +588,7 @@ def graphics_update_cycle( | |||
|                 main_vb._ic is None | ||||
|                 or not main_vb._ic.is_set() | ||||
|             ): | ||||
|                 # print(f'updating range due to mxmn') | ||||
|                 main_vb._set_yrange( | ||||
|                     # TODO: we should probably scale | ||||
|                     # the view margin based on the size | ||||
|  | @ -599,6 +599,7 @@ def graphics_update_cycle( | |||
|                     yrange=(mn, mx), | ||||
|                 ) | ||||
| 
 | ||||
|         # XXX: update this every draw cycle to make L1-always-in-view work. | ||||
|         vars['last_mx'], vars['last_mn'] = mx, mn | ||||
| 
 | ||||
|         # run synchronous update on all linked flows | ||||
|  | @ -661,11 +662,17 @@ async def display_symbol_data( | |||
|         symbol = feed.symbols[sym] | ||||
|         fqsn = symbol.front_fqsn() | ||||
| 
 | ||||
|         times = bars['time'] | ||||
|         end = pendulum.from_timestamp(times[-1]) | ||||
|         start = pendulum.from_timestamp(times[times != times[-1]][-1]) | ||||
|         step_size_s = (end - start).seconds | ||||
|         tf_key = tf_in_1s[step_size_s] | ||||
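The timeframe inference above diffs the last two distinct bar timestamps (the `times != times[-1]` mask just guards against a duplicated final stamp); simplified:

    import pendulum

    times = [1_650_000_000, 1_650_000_060, 1_650_000_120]
    end = pendulum.from_timestamp(times[-1])
    start = pendulum.from_timestamp(times[-2])
    assert (end - start).seconds == 60  # -> a '1m'-style tf_in_1s key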
| 
 | ||||
|         # load in symbol's ohlc data | ||||
|         godwidget.window.setWindowTitle( | ||||
|             f'{fqsn} ' | ||||
|             f'tick:{symbol.tick_size} ' | ||||
|             f'step:1s ' | ||||
|             f'step:{tf_key} ' | ||||
|         ) | ||||
| 
 | ||||
|         linked = godwidget.linkedsplits | ||||
|  | @ -681,32 +688,31 @@ async def display_symbol_data( | |||
|         # create main OHLC chart | ||||
|         chart = linked.plot_ohlc_main( | ||||
|             symbol, | ||||
|             bars, | ||||
|             ohlcv, | ||||
|             sidepane=pp_pane, | ||||
|         ) | ||||
|         chart.default_view() | ||||
|         chart._feeds[symbol.key] = feed | ||||
|         chart.setFocus() | ||||
| 
 | ||||
|         # plot historical vwap if available | ||||
|         wap_in_history = False | ||||
| 
 | ||||
|         if brokermod._show_wap_in_history: | ||||
|         # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?! | ||||
|         # if brokermod._show_wap_in_history: | ||||
| 
 | ||||
|             if 'bar_wap' in bars.dtype.fields: | ||||
|                 wap_in_history = True | ||||
|                 chart.draw_curve( | ||||
|                     name='bar_wap', | ||||
|                     data=bars, | ||||
|                     add_label=False, | ||||
|                 ) | ||||
|         #     if 'bar_wap' in bars.dtype.fields: | ||||
|         #         wap_in_history = True | ||||
|         #         chart.draw_curve( | ||||
|         #             name='bar_wap', | ||||
|         #             shm=ohlcv, | ||||
|         #             color='default_light', | ||||
|         #             add_label=False, | ||||
|         #         ) | ||||
| 
 | ||||
|         # size view to data once at outset | ||||
|         chart.cv._set_yrange() | ||||
| 
 | ||||
|         # TODO: a data view api that makes this less shit | ||||
|         chart._shm = ohlcv | ||||
|         chart._flows[chart.data_key].shm = ohlcv | ||||
| 
 | ||||
|         # NOTE: we must immediately tell Qt to show the OHLC chart | ||||
|         # to avoid a race where the subplots get added/shown to | ||||
|         # the linked set *before* the main price chart! | ||||
|  | @ -769,6 +775,5 @@ async def display_symbol_data( | |||
|                 sbar._status_groups[loading_sym_key][1]() | ||||
| 
 | ||||
|                 # let the app run.. bby | ||||
|                 chart.default_view() | ||||
|                 # linked.graphics_cycle() | ||||
|                 await trio.sleep_forever() | ||||
|  |  | |||
|  | @ -343,7 +343,7 @@ class SelectRect(QtGui.QGraphicsRectItem): | |||
|         nbars = ixmx - ixmn + 1 | ||||
| 
 | ||||
|         chart = self._chart | ||||
|         data = chart._arrays[chart.name][ixmn:ixmx] | ||||
|         data = chart._flows[chart.name].shm.array[ixmn:ixmx] | ||||
| 
 | ||||
|         if len(data): | ||||
|             std = data['close'].std() | ||||
|  |  | |||
|  | @ -0,0 +1,924 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
| # the Free Software Foundation, either version 3 of the License, or | ||||
| # (at your option) any later version. | ||||
| 
 | ||||
| # This program is distributed in the hope that it will be useful, | ||||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| # GNU Affero General Public License for more details. | ||||
| 
 | ||||
| # You should have received a copy of the GNU Affero General Public License | ||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| ''' | ||||
| High level streaming graphics primitives. | ||||
| 
 | ||||
| This is an intermediate layer which associates real-time low latency | ||||
| graphics primitives with underlying FSP related data structures for fast | ||||
| incremental update. | ||||
| 
 | ||||
| ''' | ||||
| from __future__ import annotations | ||||
| from functools import partial | ||||
| from typing import ( | ||||
|     Optional, | ||||
|     Callable, | ||||
| ) | ||||
| 
 | ||||
| import msgspec | ||||
| import numpy as np | ||||
| from numpy.lib import recfunctions as rfn | ||||
| import pyqtgraph as pg | ||||
| from PyQt5.QtGui import QPainterPath | ||||
| from PyQt5.QtCore import ( | ||||
|     # Qt, | ||||
|     QLineF, | ||||
|     # QSizeF, | ||||
|     QRectF, | ||||
|     # QPointF, | ||||
| ) | ||||
| 
 | ||||
| from ..data._sharedmem import ( | ||||
|     ShmArray, | ||||
|     # open_shm_array, | ||||
| ) | ||||
| from .._profile import pg_profile_enabled, ms_slower_then | ||||
| from ._ohlc import ( | ||||
|     BarItems, | ||||
|     gen_qpath, | ||||
| ) | ||||
| from ._curve import ( | ||||
|     FastAppendCurve, | ||||
|     # step_path_arrays_from_1d, | ||||
| ) | ||||
| from ._compression import ( | ||||
|     # ohlc_flatten, | ||||
|     ds_m4, | ||||
| ) | ||||
| from ..log import get_logger | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
| 
 | ||||
| # class FlowsTable(msgspec.Struct): | ||||
| #     ''' | ||||
| #     Data-AGGRegate: high level API onto multiple (categorized) | ||||
| #     ``Flow``s with high level processing routines for | ||||
| #     multi-graphics computations and display. | ||||
| 
 | ||||
| #     ''' | ||||
| #     flows: dict[str, np.ndarray] = {} | ||||
| 
 | ||||
| # @classmethod | ||||
| # def from_token( | ||||
| #     cls, | ||||
| #     shm_token: tuple[ | ||||
| #         str, | ||||
| #         str, | ||||
| #         tuple[str, str], | ||||
| #     ], | ||||
| 
 | ||||
| # ) -> Renderer: | ||||
| 
 | ||||
| #     shm = attach_shm_array(token) | ||||
| #     return cls(shm) | ||||
| 
 | ||||
| 
 | ||||
| def rowarr_to_path( | ||||
|     rows_array: np.ndarray, | ||||
|     x_basis: np.ndarray, | ||||
|     flow: Flow, | ||||
| 
 | ||||
| ) -> QPainterPath: | ||||
| 
 | ||||
|     # TODO: we could in theory use ``numba`` to flatten | ||||
|     # if needed? | ||||
| 
 | ||||
|     # to 1d | ||||
|     y = rows_array.flatten() | ||||
| 
 | ||||
|     return pg.functions.arrayToQPath( | ||||
|         # these get passed at render call time | ||||
|         x=x_basis[:y.size], | ||||
|         y=y, | ||||
|         connect='all', | ||||
|         finiteCheck=False, | ||||
|         path=flow.path, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def mk_ohlc_flat_copy( | ||||
|     ohlc_shm: ShmArray, | ||||
| 
 | ||||
|     # XXX: we bind this in currently.. | ||||
|     # x_basis: np.ndarray, | ||||
| 
 | ||||
|     # vr: Optional[slice] = None, | ||||
| 
 | ||||
| ) -> tuple[np.ndarray, np.ndarray]: | ||||
|     ''' | ||||
|     Return a flattened, non-copy view into an OHLC shm array. | ||||
| 
 | ||||
|     ''' | ||||
|     ohlc = ohlc_shm._array[['open', 'high', 'low', 'close']] | ||||
|     # if vr: | ||||
|     #     ohlc = ohlc[vr] | ||||
|     #     x = x_basis[vr] | ||||
| 
 | ||||
|     unstructured = rfn.structured_to_unstructured( | ||||
|         ohlc, | ||||
|         copy=False, | ||||
|     ) | ||||
|     # breakpoint() | ||||
|     y = unstructured.flatten() | ||||
|     # x = x_basis[:y.size] | ||||
|     return y | ||||
| 
 | ||||
| 
 | ||||
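`mk_ohlc_flat_copy()` above leans on `rfn.structured_to_unstructured`, which for homogeneous fields can return a zero-copy view; in miniature:

    import numpy as np
    from numpy.lib import recfunctions as rfn

    ohlc = np.array(
        [(1.0, 2.0, 0.5, 1.5), (1.5, 2.5, 1.0, 2.0)],
        dtype=[(f, float) for f in ('open', 'high', 'low', 'close')],
    )
    flat = rfn.structured_to_unstructured(ohlc, copy=False).flatten()
    assert flat.tolist() == [1.0, 2.0, 0.5, 1.5, 1.5, 2.5, 1.0, 2.0]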
| class Flow(msgspec.Struct):  # , frozen=True): | ||||
|     ''' | ||||
|     (Financial Signal-)Flow compound type which wraps a real-time | ||||
|     shm array stream with displayed graphics (curves, charts) | ||||
|     for high level access and control as well as efficient incremental | ||||
|     update. | ||||
| 
 | ||||
|     The intention is for this type to eventually be capable of shm-passing | ||||
|     of incrementally updated graphics stream data between actors. | ||||
| 
 | ||||
|     ''' | ||||
|     name: str | ||||
|     plot: pg.PlotItem | ||||
|     graphics: pg.GraphicsObject | ||||
|     _shm: ShmArray | ||||
| 
 | ||||
|     is_ohlc: bool = False | ||||
|     render: bool = True  # toggle for display loop | ||||
|     gy: Optional[ShmArray] = None | ||||
|     gx: Optional[np.ndarray] = None | ||||
|     _iflat_last: int = 0 | ||||
|     _iflat_first: int = 0 | ||||
| 
 | ||||
|     _last_uppx: float = 0 | ||||
|     _in_ds: bool = False | ||||
| 
 | ||||
|     _graphics_tranform_fn: Optional[Callable[[ShmArray], np.ndarray]] = None | ||||
| 
 | ||||
|     # map from uppx -> (downsampled data, incremental graphics) | ||||
|     _src_r: Optional[Renderer] = None | ||||
|     _render_table: dict[ | ||||
|         Optional[int], | ||||
|         tuple[Renderer, pg.GraphicsItem], | ||||
|     ] = {} | ||||
| 
 | ||||
|     # TODO: hackery to be able to set a shm later | ||||
|     # but whilst also allowing this type to be hashable, | ||||
|     # likely will require serializable token that is used to attach | ||||
|     # to the underlying shm ref after startup? | ||||
|     # _shm: Optional[ShmArray] = None  # currently, may be filled in "later" | ||||
| 
 | ||||
|     # last read from shm (usually due to an update call) | ||||
|     _last_read: Optional[np.ndarray] = None | ||||
| 
 | ||||
|     # cache of y-range values per x-range input. | ||||
|     _mxmns: dict[tuple[int, int], tuple[float, float]] = {} | ||||
| 
 | ||||
|     @property | ||||
|     def shm(self) -> ShmArray: | ||||
|         return self._shm | ||||
| 
 | ||||
|     # TODO: remove this and only allow setting through | ||||
|     # private ``._shm`` attr? | ||||
|     @shm.setter | ||||
|     def shm(self, shm: ShmArray) -> ShmArray: | ||||
|         print(f'{self.name} DO NOT SET SHM THIS WAY!?') | ||||
|         self._shm = shm | ||||
| 
 | ||||
|     def maxmin( | ||||
|         self, | ||||
|         lbar, | ||||
|         rbar, | ||||
| 
 | ||||
|     ) -> tuple[float, float]: | ||||
|         ''' | ||||
|         Compute the cached max and min y-range values for a given | ||||
|         x-range determined by ``lbar`` and ``rbar``. | ||||
| 
 | ||||
|         ''' | ||||
|         rkey = (lbar, rbar) | ||||
|         cached_result = self._mxmns.get(rkey) | ||||
|         if cached_result: | ||||
|             return cached_result | ||||
| 
 | ||||
|         shm = self.shm | ||||
|         if shm is None: | ||||
|             mxmn = None | ||||
| 
 | ||||
|         else:  # new block for profiling?.. | ||||
|             arr = shm.array | ||||
| 
 | ||||
|             # build relative indexes into shm array | ||||
|             # TODO: should we just add/use a method | ||||
|             # on the shm to do this? | ||||
|             ifirst = arr[0]['index'] | ||||
|             slice_view = arr[ | ||||
|                 lbar - ifirst: | ||||
|                 (rbar - ifirst) + 1 | ||||
|             ] | ||||
| 
 | ||||
|             if not slice_view.size: | ||||
|                 mxmn = None | ||||
| 
 | ||||
|             else: | ||||
|                 if self.is_ohlc: | ||||
|                     ylow = np.min(slice_view['low']) | ||||
|                     yhigh = np.max(slice_view['high']) | ||||
| 
 | ||||
|                 else: | ||||
|                     view = slice_view[self.name] | ||||
|                     ylow = np.min(view) | ||||
|                     yhigh = np.max(view) | ||||
| 
 | ||||
|                 mxmn = ylow, yhigh | ||||
| 
 | ||||
|             if mxmn is not None: | ||||
|                 # cache new mxmn result | ||||
|                 self._mxmns[rkey] = mxmn | ||||
| 
 | ||||
|             return mxmn | ||||
| 
 | ||||
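The essential shape of the `.maxmin()` cache above, minus the OHLC field handling (a sketch using module-level state for brevity):

    import numpy as np

    _mxmns: dict[tuple[int, int], tuple[float, float]] = {}

    def cached_maxmin(
        arr: np.ndarray,
        lbar: int,
        rbar: int,
    ) -> tuple[float, float]:
        # memoize per x-range so repeated autoscale queries for an
        # unchanged view are plain dict lookups
        key = (lbar, rbar)
        if key not in _mxmns:
            view = arr[lbar:rbar + 1]
            _mxmns[key] = (float(view.min()), float(view.max()))
        return _mxmns[key]

    data = np.arange(10.)
    assert cached_maxmin(data, 2, 5) == (2.0, 5.0)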
|     def view_range(self) -> tuple[int, int]: | ||||
|         ''' | ||||
|         Return the indexes in view for the associated | ||||
|         plot displaying this flow's data. | ||||
| 
 | ||||
|         ''' | ||||
|         vr = self.plot.viewRect() | ||||
|         return int(vr.left()), int(vr.right()) | ||||
| 
 | ||||
|     def datums_range(self) -> tuple[ | ||||
|         int, int, int, int, int, int | ||||
|     ]: | ||||
|         ''' | ||||
|         Return a range tuple for the datums present in view. | ||||
| 
 | ||||
|         ''' | ||||
|         l, r = self.view_range() | ||||
| 
 | ||||
|         # TODO: avoid this and have shm passed | ||||
|         # in earlier. | ||||
|         if self.shm is None: | ||||
|             # haven't initialized the flow yet | ||||
|             return (0, l, 0, 0, r, 0) | ||||
| 
 | ||||
|         array = self.shm.array | ||||
|         index = array['index'] | ||||
|         start = index[0] | ||||
|         end = index[-1] | ||||
|         lbar = max(l, start) | ||||
|         rbar = min(r, end) | ||||
|         return ( | ||||
|             start, l, lbar, rbar, r, end, | ||||
|         ) | ||||
| 
 | ||||
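The clamping in `datums_range()` above, with illustrative values: the in-view bar range is just the view edges clipped to the first/last recorded datum index:

    l, r = -50, 5000      # raw view edges (may overshoot the data)
    start, end = 0, 4096  # first/last recorded datum index
    lbar, rbar = max(l, start), min(r, end)
    assert (lbar, rbar) == (0, 4096)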
|     def read(self) -> tuple[ | ||||
|             int, int, np.ndarray, | ||||
|             int, int, np.ndarray, | ||||
|     ]: | ||||
|         # read call | ||||
|         array = self.shm.array | ||||
| 
 | ||||
|         indexes = array['index'] | ||||
|         ifirst = indexes[0] | ||||
|         ilast = indexes[-1] | ||||
| 
 | ||||
|         ifirst, l, lbar, rbar, r, ilast = self.datums_range() | ||||
| 
 | ||||
|         # get read-relative indices adjusting | ||||
|         # for master shm index. | ||||
|         lbar_i = max(l, ifirst) - ifirst | ||||
|         rbar_i = min(r, ilast) - ifirst | ||||
| 
 | ||||
|         # TODO: we could do it this way as well no? | ||||
|         # to_draw = array[lbar - ifirst:(rbar - ifirst) + 1] | ||||
|         in_view = array[lbar_i: rbar_i + 1] | ||||
| 
 | ||||
|         return ( | ||||
|             # abs indices + full data set | ||||
|             ifirst, ilast, array, | ||||
| 
 | ||||
|             # relative indices + in view datums | ||||
|             lbar_i, rbar_i, in_view, | ||||
|         ) | ||||
| 
 | ||||
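`read()` above translates absolute shm indices into buffer-relative slice offsets; a toy version:

    import numpy as np

    arr = np.zeros(5, dtype=[('index', int)])
    arr['index'] = np.arange(100, 105)
    ifirst = arr[0]['index']
    lbar, rbar = 101, 103
    in_view = arr[lbar - ifirst: rbar - ifirst + 1]
    assert list(in_view['index']) == [101, 102, 103]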
|     def update_graphics( | ||||
|         self, | ||||
|         use_vr: bool = True, | ||||
|         render: bool = True, | ||||
|         array_key: Optional[str] = None, | ||||
| 
 | ||||
|         profiler: Optional[pg.debug.Profiler] = None, | ||||
| 
 | ||||
|         **kwargs, | ||||
| 
 | ||||
|     ) -> pg.GraphicsObject: | ||||
|         ''' | ||||
|         Read latest datums from shm and (incrementally) | ||||
|         render to graphics. | ||||
| 
 | ||||
|         ''' | ||||
| 
 | ||||
|         profiler = profiler or pg.debug.Profiler( | ||||
|             msg=f'Flow.update_graphics() for {self.name}', | ||||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=ms_slower_then, | ||||
|             delayed=True, | ||||
|         ) | ||||
|         # shm read and slice to view | ||||
|         read = ( | ||||
|             xfirst, xlast, array, | ||||
|             ivl, ivr, in_view, | ||||
|         ) = self.read() | ||||
|         profiler('read src shm data') | ||||
| 
 | ||||
|         if ( | ||||
|             not in_view.size | ||||
|             or not render | ||||
|         ): | ||||
|             return self.graphics | ||||
| 
 | ||||
|         graphics = self.graphics | ||||
|         if isinstance(graphics, BarItems): | ||||
| 
 | ||||
|             # if no source data renderer exists create one. | ||||
|             r = self._src_r | ||||
|             if not r: | ||||
|                 # OHLC bars path renderer | ||||
|                 r = self._src_r = Renderer( | ||||
|                     flow=self, | ||||
|                     # TODO: rename this to something with ohlc | ||||
|                     draw_path=gen_qpath, | ||||
|                     last_read=read, | ||||
|                 ) | ||||
| 
 | ||||
|                 ds_curve_r = Renderer( | ||||
|                     flow=self, | ||||
| 
 | ||||
|                     # just swap in the flat view | ||||
|                     # data_t=lambda array: self.gy.array, | ||||
|                     last_read=read, | ||||
|                     draw_path=partial( | ||||
|                         rowarr_to_path, | ||||
|                         x_basis=None, | ||||
|                     ), | ||||
| 
 | ||||
|                 ) | ||||
|                 curve = FastAppendCurve( | ||||
|                     name='OHLC', | ||||
|                     color=graphics._color, | ||||
|                 ) | ||||
|                 curve.hide() | ||||
|                 self.plot.addItem(curve) | ||||
| 
 | ||||
|                 # baseline "line" downsampled OHLC curve that should | ||||
|                 # kick on only when we reach a certain uppx threshold. | ||||
|                 self._render_table[0] = ( | ||||
|                     ds_curve_r, | ||||
|                     curve, | ||||
|                 ) | ||||
| 
 | ||||
|             dsc_r, curve = self._render_table[0] | ||||
| 
 | ||||
|             # do checks for whether or not we require downsampling: | ||||
|             # - if we're **not** downsampling then we simply want to | ||||
|             #   render the bars graphics curve and update.. | ||||
|             # - if instead we are in a downsampling state then we need to | ||||
|             x_gt = 6 | ||||
|             uppx = curve.x_uppx() | ||||
|             in_line = should_line = curve.isVisible() | ||||
|             if ( | ||||
|                 should_line | ||||
|                 and uppx < x_gt | ||||
|             ): | ||||
|                 print('FLIPPING TO BARS') | ||||
|                 should_line = False | ||||
| 
 | ||||
|             elif ( | ||||
|                 not should_line | ||||
|                 and uppx >= x_gt | ||||
|             ): | ||||
|                 print('FLIPPING TO LINE') | ||||
|                 should_line = True | ||||
| 
 | ||||
|             profiler(f'ds logic complete line={should_line}') | ||||
| 
 | ||||
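The bars-vs-line flip above as a pure function, sketching the same hysteresis around `x_gt`:

    def should_render_line(
        uppx: float,
        currently_line: bool,
        x_gt: int = 6,
    ) -> bool:
        # flip to the flattened line once more than ~6 x-units share
        # a pixel (bars become unreadable), and back when zooming in
        if currently_line and uppx < x_gt:
            return False
        if not currently_line and uppx >= x_gt:
            return True
        return currently_line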
|             # do graphics updates | ||||
|             if should_line: | ||||
| 
 | ||||
|                 fields = ['open', 'high', 'low', 'close'] | ||||
|                 if self.gy is None: | ||||
|                     # create a flattened view onto the OHLC array | ||||
|                     # which can be read as a line-style format | ||||
|                     shm = self.shm | ||||
| 
 | ||||
|                     # flat = self.gy = self.shm.unstruct_view(fields) | ||||
|                     self.gy = self.shm.ustruct(fields) | ||||
|                     first = self._iflat_first = self.shm._first.value | ||||
|                     last = self._iflat_last = self.shm._last.value | ||||
| 
 | ||||
|                     # write pushed data to flattened copy | ||||
|                     self.gy[first:last] = rfn.structured_to_unstructured( | ||||
|                         self.shm.array[fields] | ||||
|                     ) | ||||
| 
 | ||||
|                     # generate a flat-interpolated x-domain | ||||
|                     self.gx = ( | ||||
|                         np.broadcast_to( | ||||
|                             shm._array['index'][:, None], | ||||
|                             ( | ||||
|                                 shm._array.size, | ||||
|                                 # 4,  # only ohlc | ||||
|                                 self.gy.shape[1], | ||||
|                             ), | ||||
|                         ) + np.array([-0.5, 0, 0, 0.5]) | ||||
|                     ) | ||||
|                     assert self.gy.any() | ||||
| 
 | ||||
|                 # print(f'unstruct diff: {time.time() - start}') | ||||
|                 # profiler('read unstr view bars to line') | ||||
|                 # start = self.gy._first.value | ||||
|                 # update flatted ohlc copy | ||||
|                 ( | ||||
|                     iflat_first, | ||||
|                     iflat, | ||||
|                     ishm_last, | ||||
|                     ishm_first, | ||||
|                 ) = ( | ||||
|                     self._iflat_first, | ||||
|                     self._iflat_last, | ||||
|                     self.shm._last.value, | ||||
|                     self.shm._first.value | ||||
|                 ) | ||||
| 
 | ||||
|                 # check for shm prepend updates since last read. | ||||
|                 if iflat_first != ishm_first: | ||||
| 
 | ||||
|                     # write newly prepended data to flattened copy | ||||
|                     self.gy[ | ||||
|                         ishm_first:iflat_first | ||||
|                     ] = rfn.structured_to_unstructured( | ||||
|                         self.shm._array[fields][ishm_first:iflat_first] | ||||
|                     ) | ||||
|                     self._iflat_first = ishm_first | ||||
| 
 | ||||
|                 #     # flat = self.gy = self.shm.unstruct_view(fields) | ||||
|                 #     self.gy = self.shm.ustruct(fields) | ||||
|                 #     # self._iflat_last = self.shm._last.value | ||||
| 
 | ||||
|                 #     # self._iflat_first = self.shm._first.value | ||||
|                 #     # do an update for the most recent prepend | ||||
|                 #     # index | ||||
|                 #     iflat = ishm_first | ||||
| 
 | ||||
|                 to_update = rfn.structured_to_unstructured( | ||||
|                     self.shm._array[iflat:ishm_last][fields] | ||||
|                 ) | ||||
| 
 | ||||
|                 self.gy[iflat:ishm_last][:] = to_update | ||||
|                 profiler('updated ustruct OHLC data') | ||||
| 
 | ||||
|                 # slice out up-to-last step contents | ||||
|                 y_flat = self.gy[ishm_first:ishm_last] | ||||
|                 x_flat = self.gx[ishm_first:ishm_last] | ||||
| 
 | ||||
|                 # update local last-index tracking | ||||
|                 self._iflat_last = ishm_last | ||||
| 
 | ||||
|                 # reshape to 1d for graphics rendering | ||||
|                 y = y_flat.reshape(-1) | ||||
|                 x = x_flat.reshape(-1) | ||||
|                 profiler('flattened ustruct OHLC data') | ||||
| 
 | ||||
|                 # do all the same for only in-view data | ||||
|                 y_iv_flat = y_flat[ivl:ivr] | ||||
|                 x_iv_flat = x_flat[ivl:ivr] | ||||
|                 y_iv = y_iv_flat.reshape(-1) | ||||
|                 x_iv = x_iv_flat.reshape(-1) | ||||
|                 profiler('flattened ustruct in-view OHLC data') | ||||
| 
 | ||||
|                 # legacy full-recompute-every-time method | ||||
|                 # x, y = ohlc_flatten(array) | ||||
|                 # x_iv, y_iv = ohlc_flatten(in_view) | ||||
|                 # profiler('flattened OHLC data') | ||||
| 
 | ||||
|                 curve.update_from_array( | ||||
|                     x, | ||||
|                     y, | ||||
|                     x_iv=x_iv, | ||||
|                     y_iv=y_iv, | ||||
|                     view_range=(ivl, ivr),  # hack | ||||
|                     profiler=profiler, | ||||
|                     # should_redraw=False, | ||||
| 
 | ||||
|                     # NOTE: already passed through by display loop? | ||||
|                     # do_append=uppx < 16, | ||||
|                     **kwargs, | ||||
|                 ) | ||||
|                 curve.show() | ||||
|                 profiler('updated ds curve') | ||||
| 
 | ||||
|             else: | ||||
|                 # render incremental or in-view update | ||||
|                 # and apply output (path) to graphics. | ||||
|                 path, last = r.render( | ||||
|                     read, | ||||
|                     only_in_view=True, | ||||
|                 ) | ||||
| 
 | ||||
|                 graphics.path = path | ||||
|                 graphics.draw_last(last) | ||||
| 
 | ||||
|                 # NOTE: on appends we used to have to flip the coords | ||||
|                 # cache though it doesn't seem to be required any more? | ||||
|                 # graphics.setCacheMode(QtWidgets.QGraphicsItem.NoCache) | ||||
|                 # graphics.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|                 # graphics.prepareGeometryChange() | ||||
|                 graphics.update() | ||||
| 
 | ||||
|             if ( | ||||
|                 not in_line | ||||
|                 and should_line | ||||
|             ): | ||||
|                 # change to line graphic | ||||
| 
 | ||||
|                 log.info( | ||||
|                     f'downsampling to line graphic {self.name}' | ||||
|                 ) | ||||
|                 graphics.hide() | ||||
|                 # graphics.update() | ||||
|                 curve.show() | ||||
|                 curve.update() | ||||
| 
 | ||||
|             elif in_line and not should_line: | ||||
|                 log.info(f'showing bars graphic {self.name}') | ||||
|                 curve.hide() | ||||
|                 graphics.show() | ||||
|                 graphics.update() | ||||
| 
 | ||||
|             #   update our pre-downsample-ready data and then pass that | ||||
|             #   new data to the downsampler algo for an incremental update. | ||||
| 
 | ||||
|                 # graphics.update_from_array( | ||||
|                 #     array, | ||||
|                 #     in_view, | ||||
|                 #     view_range=(ivl, ivr) if use_vr else None, | ||||
| 
 | ||||
|                 #     **kwargs, | ||||
|                 # ) | ||||
| 
 | ||||
|                 # generate and apply path to graphics obj | ||||
|                 # graphics.path, last = r.render( | ||||
|                 #     read, | ||||
|                 #     only_in_view=True, | ||||
|                 # ) | ||||
|                 # graphics.draw_last(last) | ||||
| 
 | ||||
|         else: | ||||
|             # ``FastAppendCurve`` case: | ||||
|             array_key = array_key or self.name | ||||
|             uppx = graphics.x_uppx() | ||||
| 
 | ||||
|             if graphics._step_mode and self.gy is None: | ||||
|                 self._iflat_first = self.shm._first.value | ||||
| 
 | ||||
|                 # create a flattened view onto the OHLC array | ||||
|                 # which can be read as a line-style format | ||||
|                 shm = self.shm | ||||
| 
 | ||||
|                 # fields = ['index', array_key] | ||||
|                 i = shm._array['index'].copy() | ||||
|                 out = shm._array[array_key].copy() | ||||
| 
 | ||||
|                 self.gx = np.broadcast_to( | ||||
|                     i[:, None], | ||||
|                     (i.size, 2), | ||||
|                 ) + np.array([-0.5, 0.5]) | ||||
| 
 | ||||
|                 # self.gy = np.broadcast_to( | ||||
|                 #     out[:, None], (out.size, 2), | ||||
|                 # ) | ||||
|                 self.gy = np.empty((len(out), 2), dtype=out.dtype) | ||||
|                 self.gy[:] = out[:, np.newaxis] | ||||
| 
 | ||||
|                 # start y at origin level | ||||
|                 self.gy[0, 0] = 0 | ||||
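| 
 | ||||
|                 # NOTE: a minimal sketch of the step flattening above, | ||||
|                 # assuming 3 datums y = [1, 2, 3] at indexes [0, 1, 2]: | ||||
|                 # | ||||
|                 #   >>> import numpy as np | ||||
|                 #   >>> i = np.array([0, 1, 2]) | ||||
|                 #   >>> gx = np.broadcast_to( | ||||
|                 #   ...     i[:, None], (i.size, 2), | ||||
|                 #   ... ) + np.array([-0.5, 0.5]) | ||||
|                 #   >>> gx.reshape(-1) | ||||
|                 #   array([-0.5,  0.5,  0.5,  1.5,  1.5,  2.5]) | ||||
|                 #   >>> np.repeat(np.array([1., 2., 3.]), 2) | ||||
|                 #   array([1., 1., 2., 2., 3., 3.]) | ||||
|                 # | ||||
|                 # i.e. each datum renders as a flat segment from i-0.5 | ||||
|                 # to i+0.5 which gives the "step" look once drawn. | ||||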
| 
 | ||||
|             if graphics._step_mode: | ||||
|                 ( | ||||
|                     iflat_first, | ||||
|                     iflat, | ||||
|                     ishm_last, | ||||
|                     ishm_first, | ||||
|                 ) = ( | ||||
|                     self._iflat_first, | ||||
|                     self._iflat_last, | ||||
|                     self.shm._last.value, | ||||
|                     self.shm._first.value | ||||
|                 ) | ||||
| 
 | ||||
|                 il = max(iflat - 1, 0) | ||||
| 
 | ||||
|                 # check for shm prepend updates since last read. | ||||
|                 if iflat_first != ishm_first: | ||||
| 
 | ||||
|                     print(f'prepend {array_key}') | ||||
| 
 | ||||
|                     # i_prepend = self.shm._array['index'][ | ||||
|                     #   ishm_first:iflat_first] | ||||
|                     y_prepend = self.shm._array[array_key][ | ||||
|                         ishm_first:iflat_first | ||||
|                     ] | ||||
| 
 | ||||
|                     y2_prepend = np.broadcast_to( | ||||
|                         y_prepend[:, None], (y_prepend.size, 2), | ||||
|                     ) | ||||
| 
 | ||||
|                     # write newly prepended data to flattened copy | ||||
|                     self.gy[ishm_first:iflat_first] = y2_prepend | ||||
|                     self._iflat_first = ishm_first | ||||
| 
 | ||||
|                 append_diff = ishm_last - iflat | ||||
|                 if append_diff: | ||||
| 
 | ||||
|                     # slice up to the last datum since last index/append update | ||||
|                     # new_x = self.shm._array[il:ishm_last]['index'] | ||||
|                     new_y = self.shm._array[il:ishm_last][array_key] | ||||
| 
 | ||||
|                     new_y2 = np.broadcast_to( | ||||
|                         new_y[:, None], (new_y.size, 2), | ||||
|                     ) | ||||
|                     self.gy[il:ishm_last] = new_y2 | ||||
|                     profiler('updated step curve data') | ||||
| 
 | ||||
|                     # print( | ||||
|                     #     f'append size: {append_diff}\n' | ||||
|                     #     f'new_x: {new_x}\n' | ||||
|                     #     f'new_y: {new_y}\n' | ||||
|                     #     f'new_y2: {new_y2}\n' | ||||
|                     #     f'new gy: {gy}\n' | ||||
|                     # ) | ||||
| 
 | ||||
|                     # update local last-index tracking | ||||
|                     self._iflat_last = ishm_last | ||||
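| 
 | ||||
|                     # NOTE: e.g. if shm ``_last`` advanced from 100 -> | ||||
|                     # 103 since our previous read then ``append_diff`` | ||||
|                     # is 3 and, since ``il`` backs up one datum, the | ||||
|                     # slice above also re-writes the prior "last" row | ||||
|                     # which may have mutated in-place before the index | ||||
|                     # was incremented. | ||||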
| 
 | ||||
|                 # slice out up-to-last step contents | ||||
|                 x_step = self.gx[ishm_first:ishm_last+2] | ||||
|                 # shape to 1d | ||||
|                 x = x_step.reshape(-1) | ||||
| 
 | ||||
|                 y_step = self.gy[ishm_first:ishm_last+2] | ||||
|                 lasts = self.shm.array[['index', array_key]] | ||||
|                 last = lasts[array_key][-1] | ||||
|                 y_step[-1] = last | ||||
|                 # shape to 1d | ||||
|                 y = y_step.reshape(-1) | ||||
| 
 | ||||
|                 # s = 6 | ||||
|                 # print(f'lasts: {x[-2*s:]}, {y[-2*s:]}') | ||||
| 
 | ||||
|                 profiler('sliced step data') | ||||
| 
 | ||||
|                 # do all the same for only in-view data | ||||
|                 ys_iv = y_step[ivl:ivr+1] | ||||
|                 xs_iv = x_step[ivl:ivr+1] | ||||
|                 y_iv = ys_iv.reshape(ys_iv.size) | ||||
|                 x_iv = xs_iv.reshape(xs_iv.size) | ||||
|                 # print( | ||||
|                 #     f'ys_iv : {ys_iv[-s:]}\n' | ||||
|                 #     f'y_iv: {y_iv[-s:]}\n' | ||||
|                 #     f'xs_iv: {xs_iv[-s:]}\n' | ||||
|                 #     f'x_iv: {x_iv[-s:]}\n' | ||||
|                 # ) | ||||
|                 profiler('flattened ustruct in-view OHLC data') | ||||
| 
 | ||||
|                 # legacy full-recompute-every-time method | ||||
|                 # x, y = ohlc_flatten(array) | ||||
|                 # x_iv, y_iv = ohlc_flatten(in_view) | ||||
|                 # profiler('flattened OHLC data') | ||||
| 
 | ||||
|                 x_last = array['index'][-1] | ||||
|                 y_last = array[array_key][-1] | ||||
|                 graphics._last_line = QLineF( | ||||
|                     x_last - 0.5, 0, | ||||
|                     x_last + 0.5, 0, | ||||
|                 ) | ||||
|                 graphics._last_step_rect = QRectF( | ||||
|                     x_last - 0.5, 0, | ||||
|                     x_last + 0.5, y_last, | ||||
|                 ) | ||||
|                 # graphics.update() | ||||
| 
 | ||||
|                 graphics.update_from_array( | ||||
|                     x=x, | ||||
|                     y=y, | ||||
| 
 | ||||
|                     x_iv=x_iv, | ||||
|                     y_iv=y_iv, | ||||
| 
 | ||||
|                     view_range=(ivl, ivr) if use_vr else None, | ||||
| 
 | ||||
|                     draw_last=False, | ||||
|                     slice_to_head=-2, | ||||
| 
 | ||||
|                     should_redraw=bool(append_diff), | ||||
| 
 | ||||
|                     # NOTE: already passed through by display loop? | ||||
|                     # do_append=uppx < 16, | ||||
| 
 | ||||
|                     **kwargs | ||||
|                 ) | ||||
|                 # graphics.reset_cache() | ||||
|                 # print( | ||||
|                 #     f"path br: {graphics.path.boundingRect()}\n", | ||||
|                 #     # f"fast path br: {graphics.fast_path.boundingRect()}", | ||||
|                 #     f"last rect br: {graphics._last_step_rect}\n", | ||||
|                 #     f"full br: {graphics._br}\n", | ||||
|                 # ) | ||||
| 
 | ||||
|             else: | ||||
|                 x = array['index'] | ||||
|                 y = array[array_key] | ||||
|                 x_iv = in_view['index'] | ||||
|                 y_iv = in_view[array_key] | ||||
| 
 | ||||
|                 # graphics.draw_last(x, y) | ||||
|                 profiler(f'draw last segment {array_key}') | ||||
| 
 | ||||
|                 graphics.update_from_array( | ||||
|                     x=x, | ||||
|                     y=y, | ||||
| 
 | ||||
|                     x_iv=x_iv, | ||||
|                     y_iv=y_iv, | ||||
| 
 | ||||
|                     view_range=(ivl, ivr) if use_vr else None, | ||||
| 
 | ||||
|                     # NOTE: already passed through by display loop? | ||||
|                     # do_append=uppx < 16, | ||||
|                     **kwargs | ||||
|                 ) | ||||
| 
 | ||||
|         return graphics | ||||
| 
 | ||||
| 
 | ||||
| def xy_downsample( | ||||
|     x, | ||||
|     y, | ||||
|     px_width, | ||||
|     uppx, | ||||
| 
 | ||||
|     x_spacer: float = 0.5, | ||||
| 
 | ||||
| ) -> tuple[np.ndarray, np.ndarray]: | ||||
| 
 | ||||
|     # downsample whenever more than 1 pixel per datum can be shown. | ||||
|     # always refresh data bounds until we get diffing | ||||
|     # working properly, see above.. | ||||
|     bins, x, y = ds_m4( | ||||
|         x, | ||||
|         y, | ||||
|         px_width=px_width, | ||||
|         uppx=uppx, | ||||
|         log_scale=bool(uppx) | ||||
|     ) | ||||
| 
 | ||||
|     # flatten output to 1d arrays suitable for path-graphics generation. | ||||
|     x = np.broadcast_to(x[:, None], y.shape) | ||||
|     x = (x + np.array( | ||||
|         [-x_spacer, 0, 0, x_spacer] | ||||
|     )).flatten() | ||||
|     y = y.flatten() | ||||
| 
 | ||||
|     return x, y | ||||
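| 
 | ||||
| # NOTE: a rough usage sketch for the above; ``ds_m4()`` bins the | ||||
| # input such that each bin keeps 4 y-points, so ``y`` comes back | ||||
| # shaped ``(n_bins, 4)`` and the broadcast + ``x_spacer`` offsets | ||||
| # re-flatten both arrays to 1d for path generation. The | ||||
| # ``chart_px_width`` and ``view`` names are hypothetical stand-ins: | ||||
| # | ||||
| #   x_ds, y_ds = xy_downsample( | ||||
| #       x, y, | ||||
| #       px_width=chart_px_width, | ||||
| #       uppx=view.x_uppx(), | ||||
| #   ) | ||||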
| 
 | ||||
| 
 | ||||
| class Renderer(msgspec.Struct): | ||||
| 
 | ||||
|     flow: Flow | ||||
| 
 | ||||
|     # called to render path graphics | ||||
|     draw_path: Callable[[np.ndarray], QPainterPath] | ||||
| 
 | ||||
|     # called on input data but before any graphics format | ||||
|     # conversions or processing. | ||||
|     data_t: Optional[Callable[[ShmArray], np.ndarray]] = None | ||||
|     data_t_shm: Optional[ShmArray] = None | ||||
| 
 | ||||
|     # called on the final data (transform) output to convert | ||||
|     # to "graphical data form" a format that can be passed to | ||||
|     # the ``.draw()`` implementation. | ||||
|     graphics_t: Optional[Callable[[ShmArray], np.ndarray]] = None | ||||
|     graphics_t_shm: Optional[ShmArray] = None | ||||
| 
 | ||||
|     # path graphics update implementation methods | ||||
|     prepend_fn: Optional[Callable[[QPainterPath], QPainterPath]] = None | ||||
|     append_fn: Optional[Callable[[QPainterPath], QPainterPath]] = None | ||||
| 
 | ||||
|     # last array view read | ||||
|     last_read: Optional[np.ndarray] = None | ||||
| 
 | ||||
|     # output graphics rendering, the main object | ||||
|     # processed in ``QGraphicsObject.paint()`` | ||||
|     path: Optional[QPainterPath] = None | ||||
| 
 | ||||
|     # def diff( | ||||
|     #     self, | ||||
|     #     latest_read: tuple[np.ndarray], | ||||
| 
 | ||||
|     # ) -> tuple[np.ndarray]: | ||||
|     #     # blah blah blah | ||||
|     #     # do diffing for prepend, append and last entry | ||||
|     #     return ( | ||||
|     #         to_prepend | ||||
|     #         to_append | ||||
|     #         last, | ||||
|     #     ) | ||||
| 
 | ||||
|     def render( | ||||
|         self, | ||||
| 
 | ||||
|         new_read, | ||||
| 
 | ||||
|         # only render datums "in view" of the ``ChartView`` | ||||
|         only_in_view: bool = False, | ||||
| 
 | ||||
|     ) -> tuple[QPainterPath, np.ndarray]: | ||||
|         ''' | ||||
|         Render the current graphics path(s) | ||||
| 
 | ||||
|         There are (at least) 3 stages from source data to graphics data: | ||||
|         - a data transform (which can be stored in additional shm) | ||||
|         - a graphics transform which converts discrete basis data to | ||||
|           a `float`-basis view-coords graphics basis. (eg. ``ohlc_flatten()``, | ||||
|           ``step_path_arrays_from_1d()``, etc.) | ||||
| 
 | ||||
|         - blah blah blah (from notes) | ||||
| 
 | ||||
|         ''' | ||||
|         # do full source data render to path | ||||
|         ( | ||||
|             xfirst, xlast, array, | ||||
|             ivl, ivr, in_view, | ||||
|         ) = self.last_read | ||||
| 
 | ||||
|         if only_in_view: | ||||
|             array = in_view | ||||
|             # # get latest data from flow shm | ||||
|             # self.last_read = ( | ||||
|             #     xfirst, xlast, array, ivl, ivr, in_view | ||||
|             # ) = new_read | ||||
| 
 | ||||
|         if self.path is None or only_in_view: | ||||
|             # redraw the entire source data if we have either of: | ||||
|             # - no prior path graphic rendered or, | ||||
|             # - we always intend to re-render the data only in view | ||||
| 
 | ||||
|             # data transform: convert source data to a format | ||||
|             # expected to be incrementally updated and later rendered | ||||
|             # to a more graphics native format. | ||||
|             if self.data_t: | ||||
|                 array = self.data_t(array) | ||||
| 
 | ||||
|                 # maybe allocate shm for data transform output | ||||
|                 # if self.data_t_shm is None: | ||||
|                 #     fshm = self.flow.shm | ||||
| 
 | ||||
|                 #     shm, opened = maybe_open_shm_array( | ||||
|                 #         f'{self.flow.name}_data_t', | ||||
|                 #         # TODO: create entry for each time frame | ||||
|                 #         dtype=array.dtype, | ||||
|                 #         readonly=False, | ||||
|                 #     ) | ||||
|                 #     assert opened | ||||
|                 #     shm.push(array) | ||||
|                 #     self.data_t_shm = shm | ||||
| 
 | ||||
|         elif self.path: | ||||
|             print(f'incremental update not supported yet {self.flow.name}') | ||||
|             # TODO: do incremental update | ||||
|             # prepend, append, last = self.diff(self.flow.read()) | ||||
| 
 | ||||
|             # do path generation for each segment | ||||
|             # and then push into graphics object. | ||||
| 
 | ||||
|         hist, last = array[:-1], array[-1] | ||||
| 
 | ||||
|         # call path render func on history | ||||
|         self.path = self.draw_path(hist) | ||||
| 
 | ||||
|         self.last_read = new_read | ||||
|         return self.path, last | ||||
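| 
 | ||||
|     # NOTE: a rough usage sketch mirroring the flow-update code | ||||
|     # above; the ``flow.read()`` call and the ``graphics`` handle | ||||
|     # are assumptions here: | ||||
|     # | ||||
|     #   r = Renderer(flow=flow, draw_path=gen_qpath) | ||||
|     #   r.last_read = flow.read()  # prime the last-read state | ||||
|     #   path, last = r.render(flow.read(), only_in_view=True) | ||||
|     #   graphics.path = path | ||||
|     #   graphics.draw_last(last) | ||||
|     #   graphics.update() | ||||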
|  | @ -75,6 +75,7 @@ def update_fsp_chart( | |||
|     flow, | ||||
|     graphics_name: str, | ||||
|     array_key: Optional[str], | ||||
|     **kwargs, | ||||
| 
 | ||||
| ) -> None: | ||||
| 
 | ||||
|  | @ -93,10 +94,10 @@ def update_fsp_chart( | |||
|     # update graphics | ||||
|     # NOTE: this does a length check internally which allows it | ||||
|     # to stay above the last row check below.. | ||||
|     chart.update_graphics_from_array( | ||||
|     chart.update_graphics_from_flow( | ||||
|         graphics_name, | ||||
|         array, | ||||
|         array_key=array_key or graphics_name, | ||||
|         **kwargs, | ||||
|     ) | ||||
| 
 | ||||
|     # XXX: re: ``array_key``: fsp func names must be unique meaning we | ||||
|  | @ -106,9 +107,6 @@ def update_fsp_chart( | |||
|     # read from last calculated value and update any label | ||||
|     last_val_sticky = chart._ysticks.get(graphics_name) | ||||
|     if last_val_sticky: | ||||
|         # array = shm.array[array_key] | ||||
|         # if len(array): | ||||
|         #     value = array[-1] | ||||
|         last = last_row[array_key] | ||||
|         last_val_sticky.update_from_data(-1, last) | ||||
| 
 | ||||
|  | @ -246,20 +244,18 @@ async def run_fsp_ui( | |||
| 
 | ||||
|             chart.draw_curve( | ||||
|                 name=name, | ||||
|                 data=shm.array, | ||||
|                 shm=shm, | ||||
|                 overlay=True, | ||||
|                 color='default_light', | ||||
|                 array_key=name, | ||||
|                 **conf.get('chart_kwargs', {}) | ||||
|             ) | ||||
|             # specially store ref to shm for lookup in display loop | ||||
|             chart._flows[name].shm = shm | ||||
| 
 | ||||
|         else: | ||||
|             # create a new sub-chart widget for this fsp | ||||
|             chart = linkedsplits.add_plot( | ||||
|                 name=name, | ||||
|                 array=shm.array, | ||||
|                 shm=shm, | ||||
| 
 | ||||
|                 array_key=name, | ||||
|                 sidepane=sidepane, | ||||
|  | @ -271,12 +267,6 @@ async def run_fsp_ui( | |||
|                 **conf.get('chart_kwargs', {}) | ||||
|             ) | ||||
| 
 | ||||
|             # XXX: ONLY for sub-chart fsps, overlays have their | ||||
|             # data looked up from the chart's internal array set. | ||||
|             # TODO: we must get a data view api going STAT!! | ||||
|             chart._shm = shm | ||||
|             chart._flows[chart.data_key].shm = shm | ||||
| 
 | ||||
|             # should **not** be the same sub-chart widget | ||||
|             assert chart.name != linkedsplits.chart.name | ||||
| 
 | ||||
|  | @ -626,7 +616,7 @@ async def open_vlm_displays( | |||
|         shm = ohlcv | ||||
|         chart = linked.add_plot( | ||||
|             name='volume', | ||||
|             array=shm.array, | ||||
|             shm=shm, | ||||
| 
 | ||||
|             array_key='volume', | ||||
|             sidepane=sidepane, | ||||
|  | @ -639,7 +629,6 @@ async def open_vlm_displays( | |||
|             # the curve item internals are pretty convoluted. | ||||
|             style='step', | ||||
|         ) | ||||
|         chart._flows['volume'].shm = ohlcv | ||||
| 
 | ||||
|         # force 0 to always be in view | ||||
|         def maxmin( | ||||
|  | @ -666,11 +655,6 @@ async def open_vlm_displays( | |||
|         # chart.hideAxis('right') | ||||
|         # chart.showAxis('left') | ||||
| 
 | ||||
|         # XXX: ONLY for sub-chart fsps, overlays have their | ||||
|         # data looked up from the chart's internal array set. | ||||
|         # TODO: we must get a data view api going STAT!! | ||||
|         chart._shm = shm | ||||
| 
 | ||||
|         # send back new chart to caller | ||||
|         task_status.started(chart) | ||||
| 
 | ||||
|  | @ -685,9 +669,9 @@ async def open_vlm_displays( | |||
| 
 | ||||
|         last_val_sticky.update_from_data(-1, value) | ||||
| 
 | ||||
|         vlm_curve = chart.update_graphics_from_array( | ||||
|         vlm_curve = chart.update_graphics_from_flow( | ||||
|             'volume', | ||||
|             shm.array, | ||||
|             # shm.array, | ||||
|         ) | ||||
| 
 | ||||
|         # size view to data once at outset | ||||
|  | @ -795,9 +779,8 @@ async def open_vlm_displays( | |||
|                         color = 'bracket' | ||||
| 
 | ||||
|                     curve, _ = chart.draw_curve( | ||||
|                         # name='dolla_vlm', | ||||
|                         name=name, | ||||
|                         data=shm.array, | ||||
|                         shm=shm, | ||||
|                         array_key=name, | ||||
|                         overlay=pi, | ||||
|                         color=color, | ||||
|  | @ -812,7 +795,6 @@ async def open_vlm_displays( | |||
|                     # ``.draw_curve()``. | ||||
|                     flow = chart._flows[name] | ||||
|                     assert flow.plot is pi | ||||
|                     flow.shm = shm | ||||
| 
 | ||||
|             chart_curves( | ||||
|                 fields, | ||||
|  | @ -847,7 +829,9 @@ async def open_vlm_displays( | |||
|             # liquidity events (well at least on low OHLC periods - 1s). | ||||
|             vlm_curve.hide() | ||||
|             chart.removeItem(vlm_curve) | ||||
|             chart._flows.pop('volume') | ||||
|             vflow = chart._flows['volume'] | ||||
|             vflow.render = False | ||||
| 
 | ||||
|             # avoid range sorting on volume once disabled | ||||
|             chart.view.disable_auto_yrange() | ||||
| 
 | ||||
|  | @ -902,10 +886,10 @@ async def open_vlm_displays( | |||
| 
 | ||||
|         # built-in vlm fsps | ||||
|         for target, conf in { | ||||
|             tina_vwap: { | ||||
|                 'overlay': 'ohlc',  # overlays with OHLCV (main) chart | ||||
|                 'anchor': 'session', | ||||
|             }, | ||||
|             # tina_vwap: { | ||||
|             #     'overlay': 'ohlc',  # overlays with OHLCV (main) chart | ||||
|             #     'anchor': 'session', | ||||
|             # }, | ||||
|         }.items(): | ||||
|             started = await admin.open_fsp_chart( | ||||
|                 target, | ||||
|  |  | |||
|  | @ -20,7 +20,6 @@ Chart view box primitives | |||
| """ | ||||
| from __future__ import annotations | ||||
| from contextlib import asynccontextmanager | ||||
| # import itertools | ||||
| import time | ||||
| from typing import Optional, Callable | ||||
| 
 | ||||
|  | @ -35,10 +34,9 @@ import trio | |||
| 
 | ||||
| from ..log import get_logger | ||||
| from .._profile import pg_profile_enabled, ms_slower_then | ||||
| from ._style import _min_points_to_show | ||||
| # from ._style import _min_points_to_show | ||||
| from ._editors import SelectRect | ||||
| from . import _event | ||||
| from ._ohlc import BarItems | ||||
| 
 | ||||
| 
 | ||||
| log = get_logger(__name__) | ||||
|  | @ -486,15 +484,18 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         # don't zoom more than the min points setting | ||||
|         l, lbar, rbar, r = chart.bars_range() | ||||
|         vl = r - l | ||||
|         # vl = r - l | ||||
| 
 | ||||
|         if ev.delta() > 0 and vl <= _min_points_to_show: | ||||
|             log.debug("Max zoom bruh...") | ||||
|             return | ||||
|         # if ev.delta() > 0 and vl <= _min_points_to_show: | ||||
|         #     log.debug("Max zoom bruh...") | ||||
|         #     return | ||||
| 
 | ||||
|         if ev.delta() < 0 and vl >= len(chart._arrays[chart.name]) + 666: | ||||
|             log.debug("Min zoom bruh...") | ||||
|             return | ||||
|         # if ( | ||||
|         #     ev.delta() < 0 | ||||
|         #     and vl >= len(chart._flows[chart.name].shm.array) + 666 | ||||
|         # ): | ||||
|         #     log.debug("Min zoom bruh...") | ||||
|         #     return | ||||
| 
 | ||||
|         # actual scaling factor | ||||
|         s = 1.015 ** (ev.delta() * -1 / 20)  # self.state['wheelScaleFactor']) | ||||
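| 
 | ||||
|         # NOTE: e.g. one wheel notch normally reports | ||||
|         # ``ev.delta() == 120`` (the Qt convention) giving | ||||
|         # s = 1.015 ** (-120 / 20) = 1.015 ** -6 ~= 0.915, | ||||
|         # i.e. the view range shrinks ~8.5% per notch (a zoom | ||||
|         # in) while the opposite scroll direction (-120) yields | ||||
|         # ~1.093 (a zoom out). | ||||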
|  | @ -568,6 +569,17 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|             self._resetTarget() | ||||
|             self.scaleBy(s, focal) | ||||
| 
 | ||||
|             # XXX: without this it seems as though sometimes | ||||
|             # when zooming in from far out (and maybe vice versa?) | ||||
|             # the signal isn't fired enough: if you pan | ||||
|             # just after, you'll see further downsampling code run | ||||
|             # (pretty noticeable on the OHLC ds curve), but with this | ||||
|             # that never seems to happen? The only question is how much | ||||
|             # latency this "double work" adds when the missing event | ||||
|             # fires don't actually occur? | ||||
|             self.maybe_downsample_graphics() | ||||
| 
 | ||||
|             self.sigRangeChangedManually.emit(mask) | ||||
| 
 | ||||
|             # self._ic.set() | ||||
|  | @ -734,7 +746,7 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         # flag to prevent triggering sibling charts from the same linked | ||||
|         # set and causing recursion errors. | ||||
|         autoscale_linked_plots: bool = True, | ||||
|         autoscale_linked_plots: bool = False, | ||||
|         name: Optional[str] = None, | ||||
|         # autoscale_overlays: bool = False, | ||||
| 
 | ||||
|  | @ -748,6 +760,7 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         ''' | ||||
|         profiler = pg.debug.Profiler( | ||||
|             msg=f'`ChartView._set_yrange()`: `{self.name}`', | ||||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=ms_slower_then, | ||||
|             delayed=True, | ||||
|  | @ -777,9 +790,15 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|         # calculate max, min y values in viewable x-range from data. | ||||
|         # Make sure min bars/datums on screen is adhered to. | ||||
|         else: | ||||
|             br = bars_range or chart.bars_range() | ||||
|             profiler(f'got bars range: {br}') | ||||
|         # else: | ||||
|             # TODO: eventually we should point to the | ||||
|             # ``FlowsTable`` (or wtv) which should perform | ||||
|             # the group operations? | ||||
| 
 | ||||
|             # flow = chart._flows[name or chart.name] | ||||
|             # br = bars_range or chart.bars_range() | ||||
|             # profiler(f'got bars range: {br}') | ||||
| 
 | ||||
|             # TODO: maybe should be a method on the | ||||
|             # chart widget/item? | ||||
|  | @ -795,7 +814,7 @@ class ChartView(ViewBox): | |||
|             #     for chart in plots: | ||||
|             #         if chart and not chart._static_yrange: | ||||
|             #             chart.cv._set_yrange( | ||||
|             #                 bars_range=br, | ||||
|             #                 # bars_range=br, | ||||
|             #                 autoscale_linked_plots=False, | ||||
|             #             ) | ||||
|             #     profiler('autoscaled linked plots') | ||||
|  | @ -809,6 +828,7 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|                 if yrange is None: | ||||
|                     log.warning(f'No yrange provided for {self.name}!?') | ||||
|                     print(f"WTF NO YRANGE {self.name}") | ||||
|                     return | ||||
| 
 | ||||
|             ylow, yhigh = yrange | ||||
|  | @ -830,6 +850,8 @@ class ChartView(ViewBox): | |||
|             self.setYRange(ylow, yhigh) | ||||
|             profiler(f'set limits: {(ylow, yhigh)}') | ||||
| 
 | ||||
|         profiler.finish() | ||||
| 
 | ||||
|     def enable_auto_yrange( | ||||
|         self, | ||||
|         src_vb: Optional[ChartView] = None, | ||||
|  | @ -843,17 +865,9 @@ class ChartView(ViewBox): | |||
|         if src_vb is None: | ||||
|             src_vb = self | ||||
| 
 | ||||
|         # such that when a linked chart changes its range | ||||
|         # this local view is also automatically changed and | ||||
|         # resized to data. | ||||
|         src_vb.sigXRangeChanged.connect(self._set_yrange) | ||||
| 
 | ||||
|         # splitter(s) resizing | ||||
|         src_vb.sigResized.connect(self._set_yrange) | ||||
| 
 | ||||
|         # mouse wheel doesn't emit XRangeChanged | ||||
|         src_vb.sigRangeChangedManually.connect(self._set_yrange) | ||||
| 
 | ||||
|         # TODO: a smarter way to avoid calling this needlessly? | ||||
|         # 2 things i can think of: | ||||
|         # - register downsample-able graphics specially and only | ||||
|  | @ -864,15 +878,16 @@ class ChartView(ViewBox): | |||
|             self.maybe_downsample_graphics | ||||
|         ) | ||||
| 
 | ||||
|     def disable_auto_yrange( | ||||
|         self, | ||||
|     ) -> None: | ||||
|         # mouse wheel doesn't emit XRangeChanged | ||||
|         src_vb.sigRangeChangedManually.connect(self._set_yrange) | ||||
| 
 | ||||
|         # self._chart._static_yrange = 'axis' | ||||
|         # src_vb.sigXRangeChanged.connect(self._set_yrange) | ||||
|         # src_vb.sigXRangeChanged.connect( | ||||
|         #     self.maybe_downsample_graphics | ||||
|         # ) | ||||
| 
 | ||||
|     def disable_auto_yrange(self) -> None: | ||||
| 
 | ||||
|         self.sigXRangeChanged.disconnect( | ||||
|             self._set_yrange, | ||||
|         ) | ||||
|         self.sigResized.disconnect( | ||||
|             self._set_yrange, | ||||
|         ) | ||||
|  | @ -883,6 +898,11 @@ class ChartView(ViewBox): | |||
|             self._set_yrange, | ||||
|         ) | ||||
| 
 | ||||
|         # self.sigXRangeChanged.disconnect(self._set_yrange) | ||||
|         # self.sigXRangeChanged.disconnect( | ||||
|         #     self.maybe_downsample_graphics | ||||
|         # ) | ||||
| 
 | ||||
|     def x_uppx(self) -> float: | ||||
|         ''' | ||||
|         Return the "number of x units" within a single | ||||
|  | @ -890,7 +910,7 @@ class ChartView(ViewBox): | |||
|         graphics items which are our children. | ||||
| 
 | ||||
|         ''' | ||||
|         graphics = list(self._chart._graphics.values()) | ||||
|         graphics = [f.graphics for f in self._chart._flows.values()] | ||||
|         if not graphics: | ||||
|             return 0 | ||||
| 
 | ||||
|  | @ -903,23 +923,17 @@ class ChartView(ViewBox): | |||
| 
 | ||||
|     def maybe_downsample_graphics(self): | ||||
| 
 | ||||
|         uppx = self.x_uppx() | ||||
|         if ( | ||||
|             # we probably want to drop this once we are "drawing in | ||||
|             # view" for downsampled flows.. | ||||
|             uppx and uppx > 16 | ||||
|             and self._ic is not None | ||||
|         ): | ||||
|             # don't bother updating since we're zoomed out bigly and | ||||
|             # in a pan-interaction, in which case we shouldn't be | ||||
|             # doing view-range based rendering (at least not yet). | ||||
|             # print(f'{uppx} exiting early!') | ||||
|             return | ||||
| 
 | ||||
|         profiler = pg.debug.Profiler( | ||||
|             msg=f'ChartView.maybe_downsample_graphics() for {self.name}', | ||||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=3, | ||||
|             delayed=True, | ||||
| 
 | ||||
|             # XXX: important, otherwise the nested | ||||
|             # ``.update_graphics_from_flow()`` profiling is likely | ||||
|             # not shown, due to the way delaying works and the | ||||
|             # profiler being garbage collected in the delegated | ||||
|             # method calls. | ||||
|             delayed=False, | ||||
|             # gt=3, | ||||
|             gt=ms_slower_then, | ||||
|         ) | ||||
| 
 | ||||
|         # TODO: a faster single-loop-iterator way of doing this XD | ||||
|  | @ -928,19 +942,25 @@ class ChartView(ViewBox): | |||
|         plots = linked.subplots | {chart.name: chart} | ||||
|         for chart_name, chart in plots.items(): | ||||
|             for name, flow in chart._flows.items(): | ||||
|                 graphics = flow.graphics | ||||
| 
 | ||||
|                 use_vr = False | ||||
|                 if isinstance(graphics, BarItems): | ||||
|                     use_vr = True | ||||
|                 if ( | ||||
|                     not flow.render | ||||
| 
 | ||||
|                     # XXX: super important to be aware of this. | ||||
|                     # or not flow.graphics.isVisible() | ||||
|                 ): | ||||
|                     continue | ||||
| 
 | ||||
|                 # pass in no array which will read and render from the last | ||||
|                 # passed array (normally provided by the display loop.) | ||||
|                 chart.update_graphics_from_array( | ||||
|                 chart.update_graphics_from_flow( | ||||
|                     name, | ||||
|                     use_vr=use_vr, | ||||
|                     use_vr=True, | ||||
| 
 | ||||
|                     # gets passed down into graphics obj | ||||
|                     profiler=profiler, | ||||
|                 ) | ||||
| 
 | ||||
|                 profiler(f'range change updated {chart_name}:{name}') | ||||
| 
 | ||||
|         profiler.finish() | ||||
|  |  | |||
|  | @ -46,7 +46,7 @@ log = get_logger(__name__) | |||
| 
 | ||||
| def bar_from_ohlc_row( | ||||
|     row: np.ndarray, | ||||
|     w: float | ||||
|     w: float = 0.43 | ||||
| 
 | ||||
| ) -> tuple[QLineF]: | ||||
|     ''' | ||||
|  | @ -158,8 +158,9 @@ def path_arrays_from_ohlc( | |||
| 
 | ||||
| def gen_qpath( | ||||
|     data: np.ndarray, | ||||
|     start: int,  # XXX: do we need this? | ||||
|     w: float, | ||||
|     start: int = 0,  # XXX: do we need this? | ||||
|     # 0.5 is no overlap between arms, 1.0 is full overlap | ||||
|     w: float = 0.43, | ||||
|     path: Optional[QtGui.QPainterPath] = None, | ||||
| 
 | ||||
| ) -> QtGui.QPainterPath: | ||||
|  | @ -243,7 +244,7 @@ class BarItems(pg.GraphicsObject): | |||
|         self.fast_path = QtGui.QPainterPath() | ||||
| 
 | ||||
|         self._xrange: tuple[int, int] | ||||
|         self._yrange: tuple[float, float] | ||||
|         # self._yrange: tuple[float, float] | ||||
|         self._vrange = None | ||||
| 
 | ||||
|         # TODO: don't render the full backing array each time | ||||
|  | @ -280,10 +281,10 @@ class BarItems(pg.GraphicsObject): | |||
|         # self.start_index = len(ohlc) | ||||
|         index = ohlc['index'] | ||||
|         self._xrange = (index[0], index[-1]) | ||||
|         self._yrange = ( | ||||
|             np.nanmax(ohlc['high']), | ||||
|             np.nanmin(ohlc['low']), | ||||
|         ) | ||||
|         # self._yrange = ( | ||||
|         #     np.nanmax(ohlc['high']), | ||||
|         #     np.nanmin(ohlc['low']), | ||||
|         # ) | ||||
| 
 | ||||
|         # up to last to avoid double draw of last bar | ||||
|         self._last_bar_lines = bar_from_ohlc_row(last, self.w) | ||||
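| 
 | ||||
|         # NOTE: the "current bar" glyph is just 3 ``QLineF``s from | ||||
|         # ``bar_from_ohlc_row()``: a vertical body spanning low -> | ||||
|         # high at x = i, a left arm at the open level and a right | ||||
|         # arm at the close level, roughly sketched as: | ||||
|         # | ||||
|         #           | | ||||
|         #   open ---| | ||||
|         #           |--- close | ||||
|         #           | | ||||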
|  | @ -310,7 +311,7 @@ class BarItems(pg.GraphicsObject): | |||
|         self._pi.addItem(curve) | ||||
|         self._ds_line = curve | ||||
| 
 | ||||
|         self._ds_xrange = (index[0], index[-1]) | ||||
|         # self._ds_xrange = (index[0], index[-1]) | ||||
| 
 | ||||
|         # trigger render | ||||
|         # https://doc.qt.io/qt-5/qgraphicsitem.html#update | ||||
|  | @ -324,289 +325,326 @@ class BarItems(pg.GraphicsObject): | |||
|         else: | ||||
|             return 0 | ||||
| 
 | ||||
|     def update_from_array( | ||||
|     # def update_from_array( | ||||
|     #     self, | ||||
| 
 | ||||
|     #     # full array input history | ||||
|     #     ohlc: np.ndarray, | ||||
| 
 | ||||
|     #     # pre-sliced array data that's "in view" | ||||
|     #     ohlc_iv: np.ndarray, | ||||
| 
 | ||||
|     #     view_range: Optional[tuple[int, int]] = None, | ||||
|     #     profiler: Optional[pg.debug.Profiler] = None, | ||||
| 
 | ||||
|     # ) -> None: | ||||
|     #     ''' | ||||
|     #     Update the last datum's bar graphic from input data array. | ||||
| 
 | ||||
|     #     This routine should be interface compatible with | ||||
|     #     ``pg.PlotCurveItem.setData()``. Normally this method in | ||||
|     #     ``pyqtgraph`` seems to update all the data passed to the | ||||
|     #     graphics object, and then update/rerender, but here we're | ||||
|     #     assuming the prior graphics haven't changed (OHLC history rarely | ||||
|     #     does) so this "should" be simpler and faster. | ||||
| 
 | ||||
|     #     This routine should be made (transitively) as fast as possible. | ||||
| 
 | ||||
|     #     ''' | ||||
|     #     profiler = profiler or pg.debug.Profiler( | ||||
|     #         disabled=not pg_profile_enabled(), | ||||
|     #         gt=ms_slower_then, | ||||
|     #         delayed=True, | ||||
|     #     ) | ||||
| 
 | ||||
|     #     # index = self.start_index | ||||
|     #     istart, istop = self._xrange | ||||
|     #     # ds_istart, ds_istop = self._ds_xrange | ||||
| 
 | ||||
|     #     index = ohlc['index'] | ||||
|     #     first_index, last_index = index[0], index[-1] | ||||
| 
 | ||||
|     #     # length = len(ohlc) | ||||
|     #     # prepend_length = istart - first_index | ||||
|     #     # append_length = last_index - istop | ||||
| 
 | ||||
|     #     # ds_prepend_length = ds_istart - first_index | ||||
|     #     # ds_append_length = last_index - ds_istop | ||||
| 
 | ||||
|     #     flip_cache = False | ||||
| 
 | ||||
|     #     x_gt = 16 | ||||
|     #     if self._ds_line: | ||||
|     #         uppx = self._ds_line.x_uppx() | ||||
|     #     else: | ||||
|     #         uppx = 0 | ||||
| 
 | ||||
|     #     should_line = self._in_ds | ||||
|     #     if ( | ||||
|     #         self._in_ds | ||||
|     #         and uppx < x_gt | ||||
|     #     ): | ||||
|     #         should_line = False | ||||
| 
 | ||||
|     #     elif ( | ||||
|     #         not self._in_ds | ||||
|     #         and uppx >= x_gt | ||||
|     #     ): | ||||
|     #         should_line = True | ||||
| 
 | ||||
|     #     profiler('ds logic complete') | ||||
| 
 | ||||
|     #     if should_line: | ||||
|     #         # update the line graphic | ||||
|     #         # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv) | ||||
|     #         x, y = self._ds_line_xy = ohlc_flatten(ohlc) | ||||
|     #         x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv) | ||||
|     #         profiler('flattening bars to line') | ||||
| 
 | ||||
|     #         # TODO: we should be diffing the amount of new data which | ||||
|     #         # needs to be downsampled. Ideally we actually are just | ||||
|     #         # doing all the ds-ing in sibling actors so that the data | ||||
|     #         # can just be read and rendered to graphics on events of our | ||||
|     #         # choice. | ||||
|     #         # diff = do_diff(ohlc, new_bit) | ||||
|     #         curve = self._ds_line | ||||
|     #         curve.update_from_array( | ||||
|     #             x=x, | ||||
|     #             y=y, | ||||
|     #             x_iv=x_iv, | ||||
|     #             y_iv=y_iv, | ||||
|     #             view_range=None,  # hack | ||||
|     #             profiler=profiler, | ||||
|     #         ) | ||||
|     #         profiler('updated ds line') | ||||
| 
 | ||||
|     #         if not self._in_ds: | ||||
|     #             # hide bars and show line | ||||
|     #             self.hide() | ||||
|     #             # XXX: is this actually any faster? | ||||
|     #             # self._pi.removeItem(self) | ||||
| 
 | ||||
|     #             # TODO: a `.ui()` log level? | ||||
|     #             log.info( | ||||
|     #                 f'downsampling to line graphic {self._name}' | ||||
|     #             ) | ||||
| 
 | ||||
|     #             # self._pi.addItem(curve) | ||||
|     #             curve.show() | ||||
|     #             curve.update() | ||||
|     #             self._in_ds = True | ||||
| 
 | ||||
|     #         # stop here since we don't need to update bars path any more | ||||
|     #         # as we delegate to the downsample line with updates. | ||||
| 
 | ||||
|     #     else: | ||||
|     #         # we should be in bars mode | ||||
| 
 | ||||
|     #         if self._in_ds: | ||||
|     #             # flip back to bars graphics and hide the downsample line. | ||||
|     #             log.info(f'showing bars graphic {self._name}') | ||||
| 
 | ||||
|     #             curve = self._ds_line | ||||
|     #             curve.hide() | ||||
|     #             # self._pi.removeItem(curve) | ||||
| 
 | ||||
|     #             # XXX: is this actually any faster? | ||||
|     #             # self._pi.addItem(self) | ||||
|     #             self.show() | ||||
|     #             self._in_ds = False | ||||
| 
 | ||||
|     #         # generate in_view path | ||||
|     #         self.path = gen_qpath( | ||||
|     #             ohlc_iv, | ||||
|     #             0, | ||||
|     #             self.w, | ||||
|     #             # path=self.path, | ||||
|     #         ) | ||||
| 
 | ||||
|     #         # TODO: to make the downsampling faster | ||||
|     #         # - allow mapping only a range of lines thus only drawing as | ||||
|     #         #   many bars as exactly specified. | ||||
|     #         # - move ohlc "flattening" to a shmarr | ||||
|     #         # - maybe move all this embedded logic to a higher | ||||
|     #         #   level type? | ||||
| 
 | ||||
|     #         # if prepend_length: | ||||
|     #         #     # new history was added and we need to render a new path | ||||
|     #         #     prepend_bars = ohlc[:prepend_length] | ||||
| 
 | ||||
|     #         # if ds_prepend_length: | ||||
|     #         #     ds_prepend_bars = ohlc[:ds_prepend_length] | ||||
|     #         #     pre_x, pre_y = ohlc_flatten(ds_prepend_bars) | ||||
|     #         #     fx = np.concatenate((pre_x, fx)) | ||||
|     #         #     fy = np.concatenate((pre_y, fy)) | ||||
|     #         #     profiler('ds line prepend diff complete') | ||||
| 
 | ||||
|     #         # if append_length: | ||||
|     #         #     # generate new graphics to match provided array | ||||
|     #         #     # path appending logic: | ||||
|     #         #     # we need to get the previous "current bar(s)" for the time step | ||||
|     #         #     # and convert it to a sub-path to append to the historical set | ||||
|     #         #     # new_bars = ohlc[istop - 1:istop + append_length - 1] | ||||
|     #         #     append_bars = ohlc[-append_length - 1:-1] | ||||
|     #         #     # print(f'ohlc bars to append size: {append_bars.size}\n') | ||||
| 
 | ||||
|     #         # if ds_append_length: | ||||
|     #         #     ds_append_bars = ohlc[-ds_append_length - 1:-1] | ||||
|     #         #     post_x, post_y = ohlc_flatten(ds_append_bars) | ||||
|     #         #     print( | ||||
|     #         #         f'ds curve to append sizes: {(post_x.size, post_y.size)}' | ||||
|     #         #     ) | ||||
|     #         #     fx = np.concatenate((fx, post_x)) | ||||
|     #         #     fy = np.concatenate((fy, post_y)) | ||||
| 
 | ||||
|     #         #     profiler('ds line append diff complete') | ||||
| 
 | ||||
|     #         profiler('array diffs complete') | ||||
| 
 | ||||
|     #         # does this work? | ||||
|     #         last = ohlc[-1] | ||||
|     #         # fy[-1] = last['close'] | ||||
| 
 | ||||
|     #         # # incremental update and cache line datums | ||||
|     #         # self._ds_line_xy = fx, fy | ||||
| 
 | ||||
|     #         # maybe downsample to line | ||||
|     #         # ds = self.maybe_downsample() | ||||
|     #         # if ds: | ||||
|     #         #     # if we downsample to a line don't bother with | ||||
|     #         #     # any more path generation / updates | ||||
|     #         #     self._ds_xrange = first_index, last_index | ||||
|     #         #     profiler('downsampled to line') | ||||
|     #         #     return | ||||
| 
 | ||||
|     #         # print(in_view.size) | ||||
| 
 | ||||
|     #         # if self.path: | ||||
|     #         #     self.path = path | ||||
|     #         #     self.path.reserve(path.capacity()) | ||||
|     #         #     self.path.swap(path) | ||||
| 
 | ||||
|     #         # path updates | ||||
|     #         # if prepend_length: | ||||
|     #         #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path | ||||
|     #         #     # y value not matching the first value from | ||||
|     #         #     # ohlc[prepend_length + 1] ??? | ||||
|     #         #     prepend_path = gen_qpath(prepend_bars, 0, self.w) | ||||
|     #         #     old_path = self.path | ||||
|     #         #     self.path = prepend_path | ||||
|     #         #     self.path.addPath(old_path) | ||||
|     #         #     profiler('path PREPEND') | ||||
| 
 | ||||
|     #         # if append_length: | ||||
|     #         #     append_path = gen_qpath(append_bars, 0, self.w) | ||||
| 
 | ||||
|     #         #     self.path.moveTo( | ||||
|     #         #         float(istop - self.w), | ||||
|     #         #         float(append_bars[0]['open']) | ||||
|     #         #     ) | ||||
|     #         #     self.path.addPath(append_path) | ||||
| 
 | ||||
|     #         #     profiler('path APPEND') | ||||
|     #         #     fp = self.fast_path | ||||
|     #         #     if fp is None: | ||||
|     #         #         self.fast_path = append_path | ||||
| 
 | ||||
|     #         #     else: | ||||
|     #         #         fp.moveTo( | ||||
|     #         #             float(istop - self.w), float(new_bars[0]['open']) | ||||
|     #         #         ) | ||||
|     #         #         fp.addPath(append_path) | ||||
| 
 | ||||
|     #         #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache) | ||||
|     #         #     flip_cache = True | ||||
| 
 | ||||
|     #         self._xrange = first_index, last_index | ||||
| 
 | ||||
|     #         # trigger redraw despite caching | ||||
|     #         self.prepareGeometryChange() | ||||
| 
 | ||||
|     #         self.draw_last(last) | ||||
| 
 | ||||
|     #         # # generate new lines objects for updatable "current bar" | ||||
|     #         # self._last_bar_lines = bar_from_ohlc_row(last, self.w) | ||||
| 
 | ||||
|     #         # # last bar update | ||||
|     #         # i, o, h, l, last, v = last[ | ||||
|     #         #     ['index', 'open', 'high', 'low', 'close', 'volume'] | ||||
|     #         # ] | ||||
|     #         # # assert i == self.start_index - 1 | ||||
|     #         # # assert i == last_index | ||||
|     #         # body, larm, rarm = self._last_bar_lines | ||||
| 
 | ||||
|     #         # # XXX: is there a faster way to modify this? | ||||
|     #         # rarm.setLine(rarm.x1(), last, rarm.x2(), last) | ||||
| 
 | ||||
|     #         # # writer is responsible for changing open on "first" volume of bar | ||||
|     #         # larm.setLine(larm.x1(), o, larm.x2(), o) | ||||
| 
 | ||||
|     #         # if l != h:  # noqa | ||||
| 
 | ||||
|     #         #     if body is None: | ||||
|     #         #         body = self._last_bar_lines[0] = QLineF(i, l, i, h) | ||||
|     #         #     else: | ||||
|     #         #         # update body | ||||
|     #         #         body.setLine(i, l, i, h) | ||||
| 
 | ||||
|     #         #     # XXX: pretty sure this is causing an issue where the bar has | ||||
|     #         #     # a large upward move right before the next sample and the body | ||||
|     #         #     # is getting set to None since the next bar is flat but the shm | ||||
|     #         #     # array index update wasn't read by the time this code runs. Iow | ||||
|     #         #     # we're doing this removal of the body for a bar index that is | ||||
|     #         #     # now out of date / from some previous sample. It's weird | ||||
|     #         #     # though because i've seen it do this to bars i - 3 back? | ||||
| 
 | ||||
|     #         profiler('last bar set') | ||||
| 
 | ||||
|     #         self.update() | ||||
|     #         profiler('.update()') | ||||
| 
 | ||||
|     #         if flip_cache: | ||||
|     #             self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|     #         # profiler.finish() | ||||
| 
 | ||||
|     def draw_last( | ||||
|         self, | ||||
|         last: np.ndarray, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         # generate new lines objects for updatable "current bar" | ||||
|         self._last_bar_lines = bar_from_ohlc_row(last, self.w) | ||||
| 
 | ||||
|         # last bar update | ||||
|         i, o, h, l, last, v = last[ | ||||
|             ['index', 'open', 'high', 'low', 'close', 'volume'] | ||||
|         ] | ||||
|         # assert i == self.start_index - 1 | ||||
|         # assert i == last_index | ||||
|         body, larm, rarm = self._last_bar_lines | ||||
| 
 | ||||
|         # XXX: is there a faster way to modify this? | ||||
|         rarm.setLine(rarm.x1(), last, rarm.x2(), last) | ||||
| 
 | ||||
|         # writer is responsible for changing open on "first" volume of bar | ||||
|         larm.setLine(larm.x1(), o, larm.x2(), o) | ||||
| 
 | ||||
|         if l != h:  # noqa | ||||
| 
 | ||||
|             if body is None: | ||||
|                 body = self._last_bar_lines[0] = QLineF(i, l, i, h) | ||||
|             else: | ||||
|                 # update body | ||||
|                 body.setLine(i, l, i, h) | ||||
| 
 | ||||
|         # full array input history | ||||
|         ohlc: np.ndarray, | ||||
| 
 | ||||
|         # pre-sliced array data that's "in view" | ||||
|         ohlc_iv: np.ndarray, | ||||
| 
 | ||||
|         view_range: Optional[tuple[int, int]] = None, | ||||
|         profiler: Optional[pg.debug.Profiler] = None, | ||||
| 
 | ||||
|     ) -> None: | ||||
|         ''' | ||||
|         Update the last datum's bar graphic from input data array. | ||||
| 
 | ||||
|         This routine should be interface compatible with | ||||
|         ``pg.PlotCurveItem.setData()``. Normally this method in | ||||
|         ``pyqtgraph`` seems to update all the data passed to the | ||||
|         graphics object, and then update/rerender, but here we're | ||||
|         assuming the prior graphics haven't changed (OHLC history rarely | ||||
|         does) so this "should" be simpler and faster. | ||||
| 
 | ||||
|         This routine should be made (transitively) as fast as possible. | ||||
| 
 | ||||
|         ''' | ||||
|         profiler = profiler or pg.debug.Profiler( | ||||
|             disabled=not pg_profile_enabled(), | ||||
|             gt=ms_slower_then, | ||||
|             delayed=True, | ||||
|         ) | ||||
| 
 | ||||
|         # index = self.start_index | ||||
|         istart, istop = self._xrange | ||||
|         ds_istart, ds_istop = self._ds_xrange | ||||
| 
 | ||||
|         index = ohlc['index'] | ||||
|         first_index, last_index = index[0], index[-1] | ||||
| 
 | ||||
|         # length = len(ohlc) | ||||
|         # prepend_length = istart - first_index | ||||
|         # append_length = last_index - istop | ||||
| 
 | ||||
|         # ds_prepend_length = ds_istart - first_index | ||||
|         # ds_append_length = last_index - ds_istop | ||||
| 
 | ||||
|         flip_cache = False | ||||
| 
 | ||||
|         x_gt = 16 | ||||
|         if self._ds_line: | ||||
|             uppx = self._ds_line.x_uppx() | ||||
|         else: | ||||
|             uppx = 0 | ||||
| 
 | ||||
|         should_line = self._in_ds | ||||
|         if ( | ||||
|             self._in_ds | ||||
|             and uppx < x_gt | ||||
|         ): | ||||
|             should_line = False | ||||
| 
 | ||||
|         elif ( | ||||
|             not self._in_ds | ||||
|             and uppx >= x_gt | ||||
|         ): | ||||
|             should_line = True | ||||
| 
 | ||||
|         profiler('ds logic complete') | ||||
| 
 | ||||
|         if should_line: | ||||
|             # update the line graphic | ||||
|             # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv) | ||||
|             x, y = self._ds_line_xy = ohlc_flatten(ohlc) | ||||
|             x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv) | ||||
|             profiler('flattening bars to line') | ||||
| 
 | ||||
|             # TODO: we should be diffing the amount of new data which | ||||
|             # needs to be downsampled. Ideally we actually are just | ||||
|             # doing all the ds-ing in sibling actors so that the data | ||||
|             # can just be read and rendered to graphics on events of our | ||||
|             # choice. | ||||
|             # diff = do_diff(ohlc, new_bit) | ||||
|             curve = self._ds_line | ||||
|             curve.update_from_array( | ||||
|                 x=x, | ||||
|                 y=y, | ||||
|                 x_iv=x_iv, | ||||
|                 y_iv=y_iv, | ||||
|                 view_range=None,  # hack | ||||
|                 profiler=profiler, | ||||
|             ) | ||||
|             profiler('updated ds line') | ||||
| 
 | ||||
|             if not self._in_ds: | ||||
|                 # hide bars and show line | ||||
|                 self.hide() | ||||
|                 # XXX: is this actually any faster? | ||||
|                 # self._pi.removeItem(self) | ||||
| 
 | ||||
|                 # TODO: a `.ui()` log level? | ||||
|                 log.info( | ||||
|                     f'downsampling to line graphic {self._name}' | ||||
|                 ) | ||||
| 
 | ||||
|                 # self._pi.addItem(curve) | ||||
|                 curve.show() | ||||
|                 curve.update() | ||||
|                 self._in_ds = True | ||||
| 
 | ||||
|             # stop here since we don't need to update bars path any more | ||||
|             # as we delegate to the downsample line with updates. | ||||
|             profiler.finish() | ||||
|             # print('terminating early') | ||||
|             return | ||||
| 
 | ||||
|         else: | ||||
|             # we should be in bars mode | ||||
| 
 | ||||
|             if self._in_ds: | ||||
|                 # flip back to bars graphics and hide the downsample line. | ||||
|                 log.info(f'showing bars graphic {self._name}') | ||||
| 
 | ||||
|                 curve = self._ds_line | ||||
|                 curve.hide() | ||||
|                 # self._pi.removeItem(curve) | ||||
| 
 | ||||
|                 # XXX: is this actually any faster? | ||||
|                 # self._pi.addItem(self) | ||||
|                 self.show() | ||||
|                 self._in_ds = False | ||||
| 
 | ||||
|             # generate in_view path | ||||
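|             # (for now we regenerate the entire in-view path on every | ||||
|             # call; the commented blocks below sketch the planned | ||||
|             # incremental prepend/append path updates.) | ||||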
|             self.path = gen_qpath( | ||||
|                 ohlc_iv, | ||||
|                 0, | ||||
|                 self.w, | ||||
|                 # path=self.path, | ||||
|             ) | ||||
| 
 | ||||
|             # TODO: to make the downsampling faster | ||||
|             # - allow mapping only a range of lines thus only drawing as | ||||
|             #   many bars as exactly specified. | ||||
|             # - move ohlc "flattening" to a shmarr | ||||
|             # - maybe move all this embedded logic to a higher | ||||
|             #   level type? | ||||
| 
 | ||||
|             # if prepend_length: | ||||
|             #     # new history was added and we need to render a new path | ||||
|             #     prepend_bars = ohlc[:prepend_length] | ||||
| 
 | ||||
|             # if ds_prepend_length: | ||||
|             #     ds_prepend_bars = ohlc[:ds_prepend_length] | ||||
|             #     pre_x, pre_y = ohlc_flatten(ds_prepend_bars) | ||||
|             #     fx = np.concatenate((pre_x, fx)) | ||||
|             #     fy = np.concatenate((pre_y, fy)) | ||||
|             #     profiler('ds line prepend diff complete') | ||||
| 
 | ||||
|             # if append_length: | ||||
|             #     # generate new graphics to match provided array | ||||
|             #     # path appending logic: | ||||
|             #     # we need to get the previous "current bar(s)" for the time step | ||||
|             #     # and convert it to a sub-path to append to the historical set | ||||
|             #     # new_bars = ohlc[istop - 1:istop + append_length - 1] | ||||
|             #     append_bars = ohlc[-append_length - 1:-1] | ||||
|             #     # print(f'ohlc bars to append size: {append_bars.size}\n') | ||||
| 
 | ||||
|             # if ds_append_length: | ||||
|             #     ds_append_bars = ohlc[-ds_append_length - 1:-1] | ||||
|             #     post_x, post_y = ohlc_flatten(ds_append_bars) | ||||
|             #     print( | ||||
|             #         f'ds curve to append sizes: {(post_x.size, post_y.size)}' | ||||
|             #     ) | ||||
|             #     fx = np.concatenate((fx, post_x)) | ||||
|             #     fy = np.concatenate((fy, post_y)) | ||||
| 
 | ||||
|             #     profiler('ds line append diff complete') | ||||
| 
 | ||||
|             profiler('array diffs complete') | ||||
| 
 | ||||
|             # does this work? | ||||
|             last = ohlc[-1] | ||||
|             # fy[-1] = last['close'] | ||||
| 
 | ||||
|             # # incremental update and cache line datums | ||||
|             # self._ds_line_xy = fx, fy | ||||
| 
 | ||||
|             # maybe downsample to line | ||||
|             # ds = self.maybe_downsample() | ||||
|             # if ds: | ||||
|             #     # if we downsample to a line don't bother with | ||||
|             #     # any more path generation / updates | ||||
|             #     self._ds_xrange = first_index, last_index | ||||
|             #     profiler('downsampled to line') | ||||
|             #     return | ||||
| 
 | ||||
|             # print(in_view.size) | ||||
| 
 | ||||
|             # if self.path: | ||||
|             #     self.path = path | ||||
|             #     self.path.reserve(path.capacity()) | ||||
|             #     self.path.swap(path) | ||||
| 
 | ||||
|             # path updates | ||||
|             # if prepend_length: | ||||
|             #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path | ||||
|             #     # y value not matching the first value from | ||||
|             #     # ohlc[prepend_length + 1] ??? | ||||
|             #     prepend_path = gen_qpath(prepend_bars, 0, self.w) | ||||
|             #     old_path = self.path | ||||
|             #     self.path = prepend_path | ||||
|             #     self.path.addPath(old_path) | ||||
|             #     profiler('path PREPEND') | ||||
| 
 | ||||
|             # if append_length: | ||||
|             #     append_path = gen_qpath(append_bars, 0, self.w) | ||||
| 
 | ||||
|             #     self.path.moveTo( | ||||
|             #         float(istop - self.w), | ||||
|             #         float(append_bars[0]['open']) | ||||
|             #     ) | ||||
|             #     self.path.addPath(append_path) | ||||
| 
 | ||||
|             #     profiler('path APPEND') | ||||
|             #     fp = self.fast_path | ||||
|             #     if fp is None: | ||||
|             #         self.fast_path = append_path | ||||
| 
 | ||||
|             #     else: | ||||
|             #         fp.moveTo( | ||||
|             #             float(istop - self.w), float(new_bars[0]['open']) | ||||
|             #         ) | ||||
|             #         fp.addPath(append_path) | ||||
| 
 | ||||
|             #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache) | ||||
|             #     flip_cache = True | ||||
| 
 | ||||
|             self._xrange = first_index, last_index | ||||
| 
 | ||||
|             # trigger redraw despite caching | ||||
|             self.prepareGeometryChange() | ||||
| 
 | ||||
|             # generate new lines objects for updatable "current bar" | ||||
|             self._last_bar_lines = bar_from_ohlc_row(last, self.w) | ||||
| 
 | ||||
|             # last bar update | ||||
|             i, o, h, l, close, v = last[ | ||||
|                 ['index', 'open', 'high', 'low', 'close', 'volume'] | ||||
|             ] | ||||
|             # assert i == self.start_index - 1 | ||||
|             # assert i == last_index | ||||
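|             # each bar renders as 3 ``QLineF`` segments: a vertical | ||||
|             # hi-lo body plus a left arm marking the open and a right | ||||
|             # arm marking the (live) close. | ||||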
|             body, larm, rarm = self._last_bar_lines | ||||
| 
 | ||||
|             # XXX: is there a faster way to modify this? | ||||
|             rarm.setLine(rarm.x1(), close, rarm.x2(), close) | ||||
| 
 | ||||
|             # the (shm) writer is responsible for setting the open | ||||
|             # price on the "first" volume of each new bar | ||||
|             larm.setLine(larm.x1(), o, larm.x2(), o) | ||||
| 
 | ||||
|             if l != h:  # noqa | ||||
| 
 | ||||
|                 if body is None: | ||||
|                     body = self._last_bar_lines[0] = QLineF(i, l, i, h) | ||||
|                 else: | ||||
|                     # update body | ||||
|                     body.setLine(i, l, i, h) | ||||
| 
 | ||||
|                 # XXX: pretty sure this is causing an issue where the | ||||
|                 # bar has a large upward move right before the next | ||||
|                 # sample and the body is getting set to None since the | ||||
|                 # next bar is flat but the shm array index update | ||||
|                 # wasn't read by the time this code runs. In other | ||||
|                 # words we're doing this removal of the body for a bar | ||||
|                 # index that is now out of date / from a previous | ||||
|                 # sample. It's weird though because I've seen it do | ||||
|                 # this to bars i - 3 back? | ||||
| 
 | ||||
|             profiler('last bar set') | ||||
| 
 | ||||
|             self.update() | ||||
|             profiler('.update()') | ||||
| 
 | ||||
|             if flip_cache: | ||||
|                 self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache) | ||||
| 
 | ||||
|             profiler.finish() | ||||
| 
 | ||||
|     def boundingRect(self): | ||||
|         # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect | ||||
|  |  | |||
|  | @ -7,3 +7,7 @@ | |||
| # pin this to a dev branch that we have more control over especially | ||||
| # as more graphics stuff gets hashed out. | ||||
| -e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph | ||||
| 
 | ||||
| 
 | ||||
| # our async client for ``marketstore`` (the tsdb) | ||||
| -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore | ||||
|  |  | |||
|  | @ -1,5 +1,5 @@ | |||
| # piker: trading gear for hackers | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | ||||
| # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||
| 
 | ||||
| # This program is free software: you can redistribute it and/or modify | ||||
| # it under the terms of the GNU Affero General Public License as published by | ||||
|  | @ -30,11 +30,13 @@ orig_win_id = t.find_focused().window | |||
| # for tws | ||||
| win_names: list[str] = [ | ||||
|     'Interactive Brokers',  # tws running in i3 | ||||
|     'IB Gateway.',  # gw running in i3 | ||||
|     'IB Gateway',  # gw running in i3 | ||||
|     # 'IB',  # gw running in i3 (newer version?) | ||||
| ] | ||||
| 
 | ||||
| for name in win_names: | ||||
|     results = t.find_named(name) | ||||
|     results = t.find_titled(name) | ||||
|     print(f'results for {name}: {results}') | ||||
|     if results: | ||||
|         con = results[0] | ||||
|         print(f'Resetting data feed for {name}') | ||||
|  | @ -47,22 +49,32 @@ for name in win_names: | |||
|         # https://github.com/rr-/pyxdotool | ||||
|         # https://github.com/ShaneHutter/pyxdotool | ||||
|         # https://github.com/cphyc/pyxdotool | ||||
|         subprocess.call([ | ||||
|             'xdotool', | ||||
|             'windowactivate', '--sync', win_id, | ||||
| 
 | ||||
|             # move mouse to bottom left of window (where there should | ||||
|             # be nothing to click). | ||||
|             'mousemove_relative', '--sync', str(w-4), str(h-4), | ||||
|         # TODO: only run the reconnect (2nd) kc on a detected | ||||
|         # disconnect? | ||||
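|         # each combo gets its own subprocess timeout since a full | ||||
|         # connection reset takes much longer than a data feed reset. | ||||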
|         for key_combo, timeout in [ | ||||
|             # only required if we need a connection reset. | ||||
|             # ('ctrl+alt+r', 12), | ||||
|             # data feed reset. | ||||
|             ('ctrl+alt+f', 6) | ||||
|         ]: | ||||
|             subprocess.call([ | ||||
|                 'xdotool', | ||||
|                 'windowactivate', '--sync', win_id, | ||||
| 
 | ||||
|             # NOTE: we may need to stick a `--retry 3` in here.. | ||||
|             'click', '--window', win_id, '--repeat', '3', '1', | ||||
|                 # move mouse to bottom left of window (where there should | ||||
|                 # be nothing to click). | ||||
|                 'mousemove_relative', '--sync', str(w-4), str(h-4), | ||||
| 
 | ||||
|             # hackzorzes | ||||
|             'key', 'ctrl+alt+f', | ||||
|             ], | ||||
|             timeout=1, | ||||
|         ) | ||||
|                 # NOTE: we may need to stick a `--retry 3` in here.. | ||||
|                 'click', '--window', win_id, | ||||
|                 '--repeat', '3', '1', | ||||
| 
 | ||||
|                 # hackzorzes | ||||
|                 'key', key_combo, | ||||
|                 ], | ||||
|                 timeout=timeout, | ||||
|             ) | ||||
| 
 | ||||
| # re-activate and focus original window | ||||
| subprocess.call([ | ||||
|  |  | |||
setup.py
							|  | @ -77,6 +77,14 @@ setup( | |||
|         # tsdbs | ||||
|         'pymarketstore', | ||||
|     ], | ||||
|     extras_require={ | ||||
| 
 | ||||
|         # tsdb integration (e.g. running ``marketstore`` via docker) | ||||
|         'tsdb': [ | ||||
|             'docker', | ||||
|         ], | ||||
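|         # installable via e.g. ``pip install -e ".[tsdb]"`` | ||||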
| 
 | ||||
|     }, | ||||
|     tests_require=['pytest'], | ||||
|     python_requires=">=3.9",  # literally for ``datetime.datetime.fromisoformat``... | ||||
|     keywords=["async", "trading", "finance", "quant", "charting"], | ||||
|  |  | |||