Compare commits
207 Commits
310_plus...m4_correct
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | b8cfee7d2f | |
Tyler Goodlet | 2d7aba0193 | |
Tyler Goodlet | 260b632f07 | |
Tyler Goodlet | b8704e1b7f | |
Tyler Goodlet | 800fe7446a | |
Tyler Goodlet | 536d1ff0d1 | |
Tyler Goodlet | be7c047e2f | |
Tyler Goodlet | d3e6ed3ba4 | |
Tyler Goodlet | 329e833e96 | |
Tyler Goodlet | 4e85d1d395 | |
Tyler Goodlet | 19205d57a1 | |
Tyler Goodlet | e4158dce01 | |
Tyler Goodlet | 66c20b80a5 | |
Tyler Goodlet | c07aa76201 | |
Tyler Goodlet | d39e1e9a46 | |
Tyler Goodlet | 5294017891 | |
Tyler Goodlet | 255f081e11 | |
Tyler Goodlet | 549830784b | |
Tyler Goodlet | 4178791023 | |
Tyler Goodlet | 347ac0a22b | |
Tyler Goodlet | b694ac2747 | |
Tyler Goodlet | 41d9642a00 | |
Tyler Goodlet | 33a29a43ff | |
Tyler Goodlet | 516dd26f08 | |
Tyler Goodlet | 8959f04791 | |
Tyler Goodlet | 1a24038638 | |
Tyler Goodlet | 895d45f730 | |
Tyler Goodlet | 8fd5b95fb9 | |
Tyler Goodlet | c3b43d7ae9 | |
Tyler Goodlet | 0b7961bb09 | |
Tyler Goodlet | 2250566e72 | |
Tyler Goodlet | a6bd29c1d1 | |
Tyler Goodlet | f68b8c5987 | |
Tyler Goodlet | d4e26c1a26 | |
Tyler Goodlet | 42dc1f804e | |
Tyler Goodlet | 0e7d274f2e | |
Tyler Goodlet | 9042244939 | |
Tyler Goodlet | 06f311c182 | |
Tyler Goodlet | be5ec76e73 | |
Tyler Goodlet | 31f76e44e3 | |
Tyler Goodlet | 8073c6b47e | |
Tyler Goodlet | fe3009c5a8 | |
Tyler Goodlet | 8d0c6cba58 | |
Tyler Goodlet | 44c7ff253f | |
Tyler Goodlet | 789c77f9b2 | |
Tyler Goodlet | 9a0716143c | |
Tyler Goodlet | c95d160210 | |
Tyler Goodlet | 073dab20d7 | |
Tyler Goodlet | fe14ac5c79 | |
Tyler Goodlet | 198de1efac | |
Tyler Goodlet | 6a44c83e84 | |
Tyler Goodlet | eb2b844c2d | |
Tyler Goodlet | 961f01448d | |
Tyler Goodlet | 72b93c0f24 | |
Tyler Goodlet | 13c88a075d | |
Tyler Goodlet | ba8f443bf9 | |
Tyler Goodlet | 72083eae17 | |
Tyler Goodlet | 25be7f8d08 | |
Tyler Goodlet | 5c84a5f8b4 | |
Tyler Goodlet | 74cac9fc62 | |
Tyler Goodlet | 2f052a7383 | |
Tyler Goodlet | 550d81ee2c | |
Tyler Goodlet | e430756944 | |
Tyler Goodlet | 44b613024e | |
Tyler Goodlet | 36b8253b66 | |
Tyler Goodlet | a16cd2fe6d | |
Tyler Goodlet | 245085d493 | |
Tyler Goodlet | 0b68bf257e | |
Tyler Goodlet | 68ac759b3f | |
Tyler Goodlet | 2679d6261e | |
Tyler Goodlet | 48c989d400 | |
Tyler Goodlet | 831c23872c | |
Tyler Goodlet | 008e153f36 | |
Tyler Goodlet | 38be93a009 | |
Tyler Goodlet | 3bf3f450f4 | |
Tyler Goodlet | 4e1ab378f4 | |
Tyler Goodlet | 8d77d177bf | |
Tyler Goodlet | 36463dddbe | |
Tyler Goodlet | e399b74b67 | |
Tyler Goodlet | 15b609bf13 | |
Tyler Goodlet | dfc3fb76fd | |
Tyler Goodlet | 881b1afc12 | |
Tyler Goodlet | 9878735ff3 | |
Tyler Goodlet | 1389ee51f3 | |
Tyler Goodlet | 6225e8a58e | |
Tyler Goodlet | 2b48943066 | |
Tyler Goodlet | 92d074f308 | |
Tyler Goodlet | 49a280ce14 | |
Tyler Goodlet | 42e5f48345 | |
Tyler Goodlet | 18344603e6 | |
Tyler Goodlet | ae1d9efa97 | |
Tyler Goodlet | f1747749e7 | |
Tyler Goodlet | cdd03759bf | |
Tyler Goodlet | 00a36826c2 | |
Tyler Goodlet | 3aa8044aa1 | |
Tyler Goodlet | f0ae7edb00 | |
Tyler Goodlet | 797ba15923 | |
Tyler Goodlet | b8f86d4599 | |
Tyler Goodlet | 908093264a | |
Tyler Goodlet | 3ef68f78f7 | |
Tyler Goodlet | d095c21885 | |
Tyler Goodlet | 52dc0989ac | |
Tyler Goodlet | 6f749c340d | |
Tyler Goodlet | dcff80131f | |
Tyler Goodlet | 6cd829d1fd | |
Tyler Goodlet | 76fff18b1a | |
Tyler Goodlet | 88e1befffe | |
Tyler Goodlet | c1579b630e | |
Tyler Goodlet | 7dfdd0f00b | |
Guillermo Rodriguez | 14a893a5e5 | |
Guillermo Rodriguez | c91599472d | |
Guillermo Rodriguez | 9e4dec98da | |
Tyler Goodlet | 8800ee0661 | |
Tyler Goodlet | 885116ae46 | |
Tyler Goodlet | eab9127170 | |
Tyler Goodlet | c675773de4 | |
Tyler Goodlet | b20245398d | |
Tyler Goodlet | 2abb3ec84d | |
Tyler Goodlet | 6843f9a515 | |
Tyler Goodlet | 5002e78b81 | |
Tyler Goodlet | 5446cbd335 | |
Tyler Goodlet | 7870e759ff | |
Tyler Goodlet | dfed6cb8e9 | |
Tyler Goodlet | b30b4bb555 | |
Tyler Goodlet | 514bbb1a98 | |
Tyler Goodlet | 918789d1a3 | |
Tyler Goodlet | 127294d39c | |
Tyler Goodlet | d3a30a272d | |
Tyler Goodlet | 9477c7e66c | |
Tyler Goodlet | abab8d3451 | |
Tyler Goodlet | c4ad4e089e | |
Tyler Goodlet | 36224eac5a | |
Tyler Goodlet | 1cdec55725 | |
Tyler Goodlet | 576263dc4d | |
Tyler Goodlet | 3998cb70a6 | |
Tyler Goodlet | 096d8553e6 | |
Tyler Goodlet | ec877f38a0 | |
Tyler Goodlet | b055fc9daa | |
Tyler Goodlet | c0bb23adb1 | |
Tyler Goodlet | 302fadeab7 | |
Tyler Goodlet | 5a86b62a9c | |
Tyler Goodlet | 750e7230da | |
Tyler Goodlet | 264328e119 | |
Tyler Goodlet | 242007d7f6 | |
Tyler Goodlet | dcfe89cfa9 | |
Tyler Goodlet | 4722232938 | |
Tyler Goodlet | 156a839ee9 | |
Tyler Goodlet | b748bc2d05 | |
Tyler Goodlet | 927d37541f | |
Tyler Goodlet | e6eea88174 | |
Tyler Goodlet | e1cfbc78ee | |
Tyler Goodlet | f1f7241a1e | |
Tyler Goodlet | 6af6449e8e | |
Tyler Goodlet | c55c0f5d8f | |
Tyler Goodlet | 8c2d375e0e | |
Tyler Goodlet | 889aa10d32 | |
Tyler Goodlet | eba92a8f20 | |
Tyler Goodlet | d3d19a57c9 | |
Tyler Goodlet | ba4a526b8b | |
Tyler Goodlet | 15922f4090 | |
Tyler Goodlet | f6136245f9 | |
Tyler Goodlet | 44482cbc1b | |
Tyler Goodlet | c745c9801f | |
Tyler Goodlet | 60b1c53d20 | |
Tyler Goodlet | 68779218ff | |
Tyler Goodlet | 9726ed1a42 | |
Tyler Goodlet | 69cb8156a2 | |
Tyler Goodlet | 97efb865d4 | |
Tyler Goodlet | 2f99fd35e5 | |
Tyler Goodlet | 2e25357ed0 | |
Tyler Goodlet | c1bdf0e26d | |
Tyler Goodlet | d3587263db | |
Tyler Goodlet | 6bf4cdaa24 | |
Tyler Goodlet | e95896722f | |
Tyler Goodlet | 8a4f124a48 | |
Tyler Goodlet | a3817d7644 | |
Tyler Goodlet | 6d1a3dfdc5 | |
Tyler Goodlet | acba4e8f02 | |
Tyler Goodlet | d86320848f | |
Tyler Goodlet | bed52639d0 | |
Tyler Goodlet | 4cd3f8c531 | |
Tyler Goodlet | e3739f0c84 | |
Tyler Goodlet | 6d54137ff1 | |
Tyler Goodlet | 39b1edf847 | |
Tyler Goodlet | 482c46acd0 | |
Tyler Goodlet | 850f664de9 | |
Tyler Goodlet | 5ba13d5677 | |
Tyler Goodlet | 603d61be39 | |
Tyler Goodlet | cd14a2b598 | |
Tyler Goodlet | d2b49bd1fe | |
Tyler Goodlet | abf399a73a | |
Tyler Goodlet | ca7808a887 | |
Tyler Goodlet | 78c8a843e4 | |
Tyler Goodlet | 93d2c715e7 | |
Tyler Goodlet | da5d2ef331 | |
Tyler Goodlet | a6c103a850 | |
Tyler Goodlet | 0d062bfe66 | |
Tyler Goodlet | fd296fa762 | |
Tyler Goodlet | 0e1656978b | |
Tyler Goodlet | 4378974b59 | |
Tyler Goodlet | 6d9ffc532e | |
Tyler Goodlet | 37b492eba6 | |
Tyler Goodlet | 6d54cf1d7d | |
Tyler Goodlet | b81e8dc39c | |
Tyler Goodlet | f365e4a465 | |
Tyler Goodlet | 2c9ecbae9a | |
Tyler Goodlet | 4ac65a93ae | |
piker/_daemon.py
@@ -19,7 +19,7 @@ Structured, daemon tree service management.
 """
 from typing import Optional, Union, Callable, Any
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from collections import defaultdict

 from pydantic import BaseModel

@@ -35,10 +35,10 @@ log = get_logger(__name__)

 _root_dname = 'pikerd'

-_registry_addr = ('127.0.0.1', 6116)
+_registry_addr = ('127.0.0.1', 1616)
 _tractor_kwargs: dict[str, Any] = {
     # use a different registry addr then tractor's default
     'arbiter_addr': _registry_addr
 }
 _root_modules = [
     __name__,

@@ -91,14 +91,18 @@ class Services(BaseModel):
            log.info(
                f'`pikerd` service {name} started with value {first}'
            )
-            # wait on any context's return value
-            ctx_res = await ctx.result()
-
-            # wait on any error from the sub-actor
-            # NOTE: this will block indefinitely until cancelled
-            # either by error from the target context function or by
-            # being cancelled here by the surrounding cancel scope
-            return (await portal.result(), ctx_res)
+            try:
+                # wait on any context's return value
+                ctx_res = await ctx.result()
+            except tractor.ContextCancelled:
+                return await self.cancel_service(name)
+            else:
+                # wait on any error from the sub-actor
+                # NOTE: this will block indefinitely until
+                # cancelled either by error from the target
+                # context function or by being cancelled here by
+                # the surrounding cancel scope
+                return (await portal.result(), ctx_res)

        cs, first = await self.service_n.start(open_context_in_task)

@@ -110,20 +114,23 @@ class Services(BaseModel):

-    # TODO: per service cancellation by scope, we aren't using this
-    # anywhere right?
-    # async def cancel_service(
-    #     self,
-    #     name: str,
-    # ) -> Any:
-    #     log.info(f'Cancelling `pikerd` service {name}')
-    #     cs, portal = self.service_tasks[name]
-    #     cs.cancel()
-    #     return await portal.cancel_actor()
+    async def cancel_service(
+        self,
+        name: str,
+    ) -> Any:
+        log.info(f'Cancelling `pikerd` service {name}')
+        cs, portal = self.service_tasks[name]
+        # XXX: not entirely sure why this is required,
+        # and should probably be better fine tuned in
+        # ``tractor``?
+        cs.cancel()
+        return await portal.cancel_actor()


 _services: Optional[Services] = None


-@asynccontextmanager
+@acm
 async def open_pikerd(
     start_method: str = 'trio',
     loglevel: Optional[str] = None,

@@ -178,7 +185,7 @@ async def open_pikerd(
     yield _services


-@asynccontextmanager
+@acm
 async def open_piker_runtime(
     name: str,
     enable_modules: list[str] = [],

@@ -219,7 +226,7 @@ async def open_piker_runtime(
     yield tractor.current_actor()


-@asynccontextmanager
+@acm
 async def maybe_open_runtime(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -242,7 +249,7 @@ async def maybe_open_runtime(
     yield


-@asynccontextmanager
+@acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -293,7 +300,36 @@ class Brokerd:
     locks = defaultdict(trio.Lock)


-@asynccontextmanager
+@acm
+async def find_service(
+    service_name: str,
+) -> Optional[tractor.Portal]:
+
+    log.info(f'Scanning for service `{service_name}`')
+    # attach to existing daemon by name if possible
+    async with tractor.find_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as maybe_portal:
+        yield maybe_portal
+
+
+async def check_for_service(
+    service_name: str,
+
+) -> bool:
+    '''
+    Service daemon "liveness" predicate.
+
+    '''
+    async with tractor.query_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as sockaddr:
+        return sockaddr
+
+
+@acm
 async def maybe_spawn_daemon(

     service_name: str,

@@ -303,7 +339,7 @@ async def maybe_spawn_daemon(
     **kwargs,

 ) -> tractor.Portal:
-    """
+    '''
     If no ``service_name`` daemon-actor can be found,
     spawn one in a local subactor and return a portal to it.

@@ -314,7 +350,7 @@ async def maybe_spawn_daemon(
     This can be seen as a service starting api for remote-actor
     clients.

-    """
+    '''
     if loglevel:
         get_console_log(loglevel)

@@ -323,19 +359,13 @@ async def maybe_spawn_daemon(
     lock = Brokerd.locks[service_name]
     await lock.acquire()

-    log.info(f'Scanning for existing {service_name}')
-    # attach to existing daemon by name if possible
-    async with tractor.find_actor(
-        service_name,
-        arbiter_sockaddr=_registry_addr,
-
-    ) as portal:
+    async with find_service(service_name) as portal:
         if portal is not None:
             lock.release()
             yield portal
             return

-    log.warning(f"Couldn't find any existing {service_name}")
+        log.warning(f"Couldn't find any existing {service_name}")

     # ask root ``pikerd`` daemon to spawn the daemon we need if
     # pikerd is not live we now become the root of the

@@ -372,6 +402,7 @@ async def maybe_spawn_daemon(
     async with tractor.wait_for_actor(service_name) as portal:
         lock.release()
         yield portal
+        await portal.cancel_actor()


 async def spawn_brokerd(

@@ -415,7 +446,7 @@ async def spawn_brokerd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_spawn_brokerd(

     brokername: str,

@@ -423,7 +454,9 @@ async def maybe_spawn_brokerd(
     **kwargs,

 ) -> tractor.Portal:
-    '''Helper to spawn a brokerd service.
-    '''
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.
+
+    '''
     async with maybe_spawn_daemon(

@@ -475,7 +508,7 @@ async def spawn_emsd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_open_emsd(

     brokername: str,
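The new `find_service()` helper factors the registry lookup out of `maybe_spawn_daemon()` so other callers can probe for an already-running daemon. A minimal sketch of such a probe, assuming an installed `piker` and a reachable `pikerd` registry (the `main()` wrapper is illustrative only):

```python
# sketch: probe the registry for an already-running service;
# assumes the `find_service()` helper added above.
import trio

from piker._daemon import find_service


async def main() -> None:
    async with find_service('brokerd.binance') as portal:
        if portal is None:
            print('no existing `brokerd.binance` daemon found')
        else:
            # the portal can now be used to call into the daemon
            print(f'found existing daemon: {portal}')


if __name__ == '__main__':
    trio.run(main)
```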
piker/_profile.py

@@ -21,7 +21,10 @@ Profiling wrappers for internal libs.
 import time
 from functools import wraps

-_pg_profile: bool = True
+# NOTE: you can pass a flag to enable this:
+# ``piker chart <args> --profile``.
+_pg_profile: bool = False
+ms_slower_then: float = 0


 def pg_profile_enabled() -> bool:
piker/brokers/_util.py

@@ -33,7 +33,41 @@ class SymbolNotFound(BrokerError):


 class NoData(BrokerError):
-    "Symbol data not permitted"
+    '''
+    Symbol data not permitted or no data
+    for time range found.
+
+    '''
+    def __init__(
+        self,
+        *args,
+        frame_size: int = 1000,
+
+    ) -> None:
+        super().__init__(*args)
+
+        # when raised, machinery can check if the backend
+        # set a "frame size" for doing datetime calcs.
+        self.frame_size: int = 1000
+
+
+class DataUnavailable(BrokerError):
+    '''
+    Signal storage requests to terminate.
+
+    '''
+    # TODO: add in a reason that can be displayed in the
+    # UI (for eg. `kraken` is bs and you should complain
+    # to them that you can't pull more OHLC data..)
+
+
+class DataThrottle(BrokerError):
+    '''
+    Broker throttled request rate for data.
+
+    '''
+    # TODO: add in throttle metrics/feedback


 def resproc(

@@ -50,12 +84,12 @@ def resproc(
     if not resp.status_code == 200:
         raise BrokerError(resp.body)
     try:
-        json = resp.json()
+        msg = resp.json()
     except json.decoder.JSONDecodeError:
         log.exception(f"Failed to process {resp}:\n{resp.text}")
         raise BrokerError(resp.text)

     if log_resp:
-        log.debug(f"Received json contents:\n{colorize_json(json)}")
+        log.debug(f"Received json contents:\n{colorize_json(msg)}")

-    return json if return_json else resp
+    return msg if return_json else resp
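These richer error types let the data layer branch on backend-specific failure modes instead of one opaque `BrokerError`. A hedged sketch of how a history loop might use them (`fetch_frame` is a hypothetical callable, not part of this diff); note that as written above, `NoData.__init__` assigns the literal `1000` rather than its `frame_size` argument, so the parameter appears to be a no-op:

```python
# sketch: reacting to the new broker error types in a
# history-backfill loop; `fetch_frame()` is hypothetical.
import trio

from piker.brokers._util import (
    NoData,
    DataThrottle,
    DataUnavailable,
)


async def backfill(fetch_frame) -> list:
    frames = []
    while True:
        try:
            frames.append(await fetch_frame())

        except DataThrottle:
            # rate limited: back off then retry the same frame
            await trio.sleep(1)

        except NoData as err:
            # the backend can advertise its "frame size" so the
            # caller can step its datetime cursor past the gap
            print(f'no data for range; frame size {err.frame_size}')
            break

        except DataUnavailable:
            # backend says to stop requesting more history
            break

    return frames
```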
piker/brokers/binance.py

@@ -19,6 +19,7 @@ Binance backend

 """
 from contextlib import asynccontextmanager as acm
+from datetime import datetime
 from typing import (
     Any, Union, Optional,
     AsyncGenerator, Callable,

@@ -221,20 +222,22 @@ class Client:
     async def bars(
         self,
         symbol: str,
-        start_time: int = None,
-        end_time: int = None,
+        start_dt: Optional[datetime] = None,
+        end_dt: Optional[datetime] = None,
         limit: int = 1000,  # <- max allowed per query
         as_np: bool = True,

     ) -> dict:

-        if start_time is None:
-            start_time = binance_timestamp(
-                pendulum.now('UTC').start_of('minute').subtract(minutes=limit)
-            )
+        if end_dt is None:
+            end_dt = pendulum.now('UTC')

-        if end_time is None:
-            end_time = binance_timestamp(pendulum.now('UTC'))
+        if start_dt is None:
+            start_dt = end_dt.start_of(
+                'minute').subtract(minutes=limit)
+
+        start_time = binance_timestamp(start_dt)
+        end_time = binance_timestamp(end_dt)

         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
         bars = await self._api(

@@ -379,7 +382,27 @@ async def open_history_client(

     # TODO implement history getter for the new storage layer.
     async with open_cached_client('binance') as client:
-        yield client
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            array = await client.bars(
+                symbol,
+                start_dt=start_dt,
+                end_dt=end_dt,
+            )
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc


 async def backfill_bars(
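The backend now yields a `get_ohlc` endpoint from `open_history_client()` instead of the raw client, returning each frame together with its datetime bounds. A sketch of a consumer walking backwards through history (the exact feed-layer calling convention is inferred from this diff, not verified):

```python
# sketch: walking history backwards via the `get_ohlc`
# endpoint yielded by `open_history_client()`.
from piker.brokers.binance import open_history_client


async def load_history(symbol: str, frames: int = 3) -> list:
    arrays = []
    async with open_history_client(symbol) as get_ohlc:
        end_dt = None  # None means "most recent bars"
        for _ in range(frames):
            array, start_dt, _ = await get_ohlc(end_dt=end_dt)
            arrays.append(array)
            # the next (older) frame ends where this one started
            end_dt = start_dt
    return arrays
```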
piker/brokers/ib.py

@@ -37,7 +37,6 @@ import asyncio
 from pprint import pformat
-import inspect
 import logging
 import platform
 from random import randint
 import time

@@ -296,6 +295,10 @@ class Client:
         global _enters
+        # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
         print(f'REQUESTING BARS {_enters} @ end={end_dt}')
+
+        if not end_dt:
+            end_dt = ''

         _enters += 1

         contract = await self.find_contract(fqsn)

@@ -1479,7 +1482,9 @@ async def get_bars(

             if 'No market data permissions for' in msg:
                 # TODO: signalling for no permissions searches
-                raise NoData(f'Symbol: {fqsn}')
+                raise NoData(
+                    f'Symbol: {fqsn}',
+                )
                 break

             elif (

@@ -1547,8 +1552,8 @@ async def open_history_client(
     async with open_client_proxy() as proxy:

         async def get_hist(
-            end_dt: str,
-            start_dt: str = '',
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,

         ) -> tuple[np.ndarray, str]:

@@ -1556,10 +1561,13 @@ async def open_history_client(

             # TODO: add logic here to handle tradable hours and only grab
             # valid bars in the range
-            if out == (None, None):
+            if out is None:
                 # could be trying to retreive bars over weekend
                 log.error(f"Can't grab bars starting at {end_dt}!?!?")
-                raise NoData(f'{end_dt}')
+                raise NoData(
+                    f'{end_dt}',
+                    frame_size=2000,
+                )

             bars, bars_array, first_dt, last_dt = out

@@ -1583,7 +1591,7 @@ async def backfill_bars(
     # on that until we have the `marketstore` daemon in place in which
     # case the shm size will be driven by user config and available sys
     # memory.
-    count: int = 100,
+    count: int = 16,

     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

@@ -1603,11 +1611,6 @@ async def backfill_bars(
     # async with open_history_client(fqsn) as proxy:
     async with open_client_proxy() as proxy:

-        if platform.system() == 'Windows':
-            log.warning(
-                'Decreasing history query count to 4 since, windows...')
-            count = 4
-
         out, fails = await get_bars(proxy, fqsn)

         if out is None:

@@ -2441,8 +2444,8 @@ async def data_reset_hack(
     try:
         import i3ipc
     except ImportError:
+        log.warning('IB data hack no-supported on ur platformz')
         return False

     i3 = i3ipc.Connection()
     t = i3.get_tree()
piker/brokers/kraken.py

@@ -20,7 +20,8 @@ Kraken backend.
 '''
 from contextlib import asynccontextmanager as acm
 from dataclasses import asdict, field
-from typing import Any, Optional, AsyncIterator, Callable
+from datetime import datetime
+from typing import Any, Optional, AsyncIterator, Callable, Union
 import time

 from trio_typing import TaskStatus

@@ -40,7 +41,13 @@ import base64

 from .. import config
 from .._cacheables import open_cached_client
-from ._util import resproc, SymbolNotFound, BrokerError
+from ._util import (
+    resproc,
+    SymbolNotFound,
+    BrokerError,
+    DataThrottle,
+    DataUnavailable,
+)
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
 from ..data._web_bs import open_autorecon_ws, NoBsWs

@@ -305,7 +312,7 @@ class Client:
         action: str,
         size: float,
         reqid: str = None,
         validate: bool = False  # set True test call without a real submission
     ) -> dict:
         '''
         Place an order and return integer request id provided by client.

@@ -391,17 +398,26 @@ class Client:
     async def bars(
         self,
         symbol: str = 'XBTUSD',

         # UTC 2017-07-02 12:53:20
-        since: int = None,
+        since: Optional[Union[int, datetime]] = None,
         count: int = 720,  # <- max allowed per query
         as_np: bool = True,

     ) -> dict:

         if since is None:
             since = pendulum.now('UTC').start_of('minute').subtract(
                 minutes=count).timestamp()
+
+        elif isinstance(since, int):
+            since = pendulum.from_timestamp(since).timestamp()
+
+        else:  # presumably a pendulum datetime
+            since = since.timestamp()

         # UTC 2017-07-02 12:53:20 is oldest seconds value
-        since = str(max(1499000000, since))
+        since = str(max(1499000000, int(since)))
         json = await self._public(
             'OHLC',
             data={

@@ -445,7 +461,16 @@ class Client:
             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
             return array
         except KeyError:
-            raise SymbolNotFound(json['error'][0] + f': {symbol}')
+            errmsg = json['error'][0]
+
+            if 'not found' in errmsg:
+                raise SymbolNotFound(errmsg + f': {symbol}')
+
+            elif 'Too many requests' in errmsg:
+                raise DataThrottle(f'{symbol}')
+
+            else:
+                raise BrokerError(errmsg)


 @acm

@@ -668,8 +693,8 @@ async def handle_order_requests(
                     oid=msg.oid,
                     reqid=msg.reqid,
                     symbol=msg.symbol,
-                    # TODO: maybe figure out if pending cancels will
-                    # eventually get cancelled
+                    # TODO: maybe figure out if pending
+                    # cancels will eventually get cancelled
                     reason="Order cancel is still pending?",
                     broker_details=resp
                 ).dict()

@@ -1003,7 +1028,45 @@ async def open_history_client(

     # TODO implement history getter for the new storage layer.
     async with open_cached_client('kraken') as client:
-        yield client
+
+        # lol, kraken won't send any more then the "last"
+        # 720 1m bars.. so we have to just ignore further
+        # requests of this type..
+        queries: int = 0
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            nonlocal queries
+            if queries > 0:
+                raise DataUnavailable
+
+            count = 0
+            while count <= 3:
+                try:
+                    array = await client.bars(
+                        symbol,
+                        since=end_dt,
+                    )
+                    count += 1
+                    queries += 1
+                    break
+                except DataThrottle:
+                    log.warning(f'kraken OHLC throttle for {symbol}')
+                    await trio.sleep(1)
+
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc


 async def backfill_bars(
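Because kraken only ever serves the most recent 720 one-minute bars, the closure above raises `DataUnavailable` on any second query; callers have to treat that as end-of-history rather than an error. A sketch under the same assumed calling convention as the binance example:

```python
# sketch: kraken history is capped at 720 1m bars, so the
# second query raises `DataUnavailable` and the caller
# treats it as "end of history".
from piker.brokers._util import DataUnavailable
from piker.brokers.kraken import open_history_client


async def load_all(symbol: str) -> list:
    arrays = []
    async with open_history_client(symbol) as get_ohlc:
        end_dt = None
        while True:
            try:
                array, start_dt, _ = await get_ohlc(end_dt=end_dt)
            except DataUnavailable:
                break  # kraken won't serve anything older
            arrays.append(array)
            end_dt = start_dt
    return arrays
```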
piker/clearing/_allocate.py

@@ -178,7 +178,9 @@ class Allocator(BaseModel):
             l_sub_pp = (self.currency_limit - live_cost_basis) / price

         else:
-            raise ValueError(f"Not valid size unit '{size}'")
+            raise ValueError(
+                f"Not valid size unit '{size_unit}'"
+            )

         # an entry (adding-to or starting a pp)
         if (

@@ -282,6 +284,14 @@ class Allocator(BaseModel):
         return round(prop * self.slots)


+_derivs = (
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+)
+
+
 def mk_allocator(

     symbol: Symbol,

@@ -290,7 +300,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': 'currency',  # _size_units['currency'],
+        'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,

@@ -318,11 +328,9 @@ def mk_allocator(

     asset_type = symbol.type_key

-
     # specific configs by asset class / type
-
-    if asset_type in ('future', 'option', 'futures_option'):
-
+    if asset_type in _derivs:
         # since it's harder to know how currency "applies" in this case
         # given leverage properties
         alloc.size_unit = '# units'

@@ -345,7 +353,7 @@ def mk_allocator(
     if startup_size > alloc.units_limit:
         alloc.units_limit = startup_size

-    if asset_type in ('future', 'option', 'futures_option'):
+    if asset_type in _derivs:
         alloc.slots = alloc.units_limit

     return alloc
piker/clearing/_ems.py

@@ -261,7 +261,15 @@ async def clear_dark_triggers(
                         f'pred for {oid} was already removed!?'
                     )

-                await ems_client_order_stream.send(msg)
+                try:
+                    await ems_client_order_stream.send(msg)
+                except (
+                    trio.ClosedResourceError,
+                ):
+                    log.warning(
+                        f'client {ems_client_order_stream} stream is broke'
+                    )
+                    break

             else:  # condition scan loop complete
                 log.debug(f'execs are {execs}')

@@ -573,8 +581,16 @@ async def translate_and_relay_brokerd_events(

             # fan-out-relay position msgs immediately by
             # broadcasting updates on all client streams
-            for client_stream in router.clients:
-                await client_stream.send(pos_msg)
+            for client_stream in router.clients.copy():
+                try:
+                    await client_stream.send(pos_msg)
+                except(
+                    trio.ClosedResourceError,
+                    trio.BrokenResourceError,
+                ):
+                    router.clients.remove(client_stream)
+                    log.warning(
+                        f'client for {client_stream} was already closed?')

             continue
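The `.copy()` in the fan-out loop matters: removing a dead stream from `router.clients` while iterating the same set would raise `RuntimeError: Set changed size during iteration`. A minimal standalone demonstration of the pattern:

```python
# minimal demo of why the relay loop iterates a copy:
# mutating a set while iterating it directly raises
# RuntimeError, so we iterate a snapshot instead.
clients = {'alive-1', 'dead', 'alive-2'}

for client in clients.copy():
    if client == 'dead':
        # safe: we're iterating a snapshot, not the live set
        clients.remove(client)

assert clients == {'alive-1', 'alive-2'}
```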
piker/cli/__init__.py

@@ -16,29 +16,22 @@ from .. import config
 log = get_logger('cli')
 DEFAULT_BROKER = 'questrade'

-_config_dir = click.get_app_dir('piker')
-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
-_context_defaults = dict(
-    default_map={
-        # Questrade specific quote poll rates
-        'monitor': {
-            'rate': 3,
-        },
-        'optschain': {
-            'rate': 1,
-        },
-    }
-)
-

 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
 @click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
-def pikerd(loglevel, host, tl, pdb):
-    """Spawn the piker broker-daemon.
-    """
+@click.option(
+    '--tsdb',
+    is_flag=True,
+    help='Enable local ``marketstore`` instance'
+)
+def pikerd(loglevel, host, tl, pdb, tsdb):
+    '''
+    Spawn the piker broker-daemon.
+
+    '''
     from .._daemon import open_pikerd
     log = get_console_log(loglevel)

@@ -52,13 +45,33 @@ def pikerd(loglevel, host, tl, pdb):
         ))

     async def main():
-        async with open_pikerd(loglevel=loglevel, debug_mode=pdb):
+
+        async with (
+            open_pikerd(
+                loglevel=loglevel,
+                debug_mode=pdb,
+            ),  # normally delivers a ``Services`` handle
+            trio.open_nursery() as n,
+        ):
+            if tsdb:
+                # TODO:
+                # async with maybe_open_marketstored():
+
+                from piker.data._ahab import start_ahab
+                log.info('Spawning `marketstore` supervisor')
+                ctn_ready = await n.start(
+                    start_ahab,
+                    'marketstored',
+                )
+                await ctn_ready.wait()
+                log.info('`marketstore` container:{uid} up')
+
             await trio.sleep_forever()

     trio.run(main)


-@click.group(context_settings=_context_defaults)
+@click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
     default=[DEFAULT_BROKER],

@@ -87,8 +100,8 @@ def cli(ctx, brokers, loglevel, tl, configdir):
         'loglevel': loglevel,
         'tractorloglevel': None,
         'log': get_console_log(loglevel),
-        'confdir': _config_dir,
-        'wl_path': _watchlists_data_path,
+        'confdir': config._config_dir,
+        'wl_path': config._watchlists_data_path,
     })

     # allow enabling same loglevel in ``tractor`` machinery
piker/config.py

@@ -17,6 +17,8 @@
 """
 Broker configuration mgmt.
 """
+import platform
+import sys
 import os
 from os.path import dirname
 import shutil

@@ -24,14 +26,100 @@ from typing import Optional

 from bidict import bidict
 import toml
-import click

 from .log import get_logger

 log = get_logger('broker-config')

-_config_dir = click.get_app_dir('piker')
+
+# taken from ``click`` since apparently they have some
+# super weirdness with sigint and sudo..no clue
+def get_app_dir(app_name, roaming=True, force_posix=False):
+    r"""Returns the config folder for the application.  The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Win XP (roaming):
+      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
+    Win XP (not roaming):
+      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
+    Win 7 (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Win 7 (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name.  This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no affect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+
+    def _posixify(name):
+        return "-".join(name.split()).lower()
+
+    # if WIN:
+    if platform.system() == 'Windows':
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(
+            os.path.expanduser("~/.{}".format(_posixify(app_name))))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+_config_dir = _click_config_dir = get_app_dir('piker')
+_parent_user = os.environ.get('SUDO_USER')
+
+if _parent_user:
+    non_root_user_dir = os.path.expanduser(
+        f'~{_parent_user}'
+    )
+    root = 'root'
+    _config_dir = (
+        non_root_user_dir +
+        _click_config_dir[
+            _click_config_dir.rfind(root) + len(root):
+        ]
+    )
+
 _file_name = 'brokers.toml'
+_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+_context_defaults = dict(
+    default_map={
+        # Questrade specific quote poll rates
+        'monitor': {
+            'rate': 3,
+        },
+        'optschain': {
+            'rate': 1,
+        },
+    }
+)


 def _override_config_dir(
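The `SUDO_USER` handling above splices the invoking user's home directory onto whatever path suffix follows `root` in the click-style app dir, so `sudo piker ...` reads the real user's config instead of root's. A small worked example of the string surgery (paths are illustrative):

```python
# worked example of the SUDO_USER path rewrite above
# (paths are illustrative, not computed from the env here).
_click_config_dir = '/root/.config/piker'
_parent_user = 'alice'  # i.e. os.environ['SUDO_USER']

root = 'root'
non_root_user_dir = f'/home/{_parent_user}'
_config_dir = (
    non_root_user_dir
    + _click_config_dir[_click_config_dir.rfind(root) + len(root):]
)
assert _config_dir == '/home/alice/.config/piker'
```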
piker/data/_ahab.py (new file)

@@ -0,0 +1,348 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Supervisor for docker with included specific-image service helpers.

'''
import os
from typing import (
    Optional,
    # Any,
)
from contextlib import asynccontextmanager as acm

import trio
from trio_typing import TaskStatus
import tractor
import docker
import json
from docker.models.containers import Container
from docker.errors import DockerException, APIError
from requests.exceptions import ConnectionError, ReadTimeout

from ..log import get_logger, get_console_log
from .. import config

log = get_logger(__name__)


_config = '''
# piker's ``marketstore`` config.

# mount this config using:
# sudo docker run --mount \
# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
# 5993:5993 alpacamarkets/marketstore:latest

root_directory: data
listen_port: 5993
grpc_listen_port: 5995
log_level: debug
queryable: true
stop_grace_period: 0
wal_rotate_interval: 5
stale_threshold: 5
enable_add: true
enable_remove: false

triggers:
  - module: ondiskagg.so
    on: "*/1Sec/OHLCV"
    config:
        # filter: "nasdaq"
        destinations:
            - 1Min
            - 5Min
            - 15Min
            - 1H
            - 1D

  - module: stream.so
    on: '*/*/*'
    # config:
    #     filter: "nasdaq"

'''


class DockerNotStarted(Exception):
    'Prolly you dint start da daemon bruh'


@acm
async def open_docker(
    url: Optional[str] = None,
    **kwargs,

) -> docker.DockerClient:

    client: Optional[docker.DockerClient] = None
    try:
        client = docker.DockerClient(
            base_url=url,
            **kwargs
        ) if url else docker.from_env(**kwargs)

        yield client

    except (
        DockerException,
        APIError,
    ) as err:

        def unpack_msg(err: Exception) -> str:
            args = getattr(err, 'args', None)
            if args:
                return args
            else:
                return str(err)

        # could be more specific so let's check if it's just perms.
        if err.args:
            errs = err.args
            for err in errs:
                msg = unpack_msg(err)
                if 'PermissionError' in msg:
                    raise DockerException('You dint run as root yo!')

                elif 'FileNotFoundError' in msg:
                    raise DockerNotStarted('Did you start da service sister?')

        # not perms?
        raise

    finally:
        if client:
            client.close()
            # client.api._custom_adapter.close()
            for c in client.containers.list():
                c.kill()


@tractor.context
async def open_marketstored(
    ctx: tractor.Context,
    **kwargs,

) -> None:
    '''
    Start and supervise a marketstore instance with its config bind-mounted
    in from the piker config directory on the system.

    The equivalent cli cmd to this code is:

        sudo docker run --mount \
        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
        5993:5993 alpacamarkets/marketstore:latest

    '''
    log = get_console_log('info', name=__name__)

    async with open_docker() as client:

        # create a mount from user's local piker config dir into container
        config_dir_mnt = docker.types.Mount(
            target='/etc',
            source=config._config_dir,
            type='bind',
        )

        # create a user config subdir where the marketstore
        # backing filesystem database can be persisted.
        persistent_data_dir = os.path.join(
            config._config_dir, 'data',
        )
        if not os.path.isdir(persistent_data_dir):
            os.mkdir(persistent_data_dir)

        data_dir_mnt = docker.types.Mount(
            target='/data',
            source=persistent_data_dir,
            type='bind',
        )

        cntr: Container = client.containers.run(
            'alpacamarkets/marketstore:latest',
            # do we need this for cmds?
            # '-i',

            # '-p 5993:5993',
            ports={
                '5993/tcp': 5993,  # jsonrpc
                '5995/tcp': 5995,  # grpc
            },
            mounts=[config_dir_mnt, data_dir_mnt],
            detach=True,
            # stop_signal='SIGINT',
            init=True,
            # remove=True,
        )
        try:
            seen_so_far = set()

            async def process_logs_until(
                match: str,
                bp_on_msg: bool = False,
            ):
                logs = cntr.logs(stream=True)
                for entry in logs:
                    entry = entry.decode()

                    try:
                        record = json.loads(entry.strip())
                    except json.JSONDecodeError:
                        if 'Error' in entry:
                            raise RuntimeError(entry)

                    msg = record['msg']
                    level = record['level']
                    if msg and entry not in seen_so_far:
                        seen_so_far.add(entry)
                        if bp_on_msg:
                            await tractor.breakpoint()
                        getattr(log, level, log.error)(f'{msg}')

                    # if "launching tcp listener for all services..." in msg:
                    if match in msg:
                        return True

                    # do a checkpoint so we don't block if cancelled B)
                    await trio.sleep(0)

                return False

            with trio.move_on_after(0.5):
                found = await process_logs_until(
                    "launching tcp listener for all services...",
                )

                if not found and cntr not in client.containers.list():
                    raise RuntimeError(
                        'Failed to start `marketstore` check logs deats'
                    )

            await ctx.started(cntr.id)

            # block for the expected "teardown log msg"..
            await process_logs_until('exiting...',)

        except (
            BaseException,
            # trio.Cancelled,
            # KeyboardInterrupt,
        ):
            cntr.kill('SIGINT')
            with trio.move_on_after(0.5) as cs:
                cs.shield = True
                await process_logs_until('exiting...',)
            raise

        finally:
            try:
                cntr.wait(
                    timeout=0.5,
                    condition='not-running',
                )
            except (
                ReadTimeout,
                ConnectionError,
            ):
                cntr.kill()


async def start_ahab(
    service_name: str,
    task_status: TaskStatus[trio.Event] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Start a ``docker`` container supervisor with given service name.

    Currently the actor calling this task should normally be started
    with root permissions (until we decide to use something that doesn't
    require this, like docker's rootless mode or some wrapper project) but
    the root perms are de-escalated after the docker supervisor sub-actor
    is started.

    '''
    cn_ready = trio.Event()
    try:
        async with tractor.open_nursery(
            loglevel='runtime',
        ) as tn:

            portal = await tn.start_actor(
                service_name,
                enable_modules=[__name__]
            )

            # TODO: we have issues with this on teardown
            # where ``tractor`` tries to issue ``os.kill()``
            # and hits perms errors since the root process
            # doesn't any longer have root perms..

            # de-escalate root perms to the original user
            # after the docker supervisor actor is spawned.
            if config._parent_user:
                import pwd
                os.setuid(
                    pwd.getpwnam(
                        config._parent_user
                    )[2]  # named user's uid
                )

            task_status.started(cn_ready)

            async with portal.open_context(
                open_marketstored,
            ) as (ctx, first):

                assert str(first)
                # run till cancelled
                await trio.sleep_forever()

            # since we demoted root perms in this parent
            # we'll get a perms error on proc cleanup in
            # ``tractor`` nursery exit. just make sure
            # the child is terminated and don't raise the
            # error if so.

            # TODO: we could also consider adding
            # a ``tractor.ZombieDetected`` or something that we could raise
            # if we find the child didn't terminate.
            # await tractor.breakpoint()
    except PermissionError:
        log.warning('Failed to cancel root permsed container')

    except (
        trio.MultiError,
    ) as err:
        for subexc in err.exceptions:
            if isinstance(subexc, PermissionError):
                log.warning('Failed to cancel root perms-ed container')
                return
        else:
            raise


async def main():
    await start_ahab()
    await trio.sleep_forever()


if __name__ == '__main__':
    trio.run(main)
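As wired up via the `pikerd --tsdb` flag in the cli diff above, the supervisor is started as a nursery task and hands back an event that fires once the container is accepting connections. A hedged sketch of that wiring in isolation (requires docker and, per the docstring, root perms):

```python
# sketch: spawning the docker supervisor from a trio nursery,
# mirroring the `pikerd --tsdb` wiring; requires a running
# docker daemon and (currently) root permissions.
import trio

from piker.data._ahab import start_ahab


async def main() -> None:
    async with trio.open_nursery() as n:
        # `n.start()` returns the event passed to
        # ``task_status.started()`` inside ``start_ahab()``
        ctn_ready = await n.start(start_ahab, 'marketstored')
        await ctn_ready.wait()  # container is up
        # ... run until done, then tear everything down
        n.cancel_scope.cancel()


if __name__ == '__main__':
    trio.run(main)
```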
piker/data/_sampling.py

@@ -22,14 +22,16 @@ financial data flows.
 from __future__ import annotations
 from collections import Counter
 import time
+from typing import TYPE_CHECKING, Optional

 import tractor
 import trio
 from trio_typing import TaskStatus

-from ._sharedmem import ShmArray
 from ..log import get_logger

+if TYPE_CHECKING:
+    from ._sharedmem import ShmArray

 log = get_logger(__name__)

@@ -88,6 +90,7 @@ async def increment_ohlc_buffer(

     total_s = 0  # total seconds counted
     lowest = min(sampler.ohlcv_shms.keys())
+    lowest_shm = sampler.ohlcv_shms[lowest][0]
     ad = lowest - 0.001

     with trio.CancelScope() as cs:

@@ -131,21 +134,38 @@ async def increment_ohlc_buffer(
             # write to the buffer
             shm.push(last)

-            # broadcast the buffer index step to any subscribers for
-            # a given sample period.
-            subs = sampler.subscribers.get(delay_s, ())
-
-            for stream in subs:
-                try:
-                    await stream.send({'index': shm._last.value})
-                except (
-                    trio.BrokenResourceError,
-                    trio.ClosedResourceError
-                ):
-                    log.error(
-                        f'{stream._ctx.chan.uid} dropped connection'
-                    )
-                    subs.remove(stream)
+        await broadcast(delay_s, shm=lowest_shm)
+
+
+async def broadcast(
+    delay_s: int,
+    shm: Optional[ShmArray] = None,
+
+) -> None:
+    # broadcast the buffer index step to any subscribers for
+    # a given sample period.
+    subs = sampler.subscribers.get(delay_s, ())
+
+    if shm is None:
+        lowest = min(sampler.ohlcv_shms.keys())
+        shm = sampler.ohlcv_shms[lowest][0]
+
+    for stream in subs:
+        try:
+            await stream.send({'index': shm._last.value})
+        except (
+            trio.BrokenResourceError,
+            trio.ClosedResourceError
+        ):
+            log.error(
+                f'{stream._ctx.chan.uid} dropped connection'
+            )
+            try:
+                subs.remove(stream)
+            except ValueError:
+                log.warning(
+                    f'{stream._ctx.chan.uid} sub already removed!?'
+                )


 @tractor.context

@@ -365,7 +385,12 @@ async def uniform_rate_send(

         if left_to_sleep > 0:
             with trio.move_on_after(left_to_sleep) as cs:
-                sym, last_quote = await quote_stream.receive()
+                try:
+                    sym, last_quote = await quote_stream.receive()
+                except trio.EndOfChannel:
+                    log.exception(f"feed for {stream} ended?")
+                    break

                 diff = time.time() - last_send

                 if not first_quote:
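Factoring the index-step fan-out into a standalone `broadcast()` means other code paths, e.g. a history backfiller that just prepended data, can force subscribers to re-read shared memory without waiting for the next sampler tick. A short sketch of such an out-of-band call (the surrounding backfill task is assumed):

```python
# sketch: force subscribers of the 1s sample period to
# re-read shm after an out-of-band (e.g. backfill) write;
# the calling backfill task is assumed, not shown here.
from piker.data._sampling import broadcast


async def after_backfill(shm) -> None:
    # notify every stream registered for the 1 second period
    await broadcast(delay_s=1, shm=shm)
```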
piker/data/_sharedmem.py

@@ -22,7 +22,6 @@ from __future__ import annotations
 from sys import byteorder
 from typing import Optional
 from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
-from multiprocessing import resource_tracker as mantracker

 if _USE_POSIX:
     from _posixshmem import shm_unlink

@@ -30,6 +29,7 @@ if _USE_POSIX:
 import tractor
 import numpy as np
 from pydantic import BaseModel
+from numpy.lib import recfunctions as rfn

 from ..log import get_logger
 from ._source import base_iohlc_dtype

@@ -40,32 +40,39 @@ log = get_logger(__name__)

 # how much is probably dependent on lifestyle
 _secs_in_day = int(60 * 60 * 24)
-# we try for 3 times but only on a run-every-other-day kinda week.
-_default_size = 10 * _secs_in_day
+# we try for a buncha times, but only on a run-every-other-day kinda week.
+_days_worth = 16
+_default_size = _days_worth * _secs_in_day
 # where to start the new data append index
-_rt_buffer_start = int(9*_secs_in_day)
+_rt_buffer_start = int((_days_worth - 1) * _secs_in_day)


-# Tell the "resource tracker" thing to fuck off.
-class ManTracker(mantracker.ResourceTracker):
-    def register(self, name, rtype):
-        pass
-
-    def unregister(self, name, rtype):
-        pass
-
-    def ensure_running(self):
-        pass
-
-
-# "know your land and know your prey"
-# https://www.dailymotion.com/video/x6ozzco
-mantracker._resource_tracker = ManTracker()
-mantracker.register = mantracker._resource_tracker.register
-mantracker.ensure_running = mantracker._resource_tracker.ensure_running
-ensure_running = mantracker._resource_tracker.ensure_running
-mantracker.unregister = mantracker._resource_tracker.unregister
-mantracker.getfd = mantracker._resource_tracker.getfd
+def cuckoff_mantracker():
+
+    from multiprocessing import resource_tracker as mantracker
+
+    # Tell the "resource tracker" thing to fuck off.
+    class ManTracker(mantracker.ResourceTracker):
+        def register(self, name, rtype):
+            pass
+
+        def unregister(self, name, rtype):
+            pass
+
+        def ensure_running(self):
+            pass
+
+    # "know your land and know your prey"
+    # https://www.dailymotion.com/video/x6ozzco
+    mantracker._resource_tracker = ManTracker()
+    mantracker.register = mantracker._resource_tracker.register
+    mantracker.ensure_running = mantracker._resource_tracker.ensure_running
+    # ensure_running = mantracker._resource_tracker.ensure_running
+    mantracker.unregister = mantracker._resource_tracker.unregister
+    mantracker.getfd = mantracker._resource_tracker.getfd
+
+
+cuckoff_mantracker()


 class SharedInt:

@@ -191,7 +198,11 @@ class ShmArray:
         self._post_init: bool = False

         # pushing data does not write the index (aka primary key)
-        self._write_fields = list(shmarr.dtype.fields.keys())[1:]
+        dtype = shmarr.dtype
+        if dtype.fields:
+            self._write_fields = list(shmarr.dtype.fields.keys())[1:]
+        else:
+            self._write_fields = None

         # TODO: ringbuf api?

@@ -237,6 +248,48 @@ class ShmArray:

         return a

+    def ustruct(
+        self,
+        fields: Optional[list[str]] = None,
+
+        # type that all field values will be cast to
+        # in the returned view.
+        common_dtype: np.dtype = np.float,
+
+    ) -> np.ndarray:
+
+        array = self._array
+
+        if fields:
+            selection = array[fields]
+            # fcount = len(fields)
+        else:
+            selection = array
+            # fcount = len(array.dtype.fields)
+
+        # XXX: manual ``.view()`` attempt that also doesn't work.
+        # uview = selection.view(
+        #     dtype='<f16',
+        # ).reshape(-1, 4, order='A')
+
+        # assert len(selection) == len(uview)
+
+        u = rfn.structured_to_unstructured(
+            selection,
+            # dtype=float,
+            copy=True,
+        )
+
+        # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf)
+        # array[:] = a[:]
+        return u
+        # return ShmArray(
+        #     shmarr=u,
+        #     first=self._first,
+        #     last=self._last,
+        #     shm=self._shm
+        # )
+
     def last(
         self,
         length: int = 1,

@@ -255,6 +308,7 @@ class ShmArray:

         field_map: Optional[dict[str, str]] = None,
         prepend: bool = False,
+        update_first: bool = True,
         start: Optional[int] = None,

     ) -> int:

@@ -267,10 +321,9 @@ class ShmArray:

         '''
         length = len(data)
-        index = start if start is not None else self._last.value

         if prepend:
-            index = self._first.value - length
+            index = (start or self._first.value) - length

             if index < 0:
                 raise ValueError(

@@ -278,6 +331,9 @@ class ShmArray:
                     f'You have passed {abs(index)} too many datums.'
                 )

+        else:
+            index = start if start is not None else self._last.value
+
         end = index + length

         if field_map:

@@ -295,12 +351,17 @@ class ShmArray:
             # tries to access ``.array`` (which due to the index
             # overlap will be empty). Pretty sure we've fixed it now
             # but leaving this here as a reminder.
-            if prepend:
+            if prepend and update_first:
                 assert index < self._first.value

-            if index < self._first.value:
+            if (
+                index < self._first.value
+                and update_first
+            ):
+                assert prepend, 'prepend=True not passed but index decreased?'
                 self._first.value = index
-            else:
+
+            elif not prepend:
                 self._last.value = end

             self._post_init = True

@@ -336,6 +397,7 @@ class ShmArray:
                     f"Input array has unknown field(s): {only_in_theirs}"
                 )

+    # TODO: support "silent" prepends that don't update ._first.value?
     def prepend(
         self,
         data: np.ndarray,

@@ -386,7 +448,11 @@ def open_shm_array(
         create=True,
         size=a.nbytes
     )
-    array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf)
+    array = np.ndarray(
+        a.shape,
+        dtype=a.dtype,
+        buffer=shm.buf
+    )
     array[:] = a[:]
     array.setflags(write=int(not readonly))
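The new `update_first`/`start` parameters effectively allow "silent" history prepends: data can be written at indices before `._first.value` without moving it, so readers don't see a partially-filled frame until the writer publishes it. A sketch of the two write modes (key name, dtype and sizes are illustrative):

```python
# sketch: normal appends vs. silent history prepends using
# the new ShmArray ``.push()`` flags; names are illustrative.
import numpy as np

from piker.data._sharedmem import open_shm_array

shm = open_shm_array('ohlc_example')

live = np.zeros(10, dtype=shm._array.dtype)
shm.push(live)  # normal append: advances ._last

hist = np.zeros(10, dtype=shm._array.dtype)
# write history *before* the visible window without
# publishing it to readers (neither ._first nor ._last move):
shm.push(
    hist,
    prepend=True,
    update_first=False,
    start=shm._first.value,
)
```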
piker/data/_source.py

@@ -22,8 +22,7 @@ from typing import Any
 import decimal

 import numpy as np
-import pandas as pd
-from pydantic import BaseModel, validate_arguments
+from pydantic import BaseModel
 # from numba import from_dtype

@@ -127,11 +126,11 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:


 class Symbol(BaseModel):
-    """I guess this is some kinda container thing for dealing with
+    '''
+    I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?

     Yah, i guess dats what it izz.
-    """
+    '''
     key: str
     tick_size: float = 0.01
     lot_tick_size: float = 0.0  # "volume" precision as min step value

@@ -254,61 +253,6 @@ class Symbol(BaseModel):
         return keys


-def from_df(
-
-    df: pd.DataFrame,
-    source=None,
-    default_tf=None
-
-) -> np.recarray:
-    """Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``.
-
-    """
-    df.reset_index(inplace=True)
-
-    # hackery to convert field names
-    date = 'Date'
-    if 'date' in df.columns:
-        date = 'date'
-
-    # convert to POSIX time
-    df[date] = [d.timestamp() for d in df[date]]
-
-    # try to rename from some camel case
-    columns = {
-        'Date': 'time',
-        'date': 'time',
-        'Open': 'open',
-        'High': 'high',
-        'Low': 'low',
-        'Close': 'close',
-        'Volume': 'volume',
-
-        # most feeds are providing this over sesssion anchored
-        'vwap': 'bar_wap',
-
-        # XXX: ib_insync calls this the "wap of the bar"
-        # but no clue what is actually is...
-        # https://github.com/pikers/piker/issues/119#issuecomment-729120988
-        'average': 'bar_wap',
-    }
-
-    df = df.rename(columns=columns)
-
-    for name in df.columns:
-        # if name not in base_ohlc_dtype.names[1:]:
-        if name not in base_ohlc_dtype.names:
-            del df[name]
-
-    # TODO: it turns out column access on recarrays is actually slower:
-    # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
-    # it might make sense to make these structured arrays?
-    array = df.to_records(index=False)
-    _nan_to_closest_num(array)
-
-    return array
-
-
 def _nan_to_closest_num(array: np.ndarray):
     """Return interpolated values instead of NaN.
@ -16,26 +16,34 @@
|
|||
|
||||
"""
|
||||
marketstore cli.
|
||||
|
||||
"""
|
||||
from typing import List
|
||||
from functools import partial
|
||||
from pprint import pformat
|
||||
|
||||
from anyio_marketstore import open_marketstore_client
|
||||
import trio
|
||||
import tractor
|
||||
import click
|
||||
import numpy as np
|
||||
|
||||
from .marketstore import (
|
||||
get_client,
|
||||
stream_quotes,
|
||||
# stream_quotes,
|
||||
ingest_quote_stream,
|
||||
_url,
|
||||
# _url,
|
||||
_tick_tbk_ids,
|
||||
mk_tbk,
|
||||
)
|
||||
from ..cli import cli
|
||||
from .. import watchlists as wl
|
||||
from ..log import get_logger
|
||||
from ._sharedmem import (
|
||||
maybe_open_shm_array,
|
||||
)
|
||||
from ._source import (
|
||||
base_iohlc_dtype,
|
||||
)
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
@@ -49,51 +57,58 @@ log = get_logger(__name__)
)
@click.argument('names', nargs=-1)
@click.pass_obj
def ms_stream(config: dict, names: List[str], url: str):
    """Connect to a marketstore time bucket stream for (a set of) symbol(s)
def ms_stream(
    config: dict,
    names: list[str],
    url: str,
) -> None:
    '''
    Connect to a marketstore time bucket stream for (a set of) symbol(s)
    and print to console.
    """

    '''
    async def main():
        async for quote in stream_quotes(symbols=names):
            log.info(f"Received quote:\n{quote}")
        # async for quote in stream_quotes(symbols=names):
        #     log.info(f"Received quote:\n{quote}")
        ...

    trio.run(main)


@cli.command()
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
)
@click.argument('names', nargs=-1)
@click.pass_obj
def ms_destroy(config: dict, names: List[str], url: str) -> None:
    """Destroy symbol entries in the local marketstore instance.
    """
    async def main():
        nonlocal names
        async with get_client(url) as client:

            if not names:
                names = await client.list_symbols()

            # default is to wipe db entirely.
            answer = input(
                "This will entirely wipe your local marketstore db @ "
                f"{url} of the following symbols:\n {pformat(names)}"
                "\n\nDelete [N/y]?\n")

            if answer == 'y':
                for sym in names:
                    # tbk = _tick_tbk.format(sym)
                    tbk = (sym, *_tick_tbk_ids)
                    print(f"Destroying {tbk}..")
                    await client.destroy(mk_tbk(tbk))
            else:
                print("Nothing deleted.")

    tractor.run(main)
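# Sketch of the table-bucket-key ("tbk") construction above, with the
# module constants inlined for illustration:
_tick_tbk_ids = ('1Sec', 'TICK')
sym = 'SPY'
tbk = (sym, *_tick_tbk_ids)  # ('SPY', '1Sec', 'TICK')
assert '/'.join(tbk) == 'SPY/1Sec/TICK'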
# @cli.command()
# @click.option(
#     '--url',
#     default=_url,
#     help='HTTP URL of marketstore instance'
# )
# @click.argument('names', nargs=-1)
# @click.pass_obj
# def ms_destroy(config: dict, names: list[str], url: str) -> None:
#     """Destroy symbol entries in the local marketstore instance.
#     """
#     async def main():
#         nonlocal names
#         async with get_client(url) as client:
#
#             if not names:
#                 names = await client.list_symbols()
#
#             # default is to wipe db entirely.
#             answer = input(
#                 "This will entirely wipe your local marketstore db @ "
#                 f"{url} of the following symbols:\n {pformat(names)}"
#                 "\n\nDelete [N/y]?\n")
#
#             if answer == 'y':
#                 for sym in names:
#                     # tbk = _tick_tbk.format(sym)
#                     tbk = (sym, *_tick_tbk_ids)
#                     print(f"Destroying {tbk}..")
#                     await client.destroy(mk_tbk(tbk))
#             else:
#                 print("Nothing deleted.")
#
#     tractor.run(main)


@cli.command()
@@ -102,41 +117,53 @@ def ms_destroy(config: dict, names: List[str], url: str) -> None:
    is_flag=True,
    help='Enable tractor logging')
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
    '--host',
    default='localhost'
)
@click.argument('name', nargs=1, required=True)
@click.option(
    '--port',
    default=5993
)
@click.argument('symbols', nargs=-1)
@click.pass_obj
def ms_shell(config, name, tl, url):
    """Start an IPython shell ready to query the local marketstore db.
    """
    async def main():
        async with get_client(url) as client:
            query = client.query  # noqa
            # TODO: write magics to query marketstore
            from IPython import embed
            embed()
def storesh(
    config,
    tl,
    host,
    port,
    symbols: list[str],
):
    '''
    Start an IPython shell ready to query the local marketstore db.

    tractor.run(main)
    '''
    from piker.data.marketstore import tsdb_history_update
    from piker._daemon import open_piker_runtime

    async def main():
        nonlocal symbols

        async with open_piker_runtime(
            'storesh',
            enable_modules=['piker.data._ahab'],
        ):
            symbol = symbols[0]
            await tsdb_history_update(symbol)

    trio.run(main)


@cli.command()
@click.option('--test-file', '-t', help='Test quote stream file')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option(
    '--url',
    default=_url,
    help='HTTP URL of marketstore instance'
)
@click.argument('name', nargs=1, required=True)
@click.pass_obj
def ingest(config, name, test_file, tl, url):
    """Ingest real-time broker quotes and ticks to a marketstore instance.
    """
def ingest(config, name, test_file, tl):
    '''
    Ingest real-time broker quotes and ticks to a marketstore instance.

    '''
    # global opts
    brokermod = config['brokermod']
    loglevel = config['loglevel']
    tractorloglevel = config['tractorloglevel']
    # log = config['log']
@@ -145,15 +172,25 @@ def ingest(config, name, test_file, tl, url):
    watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins)
    symbols = watchlists[name]

    tractor.run(
        partial(
            ingest_quote_stream,
            symbols,
            brokermod.name,
            tries=1,
            loglevel=loglevel,
        ),
        name='ingest_marketstore',
        loglevel=tractorloglevel,
        debug_mode=True,
    )
    grouped_syms = {}
    for sym in symbols:
        symbol, _, provider = sym.rpartition('.')
        if provider not in grouped_syms:
            grouped_syms[provider] = []

        grouped_syms[provider].append(symbol)

    async def entry_point():
        async with tractor.open_nursery() as n:
            for provider, symbols in grouped_syms.items():
                await n.run_in_actor(
                    ingest_quote_stream,
                    name='ingest_marketstore',
                    symbols=symbols,
                    brokername=provider,
                    tries=1,
                    actorloglevel=loglevel,
                    loglevel=tractorloglevel
                )

    tractor.run(entry_point)
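# Sketch of the symbol grouping above: fully qualified symbol names
# ("fqsn"s) carry a trailing provider suffix which
# ``str.rpartition('.')`` splits off (example names are made up):
grouped_syms: dict[str, list[str]] = {}
for sym in ['xbtusd.kraken', 'mnq.globex.ib', 'spy.ib']:
    symbol, _, provider = sym.rpartition('.')
    grouped_syms.setdefault(provider, []).append(symbol)

assert grouped_syms == {'kraken': ['xbtusd'], 'ib': ['mnq.globex', 'spy']}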
@@ -20,7 +20,9 @@ Data feed apis and infra.
This module is enabled for ``brokerd`` daemons.

"""
from __future__ import annotations
from dataclasses import dataclass, field
from datetime import datetime
from contextlib import asynccontextmanager
from functools import partial
from types import ModuleType

@@ -35,12 +37,15 @@ from trio.abc import ReceiveChannel
from trio_typing import TaskStatus
import tractor
from pydantic import BaseModel
import pendulum
import numpy as np

from ..brokers import get_brokermod
from .._cacheables import maybe_open_context
from ..log import get_logger, get_console_log
from .._daemon import (
    maybe_spawn_brokerd,
    check_for_service,
)
from ._sharedmem import (
    maybe_open_shm_array,

@@ -56,11 +61,16 @@ from ._source import (
from ..ui import _search
from ._sampling import (
    sampler,
    broadcast,
    increment_ohlc_buffer,
    iter_ohlc_periods,
    sample_and_broadcast,
    uniform_rate_send,
)
from ..brokers._util import (
    NoData,
    DataUnavailable,
)


log = get_logger(__name__)

@@ -124,7 +134,7 @@ class _FeedsBus(BaseModel):

    # def cancel_task(
    #     self,
    #     task: trio.lowlevel.Task
    #     task: trio.lowlevel.Task,
    #     ) -> bool:
    #     ...
@@ -188,6 +198,129 @@ async def _setup_persistent_brokerd(
    await trio.sleep_forever()


def diff_history(
    array,
    start_dt,
    end_dt,
    last_tsdb_dt: Optional[datetime] = None

) -> np.ndarray:

    if last_tsdb_dt:
        s_diff = (start_dt - last_tsdb_dt).seconds

        to_push = array[:s_diff]

        # if we detect a partial frame's worth of data
        # that is new, slice out only that history and
        # write to shm.
        if abs(s_diff) < len(array):
            log.info(
                f'Pushing partial frame {to_push.size} to shm'
            )
            # assert last_tsdb_dt > start_dt
            # selected = array['time'] > last_tsdb_dt.timestamp()
            # to_push = array[selected]
            # return to_push

        return to_push

    else:
        return array

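# Sketch of the datetime arithmetic in ``diff_history`` above using
# ``pendulum`` directly; the concrete dates are made up:
import pendulum

last_tsdb_dt = pendulum.datetime(2022, 1, 1)
start_dt = last_tsdb_dt.add(seconds=30)
s_diff = (start_dt - last_tsdb_dt).seconds  # overlap row count for 1s bars
assert s_diff == 30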
async def start_backfill(
    mod: ModuleType,
    bfqsn: str,
    shm: ShmArray,

    last_tsdb_dt: Optional[datetime] = None,

    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> int:

    async with mod.open_history_client(bfqsn) as hist:

        # get latest query's worth of history all the way
        # back to what is recorded in the tsdb
        array, start_dt, end_dt = await hist(end_dt=None)

        to_push = diff_history(
            array,
            start_dt,
            end_dt,
            last_tsdb_dt=last_tsdb_dt,
        )

        log.info(f'Pushing {to_push.size} to shm!')
        shm.push(to_push)

        for delay_s in sampler.subscribers:
            await broadcast(delay_s)

        bf_done = trio.Event()
        # let caller unblock and deliver latest history frame
        task_status.started((shm, start_dt, end_dt, bf_done))

        if last_tsdb_dt is None:
            # maybe a better default (they don't seem to define epoch?!)
            last_tsdb_dt = pendulum.now().subtract(days=1)

        # pull new history frames until we hit latest
        # already in the tsdb or a max count.
        # mx_fills = 16
        count = 0
        # while True:
        while (
            end_dt > last_tsdb_dt
            # and count < mx_fills
        ):
            count += 1
            try:
                array, start_dt, end_dt = await hist(end_dt=start_dt)

            except NoData:
                # decrement by the diff in time last delivered.
                end_dt = start_dt.subtract(seconds=(end_dt - start_dt).seconds)
                continue

            except DataUnavailable:
                # broker is being a bish and we can't pull
                # any more..
                break

            to_push = diff_history(
                array,
                start_dt,
                end_dt,

                last_tsdb_dt=last_tsdb_dt,
                # XXX: hacky, just run indefinitely
                # last_tsdb_dt=None,
            )
            print(f"PULLING {count}")
            log.info(f'Pushing {to_push.size} to shm!')

            if to_push.size < 1:
                break

            # bail on shm allocation overrun
            try:
                shm.push(to_push, prepend=True)
            except ValueError:
                await tractor.breakpoint()
                break

            for delay_s in sampler.subscribers:
                await broadcast(delay_s)

        bf_done.set()
        # update start index to include all tsdb history
        # that was pushed in the caller parent task.
        # shm._first.value = 0
async def manage_history(
    mod: ModuleType,
    bus: _FeedsBus,

@@ -216,50 +349,177 @@ async def manage_history(
        # we expect the sub-actor to write
        readonly=False,
    )
    # TODO: history validation
    if not opened:
        raise RuntimeError(
            "Persistent shm for sym was already open?!"
        )

    if opened:
        log.info('Scanning for existing `marketstored`')

    is_up = await check_for_service('marketstored')

    # for now only do backfilling if no tsdb can be found
    do_legacy_backfill = not is_up and opened

    bfqsn = fqsn.replace('.' + mod.name, '')
    open_history_client = getattr(mod, 'open_history_client', None)

    if is_up and opened and open_history_client:

        log.info('Found existing `marketstored`')
        from . import marketstore
        async with marketstore.open_storage_client(
            fqsn,
        ) as storage:

            # TODO: this should be used verbatim for the pure
            # shm backfiller approach below.

            # start history analysis and load missing new data via backend.
            series, first_dt, last_dt = await storage.load(fqsn)

            broker, symbol, expiry = unpack_fqsn(fqsn)
            (
                shm,
                latest_start_dt,
                latest_end_dt,
                bf_done,
            ) = await bus.nursery.start(
                partial(
                    start_backfill,
                    mod,
                    bfqsn,
                    shm,
                    last_tsdb_dt=last_dt,
                )
            )
            task_status.started(shm)
            some_data_ready.set()

            await bf_done.wait()
            # do diff against last start frame of history and only fill
            # in from the tsdb an allotment that allows for most recent
            # to be loaded into mem *before* tsdb data.
            if last_dt:
                dt_diff_s = (latest_start_dt - last_dt).seconds
            else:
                dt_diff_s = 0

            # await trio.sleep_forever()
            # TODO: see if there's faster multi-field reads:
            # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
            # re-index with a `time` and index field
            prepend_start = shm._first.value

            # sanity check on most-recent-data loading
            assert prepend_start > dt_diff_s

            history = list(series.values())
            if history:
                fastest = history[0]
                to_push = fastest[:prepend_start]

                shm.push(
                    to_push,

                    # insert the history pre a "days worth" of samples
                    # to leave some real-time buffer space at the end.
                    prepend=True,
                    # update_first=False,
                    # start=prepend_start,
                    field_map={
                        'Epoch': 'time',
                        'Open': 'open',
                        'High': 'high',
                        'Low': 'low',
                        'Close': 'close',
                        'Volume': 'volume',
                    },
                )

                # load as much from storage into shm as space will
                # allow according to user's shm size settings.
                count = 0
                end = fastest['Epoch'][0]

                while shm._first.value > 0:
                    count += 1
                    series = await storage.read_ohlcv(
                        fqsn,
                        end=end,
                    )
                    history = list(series.values())
                    fastest = history[0]
                    end = fastest['Epoch'][0]
                    prepend_start -= len(to_push)
                    to_push = fastest[:prepend_start]

                    shm.push(
                        to_push,

                        # insert the history pre a "days worth" of samples
                        # to leave some real-time buffer space at the end.
                        prepend=True,
                        # update_first=False,
                        # start=prepend_start,
                        field_map={
                            'Epoch': 'time',
                            'Open': 'open',
                            'High': 'high',
                            'Low': 'low',
                            'Close': 'close',
                            'Volume': 'volume',
                        },
                    )
                    for delay_s in sampler.subscribers:
                        await broadcast(delay_s)

                    if count > 6:
                        break

                log.info(f'Loaded {to_push.shape} datums from storage')

                # TODO: write new data to tsdb to be ready for the next read.
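# Pure-numpy sketch of what the ``field_map`` push above amounts to
# (a guess at the mechanics, with shm internals elided): copy the
# tsdb's capitalized columns into the buffer's lowercase fields.
import numpy as np

src = np.array(
    [(1640995200, 4778.0)],
    dtype=[('Epoch', 'i8'), ('Close', 'f4')],
)
dst = np.zeros(1, dtype=[('time', 'f8'), ('close', 'f4')])
for src_name, dst_name in {'Epoch': 'time', 'Close': 'close'}.items():
    dst[dst_name][:] = src[src_name]

assert dst['time'][0] == 1640995200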
    if do_legacy_backfill:
        # do a legacy incremental backfill from the provider.
        log.info('No existing `marketstored` found..')

        # start history backfill task ``backfill_bars()`` is
        # a required backend func this must block until shm is
        # filled with first set of ohlc bars
        _ = await bus.nursery.start(mod.backfill_bars, fqsn, shm)

        # yield back after client connect with filled shm
        task_status.started(shm)

        # indicate to caller that feed can be delivered to
        # remote requesting client since we've loaded history
        # data that can be used.
        some_data_ready.set()

        # detect sample step size for sampled historical data
        times = shm.array['time']
        delay_s = times[-1] - times[times != times[-1]][-1]

        # begin real-time updates of shm and tsdb once the feed
        # goes live.
        await feed_is_live.wait()

        if opened:
            sampler.ohlcv_shms.setdefault(delay_s, []).append(shm)

            # start shm incrementing for OHLC sampling at the current
            # detected sampling period if one dne.
            if sampler.incrementers.get(delay_s) is None:
                await bus.start_task(
                    increment_ohlc_buffer,
                    delay_s,
        await bus.nursery.start(
            partial(
                start_backfill,
                mod,
                bfqsn,
                shm,
            )
        )

    # yield back after client connect with filled shm
    task_status.started(shm)

    # indicate to caller that feed can be delivered to
    # remote requesting client since we've loaded history
    # data that can be used.
    some_data_ready.set()

    # history retrieval loop depending on user interaction and thus
    # a small RPC-protocol for remotely controlling what data is loaded
    # for viewing.
    await trio.sleep_forever()
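# Sketch of the sample-period detection used in ``manage_history``
# above: diff the last timestamp against the most recent *distinct*
# one before it (the final row may still be mutating in real-time):
import numpy as np

times = np.array([100., 160., 220., 220.])
delay_s = times[-1] - times[times != times[-1]][-1]
assert delay_s == 60.0  # i.e. 1min bars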
async def allocate_persistent_feed(
    bus: _FeedsBus,

    brokername: str,
    symbol: str,

    loglevel: str,
    start_stream: bool = True,

    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

@@ -277,6 +537,7 @@ async def allocate_persistent_feed(
    - a real-time streaming task which connec

    '''
    # load backend module
    try:
        mod = get_brokermod(brokername)
    except ImportError:

@@ -319,7 +580,7 @@ async def allocate_persistent_feed(
        manage_history,
        mod,
        bus,
        bfqsn,
        '.'.join((bfqsn, brokername)),
        some_data_ready,
        feed_is_live,
    )

@@ -333,7 +594,10 @@ async def allocate_persistent_feed(
    # true fqsn
    fqsn = '.'.join((bfqsn, brokername))
    # add a fqsn entry that includes the ``.<broker>`` suffix
    # and an entry that includes the broker-specific fqsn (including
    # any new suffixes or elements as injected by the backend).
    init_msg[fqsn] = msg
    init_msg[bfqsn] = msg

    # TODO: pretty sure we don't need this? why not just leave 1s as
    # the fastest "sample period" since we'll probably always want that

@@ -347,13 +611,14 @@ async def allocate_persistent_feed(
    await some_data_ready.wait()

    # append ``.<broker>`` suffix to each quote symbol
    bsym = symbol + f'.{brokername}'
    acceptable_not_fqsn_with_broker_suffix = symbol + f'.{brokername}'

    generic_first_quotes = {
        bsym: first_quote,
        acceptable_not_fqsn_with_broker_suffix: first_quote,
        fqsn: first_quote,
    }

    bus.feeds[symbol] = bus.feeds[fqsn] = (
    bus.feeds[symbol] = bus.feeds[bfqsn] = (
        init_msg,
        generic_first_quotes,
    )

@@ -363,9 +628,25 @@ async def allocate_persistent_feed(
    # task_status.started((init_msg, generic_first_quotes))
    task_status.started()

    # backend will indicate when real-time quotes have begun.
    if not start_stream:
        await trio.sleep_forever()

    # begin real-time updates of shm and tsdb once the feed goes live and
    # the backend will indicate when real-time quotes have begun.
    await feed_is_live.wait()

    # start shm incrementer task for OHLC style sampling
    # at the current detected step period.
    times = shm.array['time']
    delay_s = times[-1] - times[times != times[-1]][-1]

    sampler.ohlcv_shms.setdefault(delay_s, []).append(shm)
    if sampler.incrementers.get(delay_s) is None:
        await bus.start_task(
            increment_ohlc_buffer,
            delay_s,
        )

    sum_tick_vlm: bool = init_msg.get(
        'shm_write_opts', {}
    ).get('sum_tick_vlm', True)
@@ -388,7 +669,7 @@ async def open_feed_bus(

    ctx: tractor.Context,
    brokername: str,
    symbol: str,
    symbol: str,  # normally expected to be the broker-specific fqsn
    loglevel: str,
    tick_throttle: Optional[float] = None,
    start_stream: bool = True,

@@ -410,7 +691,9 @@ async def open_feed_bus(
    # TODO: check for any stale shm entries for this symbol
    # (after we also group them in a nice `/dev/shm/piker/` subdir).
    # ensure we are who we think we are
    assert 'brokerd' in tractor.current_actor().name
    servicename = tractor.current_actor().name
    assert 'brokerd' in servicename
    assert brokername in servicename

    bus = get_feed_bus(brokername)


@@ -420,7 +703,7 @@ async def open_feed_bus(
    entry = bus.feeds.get(symbol)
    if entry is None:
        # allocate a new actor-local stream bus which
        # will persist for this `brokerd`.
        # will persist for this `brokerd`'s service lifetime.
        async with bus.task_lock:
            await bus.nursery.start(
                partial(

@@ -428,13 +711,12 @@ async def open_feed_bus(

                    bus=bus,
                    brokername=brokername,

                    # here we pass through the selected symbol in native
                    # "format" (i.e. upper vs. lowercase depending on
                    # provider).
                    symbol=symbol,

                    loglevel=loglevel,
                    start_stream=start_stream,
                )
            )
            # TODO: we can remove this?

@@ -450,7 +732,7 @@ async def open_feed_bus(
    # true fqsn
    fqsn = '.'.join([bfqsn, brokername])
    assert fqsn in first_quotes
    assert bus.feeds[fqsn]
    assert bus.feeds[bfqsn]

    # broker-ambiguous symbol (provided on cli - eg. mnq.globex.ib)
    bsym = symbol + f'.{brokername}'
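# Sketch of the fqsn ("fully qualified symbol name") convention used
# throughout: broker-specific key + '.' + broker name.
bfqsn, brokername = 'mnq.globex', 'ib'
fqsn = '.'.join([bfqsn, brokername])
assert fqsn == 'mnq.globex.ib'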
@@ -14,36 +14,60 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
'''
``marketstore`` integration.

- client management routines
- tick data ingest routines
- websocket client for subscribing to write triggers
- todo: tick sequence stream-cloning for testing
- todo: docker container management automation
"""
from contextlib import asynccontextmanager
from typing import Dict, Any, List, Callable, Tuple

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from pprint import pformat
from typing import (
    Any,
    Optional,
    Union,
)
import time
from math import isnan

from bidict import bidict
import msgpack
import pyqtgraph as pg
import numpy as np
import pandas as pd
import pymarketstore as pymkts
import tractor
from trio_websocket import open_websocket_url
from anyio_marketstore import (
    open_marketstore_client,
    MarketstoreClient,
    Params,
)
import pendulum
import purerpc

from .feed import maybe_open_feed
from ..log import get_logger, get_console_log
from ..data import open_feed


log = get_logger(__name__)

_tick_tbk_ids: Tuple[str, str] = ('1Sec', 'TICK')
_tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
_tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)
_url: str = 'http://localhost:5993/rpc'

_tick_dt = [
    # these two are required as a "primary key"
    ('Epoch', 'i8'),
    ('Nanoseconds', 'i4'),
    ('IsTrade', 'i1'),
    ('IsBid', 'i1'),
    ('Price', 'f4'),
    ('Size', 'f4')
]
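# Sketch: dtype field-lists like the one above feed straight into
# ``np.dtype`` to build marketstore-schema-compatible struct arrays:
import numpy as np

tick_dt = np.dtype([
    ('Epoch', 'i8'), ('Nanoseconds', 'i4'),
    ('IsTrade', 'i1'), ('IsBid', 'i1'),
    ('Price', 'f4'), ('Size', 'f4'),
])
arr = np.zeros(2, dtype=tick_dt)
assert arr['Price'].dtype == np.float32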
_quote_dt = [
    # these two are required as a "primary key"
    ('Epoch', 'i8'),

@@ -61,6 +85,7 @@ _quote_dt = [
    # ('brokerd_ts', 'i64'),
    # ('VWAP', 'f4')
]

_quote_tmp = {}.fromkeys(dict(_quote_dt).keys(), np.nan)
_tick_map = {
    'Up': 1,

@@ -69,28 +94,39 @@ _tick_map = {
    None: np.nan,
}

_ohlcv_dt = [
    # these two are required as a "primary key"
    ('Epoch', 'i8'),
    # ('Nanoseconds', 'i4'),

class MarketStoreError(Exception):
    "Generic marketstore client error"
    # ohlcv sampling
    ('Open', 'f4'),
    ('High', 'f4'),
    ('Low', 'i8'),
    ('Close', 'i8'),
    ('Volume', 'f4'),
]


def err_on_resp(response: dict) -> None:
    """Raise any errors found in responses from client request.
    """
    responses = response['responses']
    if responses is not None:
        for r in responses:
            err = r['error']
            if err:
                raise MarketStoreError(err)
def mk_tbk(keys: tuple[str, str, str]) -> str:
    '''
    Generate a marketstore table key from a tuple.
    Converts,
        ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"``

    '''
    return '/'.join(keys)


def quote_to_marketstore_structarray(
    quote: Dict[str, Any],
    last_fill: str,
    quote: dict[str, Any],
    last_fill: Optional[float]

) -> np.array:
    """Return marketstore writeable structarray from quote ``dict``.
    """
    '''
    Return marketstore writeable structarray from quote ``dict``.

    '''
    if last_fill:
        # new fill bby
        now = timestamp(last_fill)
@@ -101,7 +137,7 @@ def quote_to_marketstore_structarray(

    secs, ns = now / 10**9, now % 10**9

    # pack into List[Tuple[str, Any]]
    # pack into list[tuple[str, Any]]
    array_input = []

    # insert 'Epoch' entry first and then 'Nanoseconds'.

@@ -123,146 +159,426 @@ def quote_to_marketstore_structarray(
    return np.array([tuple(array_input)], dtype=_quote_dt)


def timestamp(datestr: str) -> int:
    """Return marketstore compatible 'Epoch' integer in nanoseconds
def timestamp(date, **kwargs) -> int:
    '''
    Return marketstore compatible 'Epoch' integer in nanoseconds
    from a date formatted str.
    """
    return int(pd.Timestamp(datestr).value)

    '''
    return int(pd.Timestamp(date, **kwargs).value)


def mk_tbk(keys: Tuple[str, str, str]) -> str:
    """Generate a marketstore table key from a tuple.

    Converts,
        ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"``
    """
    return '{}/' + '/'.join(keys)


class Client:
    """Async wrapper around the alpaca ``pymarketstore`` sync client.

    This will serve as the shell for building out a proper async client
    that isn't horribly documented and un-tested..
    """
    def __init__(self, url: str):
        self._client = pymkts.Client(url)

    async def _invoke(
        self,
        meth: Callable,
        *args,
        **kwargs,
    ) -> Any:
        return err_on_resp(meth(*args, **kwargs))

    async def destroy(
        self,
        tbk: Tuple[str, str, str],
    ) -> None:
        return await self._invoke(self._client.destroy, mk_tbk(tbk))

    async def list_symbols(
        self,
        tbk: str,
    ) -> List[str]:
        return await self._invoke(self._client.list_symbols, mk_tbk(tbk))

    async def write(
        self,
        symbol: str,
        array: np.ndarray,
    ) -> None:
        start = time.time()
        await self._invoke(
            self._client.write,
            array,
            _tick_tbk.format(symbol),
            isvariablelength=True
        )
        log.debug(f"{symbol} write time (s): {time.time() - start}")

    def query(
        self,
        symbol,
        tbk: Tuple[str, str] = _tick_tbk_ids,
    ) -> pd.DataFrame:
        # XXX: causes crash
        # client.query(pymkts.Params(symbol, '*', 'OHCLV'
        result = self._client.query(
            pymkts.Params(symbol, *tbk),
        )
        return result.first().df()


@asynccontextmanager
@acm
async def get_client(
    url: str = _url,
) -> Client:
    yield Client(url)
    host: str = 'localhost',
    port: int = 5995

) -> MarketstoreClient:
    '''
    Load an ``anyio_marketstore`` grpc client connected
    to an existing ``marketstore`` server.

    '''
    async with open_marketstore_client(
        host,
        port
    ) as client:
        yield client


class MarketStoreError(Exception):
    "Generic marketstore client error"


# def err_on_resp(response: dict) -> None:
#     """Raise any errors found in responses from client request.
#     """
#     responses = response['responses']
#     if responses is not None:
#         for r in responses:
#             err = r['error']
#             if err:
#                 raise MarketStoreError(err)


tf_in_1s = bidict({
    1: '1Sec',
    60: '1Min',
    60*5: '5Min',
    60*15: '15Min',
    60*30: '30Min',
    60*60: '1H',
    60*60*24: '1D',
})
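# Sketch of how the ``bidict`` above is used: seconds -> marketstore
# timeframe string, and back via ``.inverse``:
from bidict import bidict

tf = bidict({1: '1Sec', 60: '1Min'})
assert tf[60] == '1Min'
assert tf.inverse['1Sec'] == 1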
class Storage:
    '''
    High level storage api for both real-time and historical ingest.

    '''
    def __init__(
        self,
        client: MarketstoreClient,

    ) -> None:
        # TODO: eventually this should be an api/interface type that
        # ensures we can support multiple tsdb backends.
        self.client = client

        # series' cache from tsdb reads
        self._arrays: dict[str, np.ndarray] = {}

    async def list_keys(self) -> list[str]:
        return await self.client.list_symbols()

    async def search_keys(self, pattern: str) -> list[str]:
        '''
        Search for a time series key in the storage backend.

        '''
        ...

    async def write_ticks(self, ticks: list) -> None:
        ...

    async def load(
        self,
        fqsn: str,

    ) -> tuple[
        dict[int, np.ndarray],  # timeframe (in secs) to series
        Optional[datetime],  # first dt
        Optional[datetime],  # last dt
    ]:

        first_tsdb_dt, last_tsdb_dt = None, None
        tsdb_arrays = await self.read_ohlcv(fqsn)
        log.info(f'Loaded tsdb history {tsdb_arrays}')

        if tsdb_arrays:
            fastest = list(tsdb_arrays.values())[0]
            times = fastest['Epoch']
            first, last = times[0], times[-1]
            first_tsdb_dt, last_tsdb_dt = map(
                pendulum.from_timestamp, [first, last]
            )

        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt

    async def read_ohlcv(
        self,
        fqsn: str,
        timeframe: Optional[Union[int, str]] = None,
        end: Optional[int] = None,

    ) -> tuple[
        MarketstoreClient,
        Union[dict, np.ndarray]
    ]:
        client = self.client
        syms = await client.list_symbols()

        if fqsn not in syms:
            return {}

        tfstr = tf_in_1s[1]

        params = Params(
            symbols=fqsn,
            timeframe=tfstr,
            attrgroup='OHLCV',
            end=end,
            # limit_from_start=True,

            # TODO: figure the max limit here given the
            # msg size limit of purerpc: 33554432
            limit=int(800e3),
        )

        if timeframe is None:
            log.info(f'starting {fqsn} tsdb granularity scan..')
            # loop through and try to find highest granularity
            for tfstr in tf_in_1s.values():
                try:
                    log.info(f'querying for {tfstr}@{fqsn}')
                    params.set('timeframe', tfstr)
                    result = await client.query(params)
                    break

                except purerpc.grpclib.exceptions.UnknownError:
                    # XXX: this is already logged by the container and
                    # thus shows up through `marketstored` logs relay.
                    # log.warning(f'{tfstr}@{fqsn} not found')
                    continue
            else:
                return {}

        else:
            result = await client.query(params)

        # TODO: it turns out column access on recarrays is actually slower:
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
        # it might make sense to make these structured arrays?
        # Fill out a `numpy` array-results map
        arrays = {}
        for fqsn, data_set in result.by_symbols().items():
            arrays.setdefault(fqsn, {})[
                tf_in_1s.inverse[data_set.timeframe]
            ] = data_set.array

        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]

    async def delete_ts(
        self,
        key: str,
        timeframe: Optional[Union[int, str]] = None,

    ) -> bool:

        client = self.client
        syms = await client.list_symbols()
        print(syms)
        # if key not in syms:
        #     raise KeyError(f'`{fqsn}` table key not found?')

        return await client.destroy(tbk=key)

    async def write_ohlcv(
        self,
        fqsn: str,
        ohlcv: np.ndarray,
        append_and_duplicate: bool = True,

    ) -> None:
        # build mkts schema compat array for writing
        mkts_dt = np.dtype(_ohlcv_dt)
        mkts_array = np.zeros(
            len(ohlcv),
            dtype=mkts_dt,
        )
        # copy from shm array (yes it's this easy):
        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
        mkts_array[:] = ohlcv[[
            'time',
            'open',
            'high',
            'low',
            'close',
            'volume',
        ]]

        # write to db
        resp = await self.client.write(
            mkts_array,
            tbk=f'{fqsn}/1Sec/OHLCV',

            # NOTE: this will append duplicates
            # for the same timestamp-index.
            # TODO: pre deduplicate?
            isvariablelength=append_and_duplicate,
        )

        log.info(
            f'Wrote {mkts_array.size} datums to tsdb\n'
        )

        for resp in resp.responses:
            err = resp.error
            if err:
                raise MarketStoreError(err)
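# Sketch of the multi-field copy in ``write_ohlcv`` above: per the
# numpy docs linked there, assignment between structured arrays is by
# field *position*, so a field-list index lines the source columns up
# with the mkts schema (all data here is made up):
import numpy as np

src = np.array(
    [(1.5, 2.5, 3.5)],
    dtype=[('time', 'f8'), ('close', 'f4'), ('volume', 'f4')],
)
dst = np.zeros(1, dtype=[('Epoch', 'i8'), ('Close', 'f4'), ('Volume', 'f4')])
dst[:] = src[['time', 'close', 'volume']]
assert dst['Epoch'][0] == 1  # f8 -> i8 cast truncates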
@acm
async def open_storage_client(
    fqsn: str,
    period: Optional[Union[int, str]] = None,  # in seconds

) -> tuple[Storage, dict[str, np.ndarray]]:
    '''
    Load a series by key and deliver in ``numpy`` struct array format.

    '''
    async with (
        # eventually a storage backend endpoint
        get_client() as client,
    ):
        # slap on our wrapper api
        yield Storage(client)


async def tsdb_history_update(
    fqsn: Optional[str] = None,

) -> list[str]:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #    - lazy loading?
    #    - try to load it all and expect graphics caching/diffing
    #      to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth groking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    profiler = pg.debug.Profiler(
        disabled=False,  # not pg_profile_enabled(),
        delayed=False,
    )

    async with (
        open_storage_client(fqsn) as storage,

        maybe_open_feed(
            [fqsn],
            start_stream=False,

        ) as (feed, stream),
    ):
        profiler(f'opened feed for {fqsn}')

        to_append = feed.shm.array
        to_prepend = None

        if fqsn:
            symbol = feed.symbols.get(fqsn)
            if symbol:
                fqsn = symbol.front_fqsn()

            # diff db history with shm and only write the missing portions
            ohlcv = feed.shm.array

            # TODO: use pg profiler
            tsdb_arrays = await storage.read_ohlcv(fqsn)
            # hist diffing
            if tsdb_arrays:
                onesec = tsdb_arrays[1]
                to_append = ohlcv[ohlcv['time'] > onesec['Epoch'][-1]]
                to_prepend = ohlcv[ohlcv['time'] < onesec['Epoch'][0]]

            profiler('Finished db arrays diffs')

        syms = await storage.client.list_symbols()
        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
        profiler(f'listed symbols {syms}')

        # TODO: ask if user wants to write history for detected
        # available shm buffers?
        from tractor.trionics import ipython_embed
        await ipython_embed()

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqsn, array)

        # profiler('Finished db writes')
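# Sketch of the shm-vs-tsdb diff masks above: rows strictly newer
# than the tsdb's last epoch are appended, strictly older prepended:
import numpy as np

ohlcv = np.array([(98,), (99,), (100,), (101,)], dtype=[('time', 'i8')])
tsdb_epochs = np.array([99, 100])
to_append = ohlcv[ohlcv['time'] > tsdb_epochs[-1]]
to_prepend = ohlcv[ohlcv['time'] < tsdb_epochs[0]]
assert to_append['time'].tolist() == [101]
assert to_prepend['time'].tolist() == [98]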
async def ingest_quote_stream(
    symbols: List[str],
    symbols: list[str],
    brokername: str,
    tries: int = 1,
    loglevel: str = None,

) -> None:
    """Ingest a broker quote stream into marketstore in (sampled) tick format.
    """
    async with open_feed(
        brokername,
        symbols,
        loglevel=loglevel,
    ) as (first_quotes, qstream):
    '''
    Ingest a broker quote stream into a ``marketstore`` tsdb.

        quote_cache = first_quotes.copy()
    '''
    async with (
        maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed,
        get_client() as ms_client,
    ):
        async for quotes in feed.stream:
            log.info(quotes)
            for symbol, quote in quotes.items():
                for tick in quote.get('ticks', ()):
                    ticktype = tick.get('type', 'n/a')

    async with get_client() as ms_client:
                    # techtonic tick write
                    array = quote_to_marketstore_structarray({
                        'IsTrade': 1 if ticktype == 'trade' else 0,
                        'IsBid': 1 if ticktype in ('bid', 'bsize') else 0,
                        'Price': tick.get('price'),
                        'Size': tick.get('size')
                    }, last_fill=quote.get('broker_ts', None))

        # start ingest to marketstore
        async for quotes in qstream:
            log.info(quotes)
            for symbol, quote in quotes.items():
                    await ms_client.write(array, _tick_tbk)

                # remap tick strs to ints
                quote['tick'] = _tick_map[quote.get('tick', 'Equal')]
                    # LEGACY WRITE LOOP (using old tick dt)
                    # quote_cache = {
                    #     'size': 0,
                    #     'tick': 0
                    # }

                # check for volume update (i.e. did trades happen
                # since last quote)
                new_vol = quote.get('volume', None)
                if new_vol is None:
                    log.debug(f"No fills for {symbol}")
                    if new_vol == quote_cache.get('volume'):
                        # should never happen due to field diffing
                        # on sender side
                        log.error(
                            f"{symbol}: got same volume as last quote?")
                    # async for quotes in qstream:
                    #     log.info(quotes)
                    #     for symbol, quote in quotes.items():

                quote_cache.update(quote)
                    #         # remap tick strs to ints
                    #         quote['tick'] = _tick_map[quote.get('tick', 'Equal')]

                a = quote_to_marketstore_structarray(
                    quote,
                    # TODO: check this closer to the broker query api
                    last_fill=quote.get('fill_time', '')
                )
                await ms_client.write(symbol, a)
                    #         # check for volume update (i.e. did trades happen
                    #         # since last quote)
                    #         new_vol = quote.get('volume', None)
                    #         if new_vol is None:
                    #             log.debug(f"No fills for {symbol}")
                    #             if new_vol == quote_cache.get('volume'):
                    #                 # should never happen due to field diffing
                    #                 # on sender side
                    #                 log.error(
                    #                     f"{symbol}: got same volume as last quote?")

                    #         quote_cache.update(quote)

                    #         a = quote_to_marketstore_structarray(
                    #             quote,
                    #             # TODO: check this closer to the broker query api
                    #             last_fill=quote.get('fill_time', '')
                    #         )
                    #         await ms_client.write(symbol, a)
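# Sketch of the tick -> mkts struct mapping above on a single
# (made up) trade tick:
tick = {'type': 'trade', 'price': 101.25, 'size': 3.0}
ticktype = tick.get('type', 'n/a')
row = {
    'IsTrade': 1 if ticktype == 'trade' else 0,
    'IsBid': 1 if ticktype in ('bid', 'bsize') else 0,
    'Price': tick.get('price'),
    'Size': tick.get('size'),
}
assert row['IsTrade'] == 1 and row['IsBid'] == 0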
async def stream_quotes(
    symbols: List[str],
    symbols: list[str],
    host: str = 'localhost',
    port: int = 5993,
    diff_cached: bool = True,
    loglevel: str = None,

) -> None:
    """Open a symbol stream from a running instance of marketstore and
    '''
    Open a symbol stream from a running instance of marketstore and
    log to console.
    """

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    tbks: Dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}
    tbks: dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}

    async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
        # send subs topics to server

@@ -271,7 +587,7 @@ async def stream_quotes(
    )
    log.info(resp)

    async def recv() -> Dict[str, Any]:
    async def recv() -> dict[str, Any]:
        return msgpack.loads((await ws.get_message()), encoding='utf-8')

    streams = (await recv())['streams']
@@ -76,7 +76,6 @@ async def filter_quotes_by_sym(

async def fsp_compute(

    ctx: tractor.Context,
    symbol: Symbol,
    feed: Feed,
    quote_stream: trio.abc.ReceiveChannel,

@@ -86,7 +85,7 @@ async def fsp_compute(

    func: Callable,

    attach_stream: bool = False,
    # attach_stream: bool = False,
    task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,

) -> None:

@@ -127,8 +126,8 @@ async def fsp_compute(
    # each respective field.
    fields = getattr(dst.array.dtype, 'fields', None).copy()
    fields.pop('index')
    # TODO: nptyping here!
    history: Optional[np.ndarray] = None
    history: Optional[np.ndarray] = None  # TODO: nptyping here!

    if fields and len(fields) > 1 and fields:
        if not isinstance(history_output, dict):
            raise ValueError(

@@ -193,40 +192,47 @@ async def fsp_compute(
    profiler(f'{func_name} pushed history')
    profiler.finish()

    # TODO: UGH, what is the right way to do something like this?
    if not ctx._started_called:
        await ctx.started(index)

    # setup a respawn handle
    with trio.CancelScope() as cs:

        # TODO: might be better to just make a "restart" method where
        # the target task is spawned implicitly and then the event is
        # set via some higher level api? At that point we might as well
        # be writing a one-cancels-one nursery though right?
        tracker = TaskTracker(trio.Event(), cs)
        task_status.started((tracker, index))

        profiler(f'{func_name} yield last index')

        # import time
        # last = time.time()

        try:
            # rt stream
            async with ctx.open_stream() as stream:
                async for processed in out_stream:

                    log.debug(f"{func_name}: {processed}")
                    key, output = processed
                    index = src.index
                    dst.array[-1][key] = output
            async for processed in out_stream:

                # NOTE: for now we aren't streaming this to the consumer
                # stream latest array index entry which basically just acts
                # as trigger msg to tell the consumer to read from shm
                if attach_stream:
                    await stream.send(index)
                log.debug(f"{func_name}: {processed}")
                key, output = processed
                index = src.index
                dst.array[-1][key] = output

                # period = time.time() - last
                # hz = 1/period if period else float('nan')
                # if hz > 60:
                #     log.info(f'FSP quote too fast: {hz}')
                # last = time.time()
                # NOTE: for now we aren't streaming this to the consumer
                # stream latest array index entry which basically just acts
                # as trigger msg to tell the consumer to read from shm
                # TODO: further this should likely be implemented much
                # like our `Feed` api where there is one background
                # "service" task which computes output and then sends to
                # N-consumers who subscribe for the real-time output,
                # which we'll likely want to implement using local-mem
                # chans for the fan out?
                # if attach_stream:
                #     await client_stream.send(index)

                # period = time.time() - last
                # hz = 1/period if period else float('nan')
                # if hz > 60:
                #     log.info(f'FSP quote too fast: {hz}')
                # last = time.time()
        finally:
            tracker.complete.set()
@@ -255,7 +261,10 @@ async def cascade(
    destination shm array buffer.

    '''
    profiler = pg.debug.Profiler(delayed=False, disabled=False)
    profiler = pg.debug.Profiler(
        delayed=False,
        disabled=False
    )

    if loglevel:
        get_console_log(loglevel)

@@ -314,7 +323,6 @@ async def cascade(
    fsp_target = partial(

        fsp_compute,
        ctx=ctx,
        symbol=symbol,
        feed=feed,
        quote_stream=quote_stream,

@@ -323,7 +331,7 @@ async def cascade(
        src=src,
        dst=dst,

        # func_name=func_name,
        # target
        func=func
    )

@@ -335,90 +343,113 @@ async def cascade(

    profiler(f'{func_name}: fsp up')

    async def resync(tracker: TaskTracker) -> tuple[TaskTracker, int]:
        # TODO: adopt an incremental update engine/approach
        # where possible here eventually!
        log.warning(f're-syncing fsp {func_name} to source')
        tracker.cs.cancel()
        await tracker.complete.wait()
        return await n.start(fsp_target)
    # sync client
    await ctx.started(index)

    def is_synced(
        src: ShmArray,
        dst: ShmArray
    ) -> tuple[bool, int, int]:
        '''Predicate to determine if a destination FSP
        output array is aligned to its source array.
    # XXX: rt stream with client which we MUST
    # open here (and keep it open) in order to make
    # incremental "updates" as history prepends take
    # place.
    async with ctx.open_stream() as client_stream:

        '''
        step_diff = src.index - dst.index
        len_diff = abs(len(src.array) - len(dst.array))
        return not (
            # the source is likely backfilling and we must
            # sync history calculations
            len_diff > 2 or
        # TODO: these likely should all become
        # methods of this ``TaskLifetime`` or wtv
        # abstraction..
        async def resync(
            tracker: TaskTracker,

            # we aren't step synced to the source and may be
            # leading/lagging by a step
            step_diff > 1 or
            step_diff < 0
        ), step_diff, len_diff
        ) -> tuple[TaskTracker, int]:
            # TODO: adopt an incremental update engine/approach
            # where possible here eventually!
            log.warning(f're-syncing fsp {func_name} to source')
            tracker.cs.cancel()
            await tracker.complete.wait()
            tracker, index = await n.start(fsp_target)

    async def poll_and_sync_to_step(
            # always trigger UI refresh after history update,
            # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
            # ``piker.ui._display.trigger_update()``.
            await client_stream.send('update')
            return tracker, index

        tracker: TaskTracker,
        src: ShmArray,
        dst: ShmArray,
        def is_synced(
            src: ShmArray,
            dst: ShmArray
        ) -> tuple[bool, int, int]:
            '''Predicate to determine if a destination FSP
            output array is aligned to its source array.

    ) -> tuple[TaskTracker, int]:
            '''
            step_diff = src.index - dst.index
            len_diff = abs(len(src.array) - len(dst.array))
            return not (
                # the source is likely backfilling and we must
                # sync history calculations
                len_diff > 2 or

                # we aren't step synced to the source and may be
                # leading/lagging by a step
                step_diff > 1 or
                step_diff < 0
            ), step_diff, len_diff

        async def poll_and_sync_to_step(

            tracker: TaskTracker,
            src: ShmArray,
            dst: ShmArray,

        ) -> tuple[TaskTracker, int]:

            synced, step_diff, _ = is_synced(src, dst)
            while not synced:
                tracker, index = await resync(tracker)
            synced, step_diff, _ = is_synced(src, dst)
            while not synced:
                tracker, index = await resync(tracker)
                synced, step_diff, _ = is_synced(src, dst)

            return tracker, step_diff
            return tracker, step_diff

        s, step, ld = is_synced(src, dst)
        s, step, ld = is_synced(src, dst)

        # detect sample period step for subscription to increment
        # signal
        times = src.array['time']
        delay_s = times[-1] - times[times != times[-1]][-1]
        # detect sample period step for subscription to increment
        # signal
        times = src.array['time']
        delay_s = times[-1] - times[times != times[-1]][-1]

        # Increment the underlying shared memory buffer on every
        # "increment" msg received from the underlying data feed.
        async with feed.index_stream(int(delay_s)) as istream:
        # Increment the underlying shared memory buffer on every
        # "increment" msg received from the underlying data feed.
        async with feed.index_stream(
            int(delay_s)
        ) as istream:

            profiler(f'{func_name}: sample stream up')
            profiler.finish()
            profiler(f'{func_name}: sample stream up')
            profiler.finish()

            async for _ in istream:
            async for _ in istream:

                # respawn the compute task if the source
                # array has been updated such that we compute
                # new history from the (prepended) source.
                synced, step_diff, _ = is_synced(src, dst)
                if not synced:
                    tracker, step_diff = await poll_and_sync_to_step(
                        tracker,
                        src,
                        dst,
                    )
                # respawn the compute task if the source
                # array has been updated such that we compute
                # new history from the (prepended) source.
                synced, step_diff, _ = is_synced(src, dst)
                if not synced:
                    tracker, step_diff = await poll_and_sync_to_step(
                        tracker,
                        src,
                        dst,
                    )

                # skip adding a last bar since we should already
                # be step aligned
                if step_diff == 0:
                    continue
                # skip adding a last bar since we should already
                # be step aligned
                if step_diff == 0:
                    continue

                # read out last shm row, copy and write new row
                array = dst.array
                # read out last shm row, copy and write new row
                array = dst.array

                # some metrics like vlm should be reset
                # to zero every step.
                if zero_on_step:
                    last = zeroed
                else:
                    last = array[-1:].copy()
                # some metrics like vlm should be reset
                # to zero every step.
                if zero_on_step:
                    last = zeroed
                else:
                    last = array[-1:].copy()

                dst.push(last)
                dst.push(last)
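# Standalone sketch of the ``is_synced`` predicate above with plain
# ints standing in for shm indices and array lengths:
def is_synced_sketch(src_index, dst_index, src_len, dst_len):
    step_diff = src_index - dst_index
    len_diff = abs(src_len - dst_len)
    return not (len_diff > 2 or step_diff > 1 or step_diff < 0)

assert is_synced_sketch(100, 100, 500, 500)
assert not is_synced_sketch(105, 100, 500, 500)  # dst lagging 5 steps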
@@ -167,6 +167,7 @@ def _wma(

    assert length == len(weights)

    # lol, for long sequences this is nutso slow and expensive..
    return np.convolve(signal, weights, 'valid')


@@ -309,7 +309,7 @@ async def flow_rates(

    if period > 1:
        trade_rate_wma = _wma(
            dvlm_shm.array['trade_count'],
            dvlm_shm.array['trade_count'][-period:],
            period,
            weights=weights,
        )

@@ -332,7 +332,7 @@ async def flow_rates(

    if period > 1:
        dark_trade_rate_wma = _wma(
            dvlm_shm.array['dark_trade_count'],
            dvlm_shm.array['dark_trade_count'][-period:],
            period,
            weights=weights,
        )
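# Sketch of the weighted moving average via ``np.convolve`` above;
# 'valid' mode only emits points with full window overlap:
import numpy as np

signal = np.array([1., 2., 3., 4.])
weights = np.array([0.5, 0.5])  # flat 2-period window
wma = np.convolve(signal, weights, 'valid')
assert wma.tolist() == [1.5, 2.5, 3.5]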
@@ -25,39 +25,10 @@ from PyQt5.QtCore import QPointF
from PyQt5.QtWidgets import QGraphicsPathItem

if TYPE_CHECKING:
    from ._axes import PriceAxis
    from ._chart import ChartPlotWidget
    from ._label import Label


def marker_right_points(
    chart: ChartPlotWidget,  # noqa
    marker_size: int = 20,

) -> (float, float, float):
    '''
    Return x-dimension, y-axis-aware, level-line marker oriented scene
    values.

    X values correspond to: the end of a level line, the end of
    a paired level line marker, and the right-most side of the "right"
    axis respectively.

    '''
    # TODO: compute some sensible maximum value here
    # and use a humanized scheme to limit to that length.
    l1_len = chart._max_l1_line_len
    ryaxis = chart.getAxis('right')

    r_axis_x = ryaxis.pos().x()
    up_to_l1_sc = r_axis_x - l1_len - 10

    marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
    line_end = marker_right - (6/16 * marker_size)

    return line_end, marker_right, r_axis_x


def vbr_left(
    label: Label,
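# Arithmetic sketch of the x-offsets computed in
# ``marker_right_points`` above, with made up scene values:
marker_size = 20
r_axis_x, l1_len = 1000.0, 120.0
up_to_l1_sc = r_axis_x - l1_len - 10                    # 870.0
marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)  # 815.0
line_end = marker_right - (6 / 16 * marker_size)        # 807.5
assert line_end < marker_right < r_axis_x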
@@ -26,8 +26,6 @@ from PyQt5.QtWidgets import QGraphicsPathItem
from pyqtgraph import Point, functions as fn, Color
import numpy as np

from ._anchors import marker_right_points


def mk_marker_path(

@@ -116,7 +114,7 @@ class LevelMarker(QGraphicsPathItem):

        self.get_level = get_level
        self._on_paint = on_paint
        self.scene_x = lambda: marker_right_points(chart)[1]
        self.scene_x = lambda: chart.marker_right_points()[1]
        self.level: float = 0
        self.keep_in_view = keep_in_view

@@ -169,7 +167,7 @@ class LevelMarker(QGraphicsPathItem):
        vr = view.state['viewRange']
        ymn, ymx = vr[1]

        # _, marker_right, _ = marker_right_points(line._chart)
        # _, marker_right, _ = line._chart.marker_right_points()
        x = self.scene_x()

        if self.style == '>|':  # short style, points "down-to" line
@@ -223,8 +223,9 @@ class DynamicDateAxis(Axis):
    ) -> List[str]:

        chart = self.linkedsplits.chart
        bars = chart._arrays[chart.name]
        shm = self.linkedsplits.chart._shm
        flow = chart._flows[chart.name]
        shm = flow.shm
        bars = shm.array
        first = shm._first.value

        bars_len = len(bars)
@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
@ -19,10 +19,14 @@ High level chart-widget apis.
|
|||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from typing import Optional
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtCore import (
|
||||
Qt,
|
||||
QLineF,
|
||||
# QPointF,
|
||||
)
|
||||
from PyQt5.QtWidgets import (
|
||||
QFrame,
|
||||
QWidget,
|
||||
|
@ -33,7 +37,6 @@ from PyQt5.QtWidgets import (
|
|||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
import trio
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ._axes import (
|
||||
DynamicDateAxis,
|
||||
|
@ -44,6 +47,7 @@ from ._cursor import (
|
|||
Cursor,
|
||||
ContentsLabel,
|
||||
)
|
||||
from ..data._sharedmem import ShmArray
|
||||
from ._l1 import L1Labels
|
||||
from ._ohlc import BarItems
|
||||
from ._curve import FastAppendCurve
|
||||
|
@ -52,17 +56,18 @@ from ._style import (
|
|||
CHART_MARGINS,
|
||||
_xaxis_at,
|
||||
_min_points_to_show,
|
||||
_bars_from_right_in_follow_mode,
|
||||
_bars_to_left_in_follow_mode,
|
||||
)
|
||||
from ..data.feed import Feed
|
||||
from ..data._source import Symbol
|
||||
from ..data._sharedmem import ShmArray
|
||||
from ..log import get_logger
|
||||
from ._interaction import ChartView
|
||||
from ._forms import FieldsForm
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._overlay import PlotItemOverlay
|
||||
from ._flows import Flow
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._display import DisplayState
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -230,11 +235,18 @@ class GodWidget(QWidget):
|
|||
# chart is already in memory so just focus it
|
||||
linkedsplits.show()
|
||||
linkedsplits.focus()
|
||||
linkedsplits.graphics_cycle()
|
||||
await trio.sleep(0)
|
||||
|
||||
# resume feeds *after* rendering chart view asap
|
||||
chart.resume_all_feeds()
|
||||
|
||||
# TODO: we need a check to see if the chart
|
||||
# last had the xlast in view, if so then shift so it's
|
||||
# still in view, if the user was viewing history then
|
||||
# do nothing yah?
|
||||
chart.default_view()
|
||||
|
||||
self.linkedsplits = linkedsplits
|
||||
symbol = linkedsplits.symbol
|
||||
if symbol is not None:
|
||||
|
@@ -346,8 +358,19 @@ class LinkedSplits(QWidget):
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.layout.addWidget(self.splitter)

        # chart-local graphics state that can be passed to
        # a ``graphic_update_cycle()`` call by any task wishing to
        # update the UI for a given "chart instance".
        self.display_state: Optional[DisplayState] = None

        self._symbol: Symbol = None

    def graphics_cycle(self, **kwargs) -> None:
        from . import _display
        ds = self.display_state
        if ds:
            return _display.graphics_update_cycle(ds, **kwargs)

    @property
    def symbol(self) -> Symbol:
        return self._symbol
@@ -362,12 +385,15 @@ class LinkedSplits(QWidget):
        '''
        ln = len(self.subplots)

        # proportion allocated to consumer subcharts
        if not prop:
            # proportion allocated to consumer subcharts
            if ln < 2:
                prop = 1/3
            elif ln >= 2:
                prop = 3/8
            prop = 3/8*5/8

            # if ln < 2:
            #     prop = 3/8*5/8

            # elif ln >= 2:
            #     prop = 3/8

        major = 1 - prop
        min_h_ind = int((self.height() * prop) / ln)
@@ -389,7 +415,7 @@ class LinkedSplits(QWidget):
        self,

        symbol: Symbol,
        array: np.ndarray,
        shm: ShmArray,
        sidepane: FieldsForm,

        style: str = 'bar',
@@ -414,7 +440,7 @@ class LinkedSplits(QWidget):
        self.chart = self.add_plot(

            name=symbol.key,
            array=array,
            shm=shm,
            style=style,
            _is_main=True,
@@ -442,7 +468,7 @@ class LinkedSplits(QWidget):
        self,

        name: str,
        array: np.ndarray,
        shm: ShmArray,

        array_key: Optional[str] = None,
        style: str = 'line',
@@ -486,7 +512,6 @@ class LinkedSplits(QWidget):
            name=name,
            data_key=array_key or name,

            array=array,
            parent=qframe,
            linkedsplits=self,
            axisItems=axes,
@@ -550,7 +575,7 @@ class LinkedSplits(QWidget):

            graphics, data_key = cpw.draw_ohlc(
                name,
                array,
                shm,
                array_key=array_key
            )
            self.cursor.contents_labels.add_label(
@@ -564,7 +589,7 @@ class LinkedSplits(QWidget):
            add_label = True
            graphics, data_key = cpw.draw_curve(
                name,
                array,
                shm,
                array_key=array_key,
                color='default_light',
            )
@@ -573,7 +598,7 @@ class LinkedSplits(QWidget):
            add_label = True
            graphics, data_key = cpw.draw_curve(
                name,
                array,
                shm,
                array_key=array_key,
                step_mode=True,
                color='davies',
@@ -628,31 +653,6 @@ class LinkedSplits(QWidget):
            cpw.sidepane.setMaximumWidth(sp_w)


# class FlowsTable(pydantic.BaseModel):
#     '''
#     Data-AGGRegate: high level API onto multiple (categorized)
#     ``Flow``s with high level processing routines for
#     multi-graphics computations and display.

#     '''
#     flows: dict[str, np.ndarray] = {}


class Flow(BaseModel):
    '''
    (FinancialSignal-)Flow compound type which wraps a real-time
    graphics (curve) and its backing data stream together for high level
    access and control.

    '''
    class Config:
        arbitrary_types_allowed = True

    name: str
    plot: pg.PlotItem
    shm: Optional[ShmArray] = None  # may be filled in "later"


class ChartPlotWidget(pg.PlotWidget):
    '''
    ``GraphicsView`` subtype containing a single ``PlotItem``.
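Aside: since ``Flow`` here is a plain pydantic model with arbitrary types enabled, constructing one is just keyword assignment. A minimal usage sketch (the symbol key is made up for illustration, a live Qt app is needed for the graphics item, and the backing ``ShmArray`` can be attached later):

import pyqtgraph as pg

plot = pg.PlotItem()  # requires an active QApplication in practice
flow = Flow(
    name='xbtusd',    # assumed symbol key, illustration only
    plot=plot,
    # `shm` defaults to None and may be filled in "later"
)
assert flow.shm is None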
@@ -686,7 +686,6 @@ class ChartPlotWidget(pg.PlotWidget):

        # the "data view" we generate graphics from
        name: str,
        array: np.ndarray,
        data_key: str,
        linkedsplits: LinkedSplits,
@@ -739,14 +738,6 @@ class ChartPlotWidget(pg.PlotWidget):
        self._max_l1_line_len: float = 0

        # self.setViewportMargins(0, 0, 0, 0)
        # self._ohlc = array  # readonly view of ohlc data

        # TODO: move to Aggr above XD
        # readonly view of data arrays
        self._arrays = {
            self.data_key: array,
        }
        self._graphics = {}  # registry of underlying graphics

        # registry of overlay curve names
        self._flows: dict[str, Flow] = {}
@@ -762,7 +753,6 @@ class ChartPlotWidget(pg.PlotWidget):
        # show background grid
        self.showGrid(x=False, y=True, alpha=0.3)

        self.default_view()
        self.cv.enable_auto_yrange()

        self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
@@ -807,47 +797,137 @@ class ChartPlotWidget(pg.PlotWidget):
        return int(vr.left()), int(vr.right())

    def bars_range(self) -> tuple[int, int, int, int]:
        """Return a range tuple for the bars present in view.
        """
        l, r = self.view_range()
        array = self._arrays[self.name]
        lbar = max(l, array[0]['index'])
        rbar = min(r, array[-1]['index'])
        '''
        Return a range tuple for the bars present in view.

        '''
        main_flow = self._flows[self.name]
        ifirst, l, lbar, rbar, r, ilast = main_flow.datums_range()
        return l, lbar, rbar, r

    def curve_width_pxs(
        self,
    ) -> float:
        _, lbar, rbar, _ = self.bars_range()
        return self.view.mapViewToDevice(
            QLineF(lbar, 0, rbar, 0)
        ).length()

    def pre_l1_xs(self) -> tuple[float, float]:
        '''
        Return the view x-coord for the value just before
        the L1 labels on the y-axis as well as the length
        of that L1 label from the y-axis.

        '''
        line_end, marker_right, yaxis_x = self.marker_right_points()
        view = self.view
        line = view.mapToView(
            QLineF(line_end, 0, yaxis_x, 0)
        )
        return line.x1(), line.length()

    def marker_right_points(
        self,
        marker_size: int = 20,

    ) -> (float, float, float):
        '''
        Return x-dimension, y-axis-aware, level-line marker oriented scene
        values.

        X values correspond to the end of a level line, the end of
        a paired level line marker, and the right-most side of the
        "right" axis respectively.

        '''
        # TODO: compute some sensible maximum value here
        # and use a humanized scheme to limit to that length.
        l1_len = self._max_l1_line_len
        ryaxis = self.getAxis('right')

        r_axis_x = ryaxis.pos().x()
        up_to_l1_sc = r_axis_x - l1_len - 10

        marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
        line_end = marker_right - (6/16 * marker_size)

        return line_end, marker_right, r_axis_x

    def default_view(
        self,
        index: int = -1,
        bars_from_y: int = 3000,

    ) -> None:
        '''
        Set the view box to the "default" startup view of the scene.

        '''
        try:
            xlast = self._arrays[self.name][index]['index']
        except IndexError:
            log.warning(f'array for {self.name} not loaded yet?')
        flow = self._flows.get(self.name)
        if not flow:
            log.warning(f'`Flow` for {self.name} not loaded yet?')
            return

        begin = xlast - _bars_to_left_in_follow_mode
        end = xlast + _bars_from_right_in_follow_mode
        index = flow.shm.array['index']
        xfirst, xlast = index[0], index[-1]
        l, lbar, rbar, r = self.bars_range()
        view = self.view

        if (
            rbar < 0
            or l < xfirst
            or l < 0
            or (rbar - lbar) < 6
        ):
            # TODO: set fixed bars count on screen that approx includes as
            # many bars as possible before a downsample line is shown.
            begin = xlast - bars_from_y
            view.setXRange(
                min=begin,
                max=xlast,
                padding=0,
            )
            # re-get range
            l, lbar, rbar, r = self.bars_range()

        # we get the L1 spread label "length" in view coords
        # terms now that we've scaled either by user control
        # or to the default set of bars as per the immediate block
        # above.
        marker_pos, l1_len = self.pre_l1_xs()
        end = xlast + l1_len + 1
        begin = end - (r - l)

        # for debugging
        # print(
        #     # f'bars range: {brange}\n'
        #     f'xlast: {xlast}\n'
        #     f'marker pos: {marker_pos}\n'
        #     f'l1 len: {l1_len}\n'
        #     f'begin: {begin}\n'
        #     f'end: {end}\n'
        # )

        # remove any custom user yrange settings
        if self._static_yrange == 'axis':
            self._static_yrange = None

        view = self.view
        view.setXRange(
            min=begin,
            max=end,
            padding=0,
        )
        self.view.maybe_downsample_graphics()
        view._set_yrange()
        try:
            self.linked.graphics_cycle()
        except IndexError:
            pass

    def increment_view(
        self,
        steps: int = 1,
        vb: Optional[ChartView] = None,

    ) -> None:
        """
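Aside: the scene-coordinate arithmetic in ``marker_right_points()`` above is easy to sanity check in isolation. A standalone sketch with made-up inputs (only the constants come from the hunk; the helper itself is hypothetical):

def marker_right_points_sketch(
    r_axis_x: float,   # x position of the right y-axis in scene coords
    l1_len: float,     # widest L1 label length seen so far
    marker_size: int = 20,
) -> tuple[float, float, float]:
    # pad 10px between the L1 labels and the axis
    up_to_l1_sc = r_axis_x - l1_len - 10
    # right edge reserved for a paired level-line marker
    marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
    # where the level line itself should terminate
    line_end = marker_right - (6/16 * marker_size)
    return line_end, marker_right, r_axis_x

# e.g. an axis at x=1200 with a 90px-wide L1 label:
assert marker_right_points_sketch(1200, 90) == (1037.5, 1045.0, 1200)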
@@ -856,7 +936,8 @@ class ChartPlotWidget(pg.PlotWidget):

        """
        l, r = self.view_range()
        self.view.setXRange(
        view = vb or self.view
        view.setXRange(
            min=l + steps,
            max=r + steps,
@@ -868,7 +949,7 @@ class ChartPlotWidget(pg.PlotWidget):
    def draw_ohlc(
        self,
        name: str,
        data: np.ndarray,
        shm: ShmArray,

        array_key: Optional[str] = None,
@@ -878,19 +959,31 @@ class ChartPlotWidget(pg.PlotWidget):

        '''
        graphics = BarItems(
            self.linked,
            self.plotItem,
            pen_color=self.pen_color
            pen_color=self.pen_color,
            name=name,
        )

        # adds all bar/candle graphics objects for each data point in
        # the np array buffer to be drawn on next render cycle
        self.plotItem.addItem(graphics)

        # draw after to allow self.scene() to work...
        graphics.draw_from_data(data)

        data_key = array_key or name
        self._graphics[data_key] = graphics

        self._flows[data_key] = Flow(
            name=name,
            plot=self.plotItem,
            _shm=shm,
            is_ohlc=True,
            graphics=graphics,
        )

        # TODO: i think we can eventually remove this if
        # we write the ``Flow.update_graphics()`` method right?
        # draw after to allow self.scene() to work...
        graphics.draw_from_data(shm.array)

        self._add_sticky(name, bg_color='davies')

        return graphics, data_key
@@ -931,6 +1024,7 @@ class ChartPlotWidget(pg.PlotWidget):
        )
        pi.hideButtons()

        # cv.enable_auto_yrange(self.view)
        cv.enable_auto_yrange()

        # compose this new plot's graphics with the current chart's
@@ -955,19 +1049,20 @@ class ChartPlotWidget(pg.PlotWidget):
        self,

        name: str,
        data: np.ndarray,
        shm: ShmArray,

        array_key: Optional[str] = None,
        overlay: bool = False,
        color: Optional[str] = None,
        add_label: bool = True,
        pi: Optional[pg.PlotItem] = None,

        **pdi_kwargs,

    ) -> (pg.PlotDataItem, str):
        '''
        Draw a "curve" (line plot graphics) for the provided data in
        the input array ``data``.
        the input shm array ``shm``.

        '''
        color = color or self.pen_color or 'default_light'
@@ -978,6 +1073,7 @@ class ChartPlotWidget(pg.PlotWidget):
        data_key = array_key or name

        # yah, we wrote our own B)
        data = shm.array
        curve = FastAppendCurve(
            y=data[data_key],
            x=data['index'],
@@ -988,12 +1084,6 @@ class ChartPlotWidget(pg.PlotWidget):
            # on data reads and makes graphics rendering no faster
            # clipToView=True,

            # TODO: see how this handles with custom ohlcv bars graphics
            # and/or if we can implement something similar for OHLC graphics
            # autoDownsample=True,
            # downsample=60,
            # downsampleMethod='subsample',

            **pdi_kwargs,
        )
@@ -1007,11 +1097,16 @@ class ChartPlotWidget(pg.PlotWidget):
        # and is disastrous for performance.
        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)

        # register curve graphics and backing array for name
        self._graphics[name] = curve
        self._arrays[data_key] = data
        pi = pi or self.plotItem

        pi = self.plotItem
        self._flows[data_key] = Flow(
            name=name,
            plot=pi,
            _shm=shm,
            is_ohlc=False,
            # register curve graphics with this flow
            graphics=curve,
        )

        # TODO: this probably needs its own method?
        if overlay:
@@ -1021,10 +1116,6 @@ class ChartPlotWidget(pg.PlotWidget):
                    f'{overlay} must be from `.plotitem_overlay()`'
                )
            pi = overlay

            # anchor_at = ('bottom', 'left')
            self._flows[name] = Flow(name=name, plot=pi)

        else:
            # anchor_at = ('top', 'left')
@@ -1032,7 +1123,17 @@ class ChartPlotWidget(pg.PlotWidget):
            # (we need something that avoids clutter on x-axis).
            self._add_sticky(name, bg_color=color)

        # NOTE: this is more or less the RENDER call that tells Qt to
        # start showing the generated graphics-curves. This is a kind
        # of edge-triggered call where once added any
        # ``QGraphicsItem.update()`` calls are automatically displayed.
        # Our internal graphics objects have their own "update from
        # data" style method API that allows for real-time updates on
        # the next render cycle; just note a lot of the real-time
        # updates are implicit and require a bit of digging to
        # understand.
        pi.addItem(curve)

        return curve, data_key

    # TODO: make this a ctx mngr
@@ -1064,29 +1165,11 @@ class ChartPlotWidget(pg.PlotWidget):
        )
        return last

    def update_ohlc_from_array(
    def update_graphics_from_flow(
        self,

        graphics_name: str,
        array: np.ndarray,
        **kwargs,

    ) -> pg.GraphicsObject:
        '''
        Update the named internal graphics from ``array``.

        '''
        self._arrays[self.name] = array
        graphics = self._graphics[graphics_name]
        graphics.update_from_array(array, **kwargs)
        return graphics

    def update_curve_from_array(
        self,

        graphics_name: str,
        array: np.ndarray,
        array_key: Optional[str] = None,

        **kwargs,

    ) -> pg.GraphicsObject:
@@ -1094,32 +1177,12 @@ class ChartPlotWidget(pg.PlotWidget):
        Update the named internal graphics from ``array``.

        '''
        assert len(array)
        data_key = array_key or graphics_name

        if graphics_name not in self._flows:
            self._arrays[self.name] = array
        else:
            self._arrays[data_key] = array

        curve = self._graphics[graphics_name]

        # NOTE: back when we weren't implementing the curve graphics
        # ourselves you'd have updates using this method:
        # curve.setData(y=array[graphics_name], x=array['index'], **kwargs)

        # NOTE: graphics **must** implement a diff based update
        # operation where an internal ``FastUpdateCurve._xrange`` is
        # used to determine if the underlying path needs to be
        # pre/ap-pended.
        curve.update_from_array(
            x=array['index'],
            y=array[data_key],
            **kwargs
        flow = self._flows[array_key or graphics_name]
        return flow.update_graphics(
            array_key=array_key,
            **kwargs,
        )

        return curve

    # def _label_h(self, yhigh: float, ylow: float) -> float:
    #     # compute contents label "height" in view terms
    #     # to avoid having data "contents" overlap with them
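Aside: the "diff based update" contract noted above reduces to comparing the new input's endpoint indices against the last rendered x-range. A small illustrative helper (names mirror the hunk; the function itself is not part of the codebase):

import numpy as np

def length_diffs(
    x: np.ndarray,
    xrange: tuple[int, int],
) -> tuple[int, int]:
    # how many datums must be pre- and/or ap-pended to the path
    istart, istop = xrange
    prepend_length = int(istart - x[0])
    append_length = int(x[-1] - istop)
    return prepend_length, append_length

x = np.arange(10, 25)
# previously rendered indices 12..20 -> 2 to prepend, 4 to append
assert length_diffs(x, (12, 20)) == (2, 4)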
@@ -1149,6 +1212,9 @@ class ChartPlotWidget(pg.PlotWidget):

        # print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")

    # TODO: pretty sure we can just call the cursor
    # directly not? i don't see why we need special "signal proxies"
    # for this lul..
    def enterEvent(self, ev):  # noqa
        # pg.PlotWidget.enterEvent(self, ev)
        self.sig_mouse_enter.emit(self)
@@ -1162,7 +1228,7 @@ class ChartPlotWidget(pg.PlotWidget):

        # TODO: this should go onto some sort of
        # data-view thinger..right?
        ohlc = self._shm.array
        ohlc = self._flows[self.name].shm.array

        # XXX: not sure why the time is so off here
        # looks like we're gonna have to do some fixing..
@@ -1173,6 +1239,22 @@ class ChartPlotWidget(pg.PlotWidget):
        else:
            return ohlc['index'][-1]

    def in_view(
        self,
        array: np.ndarray,

    ) -> np.ndarray:
        '''
        Slice an input struct array providing only datums
        "in view" of this chart.

        '''
        l, lbar, rbar, r = self.bars_range()
        ifirst = array[0]['index']
        # slice data by offset from the first index
        # available in the passed datum set.
        return array[lbar - ifirst:(rbar - ifirst) + 1]

    def maxmin(
        self,
        name: Optional[str] = None,
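Aside: the offset-slice in ``in_view()`` is plain numpy index arithmetic; a self-contained check (the dtype is a minimal stand-in for the real shm layout):

import numpy as np

dtype = np.dtype([('index', 'i8'), ('close', 'f8')])
array = np.zeros(100, dtype=dtype)
array['index'] = np.arange(1000, 1100)  # absolute sample indices

lbar, rbar = 1040, 1060                 # bar range "in view"
ifirst = array[0]['index']              # -> 1000
in_view = array[lbar - ifirst:(rbar - ifirst) + 1]
assert in_view['index'][0] == 1040 and in_view['index'][-1] == 1060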
@@ -1185,46 +1267,35 @@ class ChartPlotWidget(pg.PlotWidget):
        If ``bars_range`` is provided use that range.

        '''
        l, lbar, rbar, r = bars_range or self.bars_range()
        # TODO: logic to check if end of bars in view
        # extra = view_len - _min_points_to_show
        # begin = self._arrays['ohlc'][0]['index'] - extra
        # # end = len(self._arrays['ohlc']) - 1 + extra
        # end = self._arrays['ohlc'][-1]['index'] - 1 + extra

        # bars_len = rbar - lbar
        # log.debug(
        #     f"\nl: {l}, lbar: {lbar}, rbar: {rbar}, r: {r}\n"
        #     f"view_len: {view_len}, bars_len: {bars_len}\n"
        #     f"begin: {begin}, end: {end}, extra: {extra}"
        # )
        profiler = pg.debug.Profiler(
            msg=f'`{str(self)}.maxmin()` loop cycle for: `{self.name}`',
            disabled=not pg_profile_enabled(),
            gt=ms_slower_then,
            delayed=True,
        )

        # TODO: here we should instead look up the ``Flow.shm.array``
        # and read directly from shm to avoid copying to memory first
        # and then reading it again here.
        a = self._arrays.get(name or self.name)
        if a is None:
            return None

        ifirst = a[0]['index']
        bars = a[lbar - ifirst:(rbar - ifirst) + 1]

        if not len(bars):
            # likely no data loaded yet or extreme scrolling?
            log.error(f"WTF bars_range = {lbar}:{rbar}")
            return

        flow_key = name or self.name
        flow = self._flows.get(flow_key)
        if (
            self.data_key == self.linked.symbol.key
            flow is None
        ):
            # ohlc sampled bars hi/lo lookup
            ylow = np.nanmin(bars['low'])
            yhigh = np.nanmax(bars['high'])
            log.error(f"flow {flow_key} doesn't exist in chart {self.name} !?")
            key = res = 0, 0

        else:
            view = bars[name or self.data_key]
            ylow = np.nanmin(view)
            yhigh = np.nanmax(view)
            first, l, lbar, rbar, r, last = bars_range or flow.datums_range()
            profiler(f'{self.name} got bars range')

            # print(f'{(ylow, yhigh)}')
            return ylow, yhigh
            key = round(lbar), round(rbar)
            res = flow.maxmin(*key)
            if res == (None, None):
                log.error(
                    f"{flow_key} no mxmn for bars_range => {key} !?"
                )
                res = 0, 0

        profiler(f'yrange mxmn: {key} -> {res}')
        return res
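Aside: both the old inline version and the new ``Flow.maxmin()`` delegate boil down to nan-aware extrema over the in-view slice; a tiny numpy demonstration of the hi/lo variant the old code used:

import numpy as np

dtype = np.dtype([('high', 'f8'), ('low', 'f8')])
bars = np.array(
    [(10.5, 9.0), (11.2, 10.1), (np.nan, np.nan)],
    dtype=dtype,
)
# nan-aware so a partially-filled live bar doesn't poison the range
ylow = np.nanmin(bars['low'])
yhigh = np.nanmax(bars['high'])
assert (ylow, yhigh) == (9.0, 11.2)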
@@ -0,0 +1,379 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Graphics related downsampling routines for compressing to pixel
limits on the display device.

'''
import math
from typing import Optional

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
    jit,
    # float64, optional, int64,
)

from ..log import get_logger


log = get_logger(__name__)
def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
    '''
    Convert an OHLC struct-array containing 'high'/'low' columns
    to a "joined" max/min 1-d array.

    '''
    index = ohlc['index']
    hls = ohlc[[
        'low',
        'high',
    ]]

    mxmn = np.empty(2*hls.size, dtype=np.float64)
    x = np.empty(2*hls.size, dtype=np.float64)
    trace_hl(hls, mxmn, x, index[0])
    x = x + index[0]

    return mxmn, x
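A quick shape check of ``hl2mxmn()``: two output points (a high and a low) are emitted per input bar. The dtype below is a minimal stand-in for the real OHLC layout:

import numpy as np

dtype = np.dtype([('index', 'f8'), ('high', 'f8'), ('low', 'f8')])
ohlc = np.array(
    [(100, 11.0, 10.0), (101, 12.0, 10.5), (102, 11.5, 9.5)],
    dtype=dtype,
)
mxmn, x = hl2mxmn(ohlc)
assert mxmn.size == x.size == 2 * ohlc.size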
@jit(
    # TODO: the type annots..
    # float64[:](float64[:],),
    nopython=True,
)
def trace_hl(
    hl: 'np.ndarray',
    out: np.ndarray,
    x: np.ndarray,
    start: int,

    # the "offset" values in the x-domain which
    # place the 2 output points around each ``int``
    # master index.
    margin: float = 0.43,

) -> None:
    '''
    "Trace" the outline of the high-low values of an ohlc sequence
    as a line such that the maximum deviation (aka dispersion) between
    bars is preserved.

    This routine is expected to modify input arrays in-place.

    '''
    last_l = hl['low'][0]
    last_h = hl['high'][0]

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i] = last_h

        last_l = l
        last_h = h

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin

    return out
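Worked example of the branch above: for a previous bar with low 10 / high 11 and a current bar with low 10.5 / high 12, up_diff = 12 - 10 = 2 exceeds down_diff = 11 - 10.5 = 0.5, so the pair (last_l, h) = (10, 12) is written for that index; the traced line thus always spans the larger of the two possible bar-to-bar price moves.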
def ohlc_flatten(
    ohlc: np.ndarray,
    use_mxmn: bool = True,

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLCV struct-array into a flat ready-for-line-plotting
    1-d array that is 4 times the size with x-domain values distributed
    evenly (by 0.5 steps) over each index.

    '''
    index = ohlc['index']

    if use_mxmn:
        # traces a line optimally over highs to lows
        # using numba. NOTE: pretty sure this is faster
        # and looks about the same as the below output.
        flat, x = hl2mxmn(ohlc)

    else:
        flat = rfn.structured_to_unstructured(
            ohlc[['open', 'high', 'low', 'close']]
        ).flatten()

        x = np.linspace(
            start=index[0] - 0.5,
            stop=index[-1] + 0.5,
            num=len(flat),
        )
    return x, flat
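Usage sketch for the non-mxmn branch of ``ohlc_flatten()``, which spreads the 4 OHLC points of each bar evenly over its index slot (same toy dtype approach as above, extended with open/close fields):

import numpy as np

dtype = np.dtype([
    ('index', 'f8'), ('open', 'f8'), ('high', 'f8'),
    ('low', 'f8'), ('close', 'f8'),
])
ohlc = np.array(
    [(100, 10.0, 11.0, 9.5, 10.5), (101, 10.5, 12.0, 10.2, 11.8)],
    dtype=dtype,
)
x, flat = ohlc_flatten(ohlc, use_mxmn=False)
assert len(flat) == 4 * len(ohlc)       # o, h, l, c per bar
assert x[0] == 99.5 and x[-1] == 101.5  # +/- 0.5 around the indices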
def ohlc_to_m4_line(
    ohlc: np.ndarray,
    px_width: int,

    downsample: bool = False,
    uppx: Optional[float] = None,
    pretrace: bool = False,

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLC struct-array to an m4 downsampled 1-d array.

    '''
    xpts, flat = ohlc_flatten(
        ohlc,
        use_mxmn=pretrace,
    )

    if downsample:
        bins, x, y = ds_m4(
            xpts,
            flat,
            px_width=px_width,
            uppx=uppx,
            # log_scale=bool(uppx)
        )
        x = np.broadcast_to(x[:, None], y.shape)
        x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
        y = y.flatten()

        return x, y
    else:
        return xpts, flat
def ds_m4(
    x: np.ndarray,
    y: np.ndarray,

    # this is the width of the data in view
    # in display-device-local pixel units.
    px_width: int,
    uppx: Optional[float] = None,
    xrange: Optional[float] = None,
    # log_scale: bool = True,

) -> tuple[int, np.ndarray, np.ndarray]:
    '''
    Downsample using the M4 algorithm.

    This is more or less an OHLC style sampling of a line-style series.

    '''
    # NOTE: this method is a so called "visualization driven data
    # aggregation" approach. It gives error-free line chart
    # downsampling, see
    # further scientific paper resources:
    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf

    # Details on implementation of this algo are based in,
    # https://github.com/pikers/piker/issues/109

    # XXX: from infinite on downsampling viewable graphics:
    # "one thing i remembered about the binning - if you are
    # picking a range within your timeseries the start and end bin
    # should be one more bin size outside the visual range, then
    # you get better visual fidelity at the edges of the graph"
    # "i didn't show it in the sample code, but it's accounted for
    # in the start and end indices and number of bins"

    # optionally log-scale down the "supposed pxs on screen"
    # as the units-per-px (uppx) gets large.
    # if log_scale:
    #     assert uppx, 'You must provide a `uppx` value to use log scaling!'
    #     # uppx = uppx * math.log(uppx, 2)

    #     # scaler = 2**7 / (1 + math.log(uppx, 2))
    #     scaler = round(
    #         max(
    #             # NOTE: found that a 16x px width brought greater
    #             # detail, likely due to dpi scaling?
    #             # px_width=px_width * 16,
    #             2**7 / (1 + math.log(uppx, 2)),
    #             1
    #         )
    #     )
    #     px_width *= scaler

    # else:
    #     px_width *= 16

    # should never get called unless actually needed
    assert px_width > 1 and uppx > 0

    # NOTE: if we didn't pre-slice the data to downsample
    # you could in theory pass these as the slicing params,
    # do we care though since we can always just pre-slice the
    # input?
    x_start = x[0]  # x value start/lowest in domain

    if xrange is None:
        x_end = x[-1]  # x end value/highest in domain
        xrange = (x_end - x_start)

    # XXX: always round up on the input pixels
    # lnx = len(x)
    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)

    pxw = math.ceil(xrange / uppx)
    # px_width = math.ceil(px_width)

    # ratio of indexed x-value to width of raster in pixels.
    # this is more or less, uppx: units-per-pixel.
    # w = xrange / float(px_width)
    # uppx = uppx * math.log(uppx, 2)
    # w2 = px_width / uppx

    # scale up the width as the uppx gets large
    w = uppx  # * math.log(uppx, 666)

    # ensure we make more than enough
    # frames (windows) for the output pixel
    frames = pxw

    # if we have more than an exact integer's
    # (uniform quotient output) worth of datum-domain-points
    # per windows-frame, add one more window to ensure
    # we have room for all output down-samples.
    pts_per_pixel, r = divmod(xrange, frames)
    if r:
        # while r:
        frames += 1
        pts_per_pixel, r = divmod(xrange, frames)

    # print(
    #     f'uppx: {uppx}\n'
    #     f'xrange: {xrange}\n'
    #     f'px_width: {px_width}\n'
    #     f'pxw: {pxw}\n'
    #     f'WTF w:{w}, w2:{w2}\n'
    #     f'frames: {frames}\n'
    # )
    assert frames >= (xrange / uppx)

    # call into ``numba``
    nb, i_win, y_out = _m4(
        x,
        y,

        frames,

        # TODO: see func below..
        # i_win,
        # y_out,

        # first index in x data to start at
        x_start,
        # window size for each "frame" of data to downsample (normally
        # scaled by the ratio of pixels on screen to data in x-range).
        w,
    )

    # filter out any overshoot in the input allocation arrays by
    # removing zero-ed tail entries which should start at a certain
    # index.
    i_win = i_win[i_win != 0]
    y_out = y_out[:i_win.size]

    return nb, i_win, y_out
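The window-count logic in ``ds_m4()`` reduces to simple integer arithmetic; a standalone restatement of just the ``frames`` computation (the helper name is made up):

import math

def m4_frame_count(xrange: float, uppx: float) -> int:
    # one candidate window per output pixel-column
    frames = math.ceil(xrange / uppx)
    # bump the count when the datum range doesn't divide evenly so
    # the final partial window still gets an output slot
    _, r = divmod(xrange, frames)
    if r:
        frames += 1
    assert frames >= (xrange / uppx)
    return frames

# 1000 x-units at 3 units-per-pixel -> 334 windows, +1 remainder pad
assert m4_frame_count(1000, 3) == 335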
@jit(
    nopython=True,
    nogil=True,
)
def _m4(

    xs: np.ndarray,
    ys: np.ndarray,

    frames: int,

    # TODO: using this approach by having the ``.zeros()`` alloc lines
    # below in pure python was causing seg faults and alloc crashes..
    # we might need to see how it behaves with shm arrays and consider
    # allocating them once at startup?

    # pre-alloc array of x indices mapping to the start
    # of each window used for downsampling in y.
    # i_win: np.ndarray,
    # pre-alloc array of output downsampled y values
    # y_out: np.ndarray,

    x_start: int,
    step: float,

) -> int:
    # nbins = len(i_win)
    # count = len(xs)

    # these are pre-allocated and mutated by ``numba``
    # code in-place.
    y_out = np.zeros((frames, 4), ys.dtype)
    i_win = np.zeros(frames, xs.dtype)

    bincount = 0
    x_left = x_start

    # Find the first window's starting value which *includes* the
    # first value in the x-domain array, i.e. the first
    # "left-side-of-window" **plus** the downsampling step,
    # creates a window which includes the first x **value**.
    while xs[0] >= x_left + step:
        x_left += step

    # set all bins in the left-most entry to the starting left-most x value
    # (aka a row broadcast).
    i_win[bincount] = x_left
    # set all y-values to the first value passed in.
    y_out[bincount] = ys[0]

    for i in range(len(xs)):
        x = xs[i]
        y = ys[i]
        if x < x_left + step:   # the current window "step" is [bin, bin+1)
            y_out[bincount, 1] = min(y, y_out[bincount, 1])
            y_out[bincount, 2] = max(y, y_out[bincount, 2])
            y_out[bincount, 3] = y
        else:
            # Find the next bin
            while x >= x_left + step:
                x_left += step

            bincount += 1
            i_win[bincount] = x_left
            y_out[bincount] = y

    return bincount, i_win, y_out
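For readers new to M4: each window keeps exactly four samples of the input line - first, min, max and last - which suffices to rasterize the same pixel columns as the full dataset. A dependency-free reference of that binning (an illustration only; unlike the numba kernel above it simply opens a new entry per non-empty bin):

def m4_reference(
    xs: list[float],
    ys: list[float],
    step: float,
) -> list[tuple[float, float, float, float]]:
    out: list[tuple[float, float, float, float]] = []
    window_id = None
    for x, y in zip(xs, ys):
        wid = int((x - xs[0]) // step)
        if wid != window_id:
            window_id = wid
            out.append((y, y, y, y))  # open a fresh window
        else:
            first, mn, mx, _ = out[-1]
            out[-1] = (first, min(mn, y), max(mx, y), y)
    return out

ys = [1, 5, 2, 9, 3, 3, 7, 0]
# window width 4 over x = 0..7 -> 2 windows of (first, min, max, last)
assert m4_reference(list(range(8)), ys, step=4) == [
    (1, 1, 9, 9),
    (3, 0, 7, 0),
]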
@@ -43,8 +43,8 @@ log = get_logger(__name__)
# latency (in terms of perceived lag in cross hair) so really be sure
# there's an improvement if you want to change it!

_mouse_rate_limit = 120  # TODO; should we calc current screen refresh rate?
_debounce_delay = 1 / 40
_mouse_rate_limit = 60  # TODO; should we calc current screen refresh rate?
_debounce_delay = 0
_ch_label_opac = 1
@@ -95,22 +95,24 @@ class LineDot(pg.CurvePoint):

    def event(
        self,

        ev: QtCore.QEvent,

    ) -> None:
    ) -> bool:
        if not isinstance(
            ev, QtCore.QDynamicPropertyChangeEvent
        ) or self.curve() is None:
            return False

        # TODO: get rid of this ``.getData()`` and
        # make a more pythonic api to retrieve backing
        # numpy arrays...
        (x, y) = self.curve().getData()
        index = self.property('index')
        # first = self._plot._arrays['ohlc'][0]['index']
        # first = x[0]
        # i = index - first
        if index:
            i = index - x[0]
            i = round(index - x[0])
            if i > 0 and i < len(y):
                newPos = (index, y[i])
                QtWidgets.QGraphicsItem.setPos(self, *newPos)
@@ -241,13 +243,13 @@ class ContentsLabels:
    def update_labels(
        self,
        index: int,
        # array_name: str,

    ) -> None:
        # for name, (label, update) in self._labels.items():
        for chart, name, label, update in self._labels:

            array = chart._arrays[name]
            flow = chart._flows[name]
            array = flow.shm.array

            if not (
                index >= 0
                and index < array[-1]['index']
@@ -256,8 +258,6 @@ class ContentsLabels:
                print('WTF out of range?')
                continue

            # array = chart._arrays[name]

            # call provided update func with data point
            try:
                label.show()
@@ -293,7 +293,8 @@ class ContentsLabels:


class Cursor(pg.GraphicsObject):
    '''Multi-plot cursor for use on a ``LinkedSplits`` chart (set).
    '''
    '''
    Multi-plot cursor for use on a ``LinkedSplits`` chart (set).

    '''
    def __init__(
@@ -308,7 +309,7 @@ class Cursor(pg.GraphicsObject):

        self.linked = linkedsplits
        self.graphics: dict[str, pg.GraphicsObject] = {}
        self.plots: List['PlotChartWidget'] = []  # type: ignore # noqa
        self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
        self.active_plot = None
        self.digits: int = digits
        self._datum_xy: tuple[int, float] = (0, 0)
@@ -405,6 +406,7 @@ class Cursor(pg.GraphicsObject):
            slot=self.mouseMoved,
            delay=_debounce_delay,
        )

        px_enter = pg.SignalProxy(
            plot.sig_mouse_enter,
            rateLimit=_mouse_rate_limit,
@@ -436,7 +438,10 @@ class Cursor(pg.GraphicsObject):
        if plot.linked.xaxis_chart is plot:
            xlabel = self.xaxis_label = XAxisLabel(
                parent=self.plots[plot_index].getAxis('bottom'),
                # parent=self.plots[plot_index].pi_overlay.get_axis(plot.plotItem, 'bottom'),
                # parent=self.plots[plot_index].pi_overlay.get_axis(
                #     plot.plotItem, 'bottom'
                # ),

                opacity=_ch_label_opac,
                bg_color=self.label_color,
            )
@@ -454,9 +459,12 @@ class Cursor(pg.GraphicsObject):
    ) -> LineDot:
        # if this plot contains curves add line dot "cursors" to denote
        # the current sample under the mouse
        main_flow = plot._flows[plot.name]
        # read out last index
        i = main_flow.shm.array[-1]['index']
        cursor = LineDot(
            curve,
            index=plot._arrays[plot.name][-1]['index'],
            index=i,
            plot=plot
        )
        plot.addItem(cursor)
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -23,6 +23,7 @@ from typing import Optional
import numpy as np
import pyqtgraph as pg
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtWidgets import QGraphicsItem
from PyQt5.QtCore import (
    Qt,
    QLineF,
@@ -31,59 +32,90 @@ from PyQt5.QtCore import (
    QPointF,
)

from .._profile import pg_profile_enabled
from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
from ._compression import (
    # ohlc_to_m4_line,
    ds_m4,
)
from ..log import get_logger


def step_path_arrays_from_1d(
    x: np.ndarray,
    y: np.ndarray,
    include_endpoints: bool = False,
log = get_logger(__name__)

) -> (np.ndarray, np.ndarray):
    '''
    Generate a "step mode" curve aligned with OHLC style bars
    such that each segment spans each bar (aka "centered" style).

    '''
    y_out = y.copy()
    x_out = x.copy()
    x2 = np.empty(
        # the data + 2 endpoints on either end for
        # "termination of the path".
        (len(x) + 1, 2),
        # we want to align with OHLC or other sampling style
        # bars likely so we need fractional values
        dtype=float,
    )
    x2[0] = x[0] - 0.5
    x2[1] = x[0] + 0.5
    x2[1:] = x[:, np.newaxis] + 0.5
# TODO: numba this instead..
# def step_path_arrays_from_1d(
#     x: np.ndarray,
#     y: np.ndarray,
#     include_endpoints: bool = True,

    # flatten to 1-d
    x_out = x2.reshape(x2.size)
# ) -> (np.ndarray, np.ndarray):
#     '''
#     Generate a "step mode" curve aligned with OHLC style bars
#     such that each segment spans each bar (aka "centered" style).

    # we create a 1d with 2 extra indexes to
    # hold the start and (current) end value for the steps
    # on either end
    y2 = np.empty((len(y), 2), dtype=y.dtype)
    y2[:] = y[:, np.newaxis]
#     '''
#     # y_out = y.copy()
#     # x_out = x.copy()

    y_out = np.empty(
        2*len(y) + 2,
        dtype=y.dtype
    )
#     # x2 = np.empty(
#     #     # the data + 2 endpoints on either end for
#     #     # "termination of the path".
#     #     (len(x) + 1, 2),
#     #     # we want to align with OHLC or other sampling style
#     #     # bars likely so we need fractional values
#     #     dtype=float,
#     # )

    # flatten and set 0 endpoints
    y_out[1:-1] = y2.reshape(y2.size)
    y_out[0] = 0
    y_out[-1] = 0
# x2 = np.broadcast_to(
#     x[:, None],
#     (
#         x.size + 1,
#         # 4,  # only ohlc
#         2,
#     ),
# ) + np.array([-0.5, 0.5])

    if not include_endpoints:
        return x_out[:-1], y_out[:-1]
# # x2[0] = x[0] - 0.5
# # x2[1] = x[0] + 0.5
# # x2[0, 0] = x[0] - 0.5
# # x2[0, 1] = x[0] + 0.5
# # x2[1:] = x[:, np.newaxis] + 0.5
# # import pdbpp
# # pdbpp.set_trace()

    else:
        return x_out, y_out
# # flatten to 1-d
# # x_out = x2.reshape(x2.size)
# # x_out = x2

# # we create a 1d with 2 extra indexes to
# # hold the start and (current) end value for the steps
# # on either end
# y2 = np.empty(
#     (len(y) + 1, 2),
#     dtype=y.dtype,
# )
# y2[:] = y[:, np.newaxis]
# # y2[-1] = 0

# # y_out = y2

# # y_out = np.empty(
# #     2*len(y) + 2,
# #     dtype=y.dtype
# # )

# # flatten and set 0 endpoints
# # y_out[1:-1] = y2.reshape(y2.size)
# # y_out[0] = 0
# # y_out[-1] = 0

# if not include_endpoints:
#     return x2[:-1], y2[:-1]

# else:
#     return x2, y2


_line_styles: dict[str, int] = {
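The deleted ``step_path_arrays_from_1d()`` is the clearest statement of the "centered step" layout: every sample at index i becomes a flat segment spanning i - 0.5 to i + 0.5. A compact restatement of that idea (an illustration of the technique, not the removed function verbatim - it skips the zeroed path-termination endpoints):

import numpy as np

def centered_steps(
    x: np.ndarray,
    y: np.ndarray,
) -> tuple[np.ndarray, np.ndarray]:
    # duplicate each (x, y) into the two corners of a flat step
    # spanning [x - 0.5, x + 0.5], matching OHLC bar alignment
    x_out = np.repeat(x.astype(float), 2)
    x_out[0::2] -= 0.5
    x_out[1::2] += 0.5
    y_out = np.repeat(y, 2)
    return x_out, y_out

x = np.array([10, 11, 12])
y = np.array([1.0, 2.0, 1.5])
xs, ys = centered_steps(x, y)
assert list(xs) == [9.5, 10.5, 10.5, 11.5, 11.5, 12.5]
assert list(ys) == [1.0, 1.0, 2.0, 2.0, 1.5, 1.5]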
@@ -94,8 +126,7 @@ _line_styles: dict[str, int] = {
}


# TODO: got a feeling that dropping this inheritance gets us even more speedups
class FastAppendCurve(pg.PlotCurveItem):
class FastAppendCurve(pg.GraphicsObject):
    '''
    A faster, append friendly version of ``pyqtgraph.PlotCurveItem``
    built for real-time data updates.
@@ -110,22 +141,46 @@ class FastAppendCurve(pg.PlotCurveItem):
    '''
    def __init__(
        self,

        x: np.ndarray = None,
        y: np.ndarray = None,
        *args,

        step_mode: bool = False,
        color: str = 'default_lightest',
        fill_color: Optional[str] = None,
        style: str = 'solid',
        name: Optional[str] = None,
        use_fpath: bool = True,

        **kwargs

    ) -> None:

        # brutaaalll, see comments within..
        self._y = self.yData = y
        self._x = self.xData = x
        self._vr: Optional[tuple] = None
        self._avr: Optional[tuple] = None
        self._br = None

        self._name = name
        self.path: Optional[QtGui.QPainterPath] = None

        self.use_fpath = use_fpath
        self.fast_path: Optional[QtGui.QPainterPath] = None

        # TODO: we can probably just dispense with the parent since
        # we're basically only using the pen setting now...
        super().__init__(*args, **kwargs)
        self._name = name
        self._xrange: tuple[int, int] = self.dataBounds(ax=0)

        # self._xrange: tuple[int, int] = self.dataBounds(ax=0)
        self._xrange: Optional[tuple[int, int]] = None
        # self._x_iv_range = None

        # self._last_draw = time.time()
        self._in_ds: bool = False
        self._last_uppx: float = 0

        # all history of curve is drawn in single px thickness
        pen = pg.mkPen(hcolor(color))
@@ -134,37 +189,111 @@ class FastAppendCurve(pg.PlotCurveItem):
        if 'dash' in style:
            pen.setDashPattern([8, 3])

        self.setPen(pen)
        self._pen = pen

        # last segment is drawn in 2px thickness for emphasis
        # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
        self.last_step_pen = pg.mkPen(pen, width=2)

        self._last_line: QLineF = None
        self._last_step_rect: QRectF = None
        self._last_line: Optional[QLineF] = None
        self._last_step_rect: Optional[QRectF] = None

        # flat-top style histogram-like discrete curve
        self._step_mode: bool = step_mode

        # self._fill = True
        self.setBrush(hcolor(fill_color or color))
        self._brush = pg.functions.mkBrush(hcolor(fill_color or color))

        # TODO: one question still remaining is if this makes transform
        # interactions slower (such as zooming) and if so maybe if/when
        # we implement a "history" mode for the view we disable this in
        # that mode?
        if step_mode:
            # don't enable caching by default for the case where the
            # only thing drawn is the "last" line segment which can
            # have a weird artifact where it won't be fully drawn to its
            # endpoint (something we saw on trade rate curves)
            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
        # if step_mode:
        # don't enable caching by default for the case where the
        # only thing drawn is the "last" line segment which can
        # have a weird artifact where it won't be fully drawn to its
        # endpoint (something we saw on trade rate curves)
        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

    # TODO: probably stick this in a new parent
    # type which will contain our own version of
    # what ``PlotCurveItem`` had in terms of base
    # functionality? A `FlowGraphic` maybe?
    def x_uppx(self) -> int:

        px_vecs = self.pixelVectors()[0]
        if px_vecs:
            xs_in_px = px_vecs.x()
            return round(xs_in_px)
        else:
            return 0

    def px_width(self) -> float:

        vb = self.getViewBox()
        if not vb:
            return 0

        vr = self.viewRect()
        l, r = int(vr.left()), int(vr.right())

        if not self._xrange:
            return 0

        start, stop = self._xrange
        lbar = max(l, start)
        rbar = min(r, stop)

        return vb.mapViewToDevice(
            QLineF(lbar, 0, rbar, 0)
        ).length()

    def downsample(
        self,
        x,
        y,
        px_width,
        uppx,

    ) -> tuple[np.ndarray, np.ndarray]:

        # downsample whenever more than 1 pixel per datum can be shown.
        # always refresh data bounds until we get diffing
        # working properly, see above..
        bins, x, y = ds_m4(
            x,
            y,
            px_width=px_width,
            uppx=uppx,
            # log_scale=bool(uppx)
        )
        x = np.broadcast_to(x[:, None], y.shape)
        # x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
        x = (x + np.array([-0.5, 0, 0, 0.5])).flatten()
        y = y.flatten()

        # presumably?
        self._in_ds = True
        return x, y

    def update_from_array(
        self,

        # full array input history
        x: np.ndarray,
        y: np.ndarray,

        # pre-sliced array data that's "in view"
        x_iv: np.ndarray,
        y_iv: np.ndarray,

        view_range: Optional[tuple[int, int]] = None,
        profiler: Optional[pg.debug.Profiler] = None,
        draw_last: bool = True,
        slice_to_head: int = -1,
        do_append: bool = True,
        should_redraw: bool = False,

    ) -> QtGui.QPainterPath:
        '''
        Update curve from input 2-d data.
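The broadcast trick inside ``downsample()`` above expands each window's single x value into four x coords, one per retained (first, min, max, last) sample; a small numpy check with toy values:

import numpy as np

i_win = np.array([0.0, 4.0])      # one x per downsample window
y_out = np.array([
    [1.0, 1.0, 9.0, 9.0],         # (first, min, max, last)
    [3.0, 0.0, 7.0, 0.0],
])
x = np.broadcast_to(i_win[:, None], y_out.shape)
x = (x + np.array([-0.5, 0, 0, 0.5])).flatten()
assert list(x) == [-0.5, 0, 0, 0.5, 3.5, 4, 4, 4.5]
assert y_out.flatten().shape == x.shape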
@@ -173,42 +302,235 @@ class FastAppendCurve(pg.PlotCurveItem):
        a length diff.

        '''
        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
        profiler = profiler or pg.debug.Profiler(
            msg=f'FastAppendCurve.update_from_array(): `{self._name}`',
            disabled=not pg_profile_enabled(),
            gt=ms_slower_then,
        )
        flip_cache = False

        istart, istop = self._xrange
        # print(f"xrange: {self._xrange}")
        if self._xrange:
            istart, istop = self._xrange
        else:
            self._xrange = istart, istop = x[0], x[-1]

        # compute the length diffs between the first/last index entry in
        # the input data and the last indexes we have on record from the
        # last time we updated the curve index.
        prepend_length = istart - x[0]
        append_length = x[-1] - istop
        prepend_length = int(istart - x[0])
        append_length = int(x[-1] - istop)

        # step mode: draw flat top discrete "step"
        # over the index space for each datum.
        if self._step_mode:
            x_out, y_out = step_path_arrays_from_1d(x[:-1], y[:-1])
        # this is the diff-mode, "data"-rendered index
        # tracking var..
        self._xrange = x[0], x[-1]

        # print(f"xrange: {self._xrange}")

        # XXX: lol brutal, the internals of `CurvePoint` (inherited by
        # our `LineDot`) required ``.getData()`` to work..
        # self.xData = x
        # self.yData = y
        # self._x, self._y = x, y

        # downsampling incremental state checking
        uppx = self.x_uppx()
        px_width = self.px_width()
        uppx_diff = (uppx - self._last_uppx)

        new_sample_rate = False
        should_ds = self._in_ds
        showing_src_data = self._in_ds
        # should_redraw = False

        # if a view range is passed, plan to draw the
        # source output that's "in view" of the chart.
        if (
            view_range
            # and not self._in_ds
            # and not prepend_length > 0
        ):
            # print(f'{self._name} vr: {view_range}')

            # by default we only pull data up to the last (current) index
            x_out, y_out = x_iv[:slice_to_head], y_iv[:slice_to_head]
            profiler(f'view range slice {view_range}')

            vl, vr = view_range

            # last_ivr = self._x_iv_range
            # ix_iv, iy_iv = self._x_iv_range = (x_iv[0], x_iv[-1])

            zoom_or_append = False
            last_vr = self._vr
            last_ivr = self._avr

            if last_vr:
                # relative slice indices
                lvl, lvr = last_vr
                # abs slice indices
                al, ar = last_ivr

                # append_length = int(x[-1] - istop)
                # append_length = int(x_iv[-1] - ar)

                # left_change = abs(x_iv[0] - al) >= 1
                # right_change = abs(x_iv[-1] - ar) >= 1

                if (
                    # likely a zoom view change
                    (vr - lvr) > 2 or vl < lvl
                    # append / prepend update
                    # we had an append update where the view range
                    # didn't change but the data-viewed (shifted)
                    # underneath, so we need to redraw.
                    # or left_change and right_change and last_vr == view_range

                    # not (left_change and right_change) and ivr
                    # (
                    # or abs(x_iv[ivr] - livr) > 1
                ):
                    zoom_or_append = True

            # if last_ivr:
            #     liivl, liivr = last_ivr

            if (
                view_range != last_vr
                and (
                    append_length > 1
                    or zoom_or_append
                )
            ):
                should_redraw = True
                # print("REDRAWING BRUH")

            self._vr = view_range
            self._avr = x_iv[0], x_iv[slice_to_head]

            # x_last = x_iv[-1]
            # y_last = y_iv[-1]
            # self._last_vr = view_range

            # self.disable_cache()
            # flip_cache = True

        else:
            # if (
            #     not view_range
            #     or self._in_ds
            # ):
            # by default we only pull data up to the last (current) index
            x_out, y_out = x[:-1], y[:-1]
            x_out, y_out = x[:slice_to_head], y[:slice_to_head]

            if prepend_length > 0:
                should_redraw = True

        # check for downsampling conditions
        if (
            # std m4 downsample conditions
            px_width
            and abs(uppx_diff) >= 1
        ):
            log.info(
                f'{self._name} sampler change: {self._last_uppx} -> {uppx}'
            )
            self._last_uppx = uppx
            new_sample_rate = True
            showing_src_data = False
            should_redraw = True
            should_ds = True

        elif (
            uppx <= 2
            and self._in_ds
        ):
            # we should de-downsample back to our original
            # source data so we clear our path data in prep
            # to generate a new one from original source data.
            should_redraw = True
            new_sample_rate = True
            should_ds = False
            showing_src_data = True

        # no_path_yet = self.path is None
        if (
            self.path is None
            or should_redraw
            or new_sample_rate
            or prepend_length > 0
        ):
            # if (
            #     not view_range
            #     or self._in_ds
            # ):
            #     # by default we only pull data up to the last (current) index
            #     x_out, y_out = x[:-1], y[:-1]

            # step mode: draw flat top discrete "step"
            # over the index space for each datum.
            # if self._step_mode:
            #     self.disable_cache()
            #     flip_cache = True
            #     x_out, y_out = step_path_arrays_from_1d(
            #         x_out,
            #         y_out,
            #     )

            #     # TODO: numba this bish
            #     profiler('generated step arrays')

            if should_redraw:
                if self.path:
                    # print(f'CLEARING PATH {self._name}')
                    self.path.clear()

                if self.fast_path:
                    self.fast_path.clear()

                profiler('cleared paths due to `should_redraw` set')

            if new_sample_rate and showing_src_data:
                # if self._in_ds:
                log.info(f'DEDOWN -> {self._name}')

                self._in_ds = False

            elif should_ds and uppx and px_width > 1:
                x_out, y_out = self.downsample(
                    x_out,
                    y_out,
                    px_width,
                    uppx,
                )
                profiler(f'FULL PATH downsample redraw={should_ds}')
                self._in_ds = True

            if self.path is None or prepend_length > 0:
                self.path = pg.functions.arrayToQPath(
                    x_out,
                    y_out,
                    connect='all',
                    finiteCheck=False,
                    path=self.path,
                )
                profiler('generate fresh path')
            self.prepareGeometryChange()
            profiler(
                f'generated fresh path. (should_redraw: {should_redraw} '
                f'should_ds: {should_ds} new_sample_rate: {new_sample_rate})'
            )
            # profiler(f'DRAW PATH IN VIEW -> {self._name}')

            # if self._step_mode:
            #     self.path.closeSubpath()
            # reserve mem allocs see:
            # - https://doc.qt.io/qt-5/qpainterpath.html#reserve
            # - https://doc.qt.io/qt-5/qpainterpath.html#capacity
            # - https://doc.qt.io/qt-5/qpainterpath.html#clear
            # XXX: right now this is based on ad hoc checks on a
            # hidpi 3840x2160 4k monitor but we should optimize for
            # the target display(s) on the sys.
            # if no_path_yet:
            #     self.path.reserve(int(500e3))

        # TODO: get this piecewise prepend working - right now it's
        # giving heck on vwap...
        # if prepend_length:
        # elif prepend_length:
        #     breakpoint()

        # prepend_path = pg.functions.arrayToQPath(
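In prose, the redraw policy above is: rebuild the whole path when there is no path yet, history was prepended, or the sampler rate flipped; otherwise only append. A boiled-down sketch of that gate (flag names mirror the hunk; the helper itself is illustrative):

def should_rebuild_path(
    has_path: bool,
    prepend_length: int,
    new_sample_rate: bool,
    should_redraw: bool,
) -> bool:
    return (
        not has_path
        or should_redraw
        or new_sample_rate
        or prepend_length > 0
    )

# plain right-side append on an already drawn curve: no rebuild
assert not should_rebuild_path(True, 0, False, False)
# history backfill arrived (prepend): full rebuild
assert should_rebuild_path(True, 512, False, False)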
@@ -223,74 +545,114 @@ class FastAppendCurve(pg.PlotCurveItem):
        # # self.path.moveTo(new_x[0], new_y[0])
        # self.path.connectPath(old_path)

        elif append_length > 0:
            if self._step_mode:
                new_x, new_y = step_path_arrays_from_1d(
                    x[-append_length - 2:-1],
                    y[-append_length - 2:-1],
                )
                # [1:] since we don't need the vertical line normally at
                # the beginning of the step curve taking the first (x,
                # y) point down to the x-axis **because** this is an
                # appended path graphic.
                new_x = new_x[1:]
                new_y = new_y[1:]
        elif (
            append_length > 0
            and do_append
            and not should_redraw
            # and not view_range
        ):
            print(f'{self._name} append len: {append_length}')
            new_x = x[-append_length - 2:slice_to_head]
            new_y = y[-append_length - 2:slice_to_head]
            profiler('sliced append path')

            else:
                # print(f"append_length: {append_length}")
                new_x = x[-append_length - 2:-1]
                new_y = y[-append_length - 2:-1]
                # print((new_x, new_y))
            # if self._step_mode:
            #     # new_x, new_y = step_path_arrays_from_1d(
            #     #     new_x,
            #     #     new_y,
            #     # )
            #     # # [1:] since we don't need the vertical line normally at
            #     # # the beginning of the step curve taking the first (x,
            #     # # y) point down to the x-axis **because** this is an
            #     # # appended path graphic.
            #     # new_x = new_x[1:]
            #     # new_y = new_y[1:]

            # self.disable_cache()
            # flip_cache = True

            # profiler('generated step data')

            profiler(
                f'diffed array input, append_length={append_length}'
            )

            # if should_ds:
            #     new_x, new_y = self.downsample(
            #         new_x,
            #         new_y,
            #         px_width,
            #         uppx,
            #     )
            #     profiler(f'fast path downsample redraw={should_ds}')

            append_path = pg.functions.arrayToQPath(
                new_x,
                new_y,
                connect='all',
                # finiteCheck=False,
                finiteCheck=False,
                path=self.fast_path,
            )
            profiler('generated append qpath')

            path = self.path
            if self.use_fpath:
                # an attempt at trying to make append-updates faster..
                if self.fast_path is None:
                    self.fast_path = append_path
                    # self.fast_path.reserve(int(6e3))
                else:
                    self.fast_path.connectPath(append_path)
                    size = self.fast_path.capacity()
                    profiler(f'connected fast path w size: {size}')

                # print(f"append_path br: {append_path.boundingRect()}")
                # self.path.moveTo(new_x[0], new_y[0])
                # path.connectPath(append_path)

                # XXX: lol this causes a hang..
                # self.path = self.path.simplified()
            else:
                size = self.path.capacity()
                profiler(f'connected history path w size: {size}')
                self.path.connectPath(append_path)

            # other merging ideas:
            # https://stackoverflow.com/questions/8936225/how-to-merge-qpainterpaths
            if self._step_mode:
                # path.addPath(append_path)
                self.path.connectPath(append_path)
                # path.addPath(append_path)
                # path.closeSubpath()

                # TODO: try out new work from `pyqtgraph` main which
                # should repair horrid perf:
                # https://github.com/pyqtgraph/pyqtgraph/pull/2032
                # ok, nope still horrible XD
                # if self._fill:
                #     # XXX: super slow set "union" op
                #     self.path = self.path.united(append_path).simplified()
                # TODO: try out new work from `pyqtgraph` main which
                # should repair horrid perf:
                # https://github.com/pyqtgraph/pyqtgraph/pull/2032
                # ok, nope still horrible XD
                # if self._fill:
                #     # XXX: super slow set "union" op
                #     self.path = self.path.united(append_path).simplified()

                # # path.addPath(append_path)
                # # path.closeSubpath()
                # self.disable_cache()
                # flip_cache = True

            else:
                # print(f"append_path br: {append_path.boundingRect()}")
                # self.path.moveTo(new_x[0], new_y[0])
                # self.path.connectPath(append_path)
                path.connectPath(append_path)
        if draw_last:
            self.draw_last(x, y)
            profiler('draw last segment')

            self.disable_cache()
            flip_cache = True

        if (
            self._step_mode
        ):
            self.disable_cache()
            flip_cache = True
        # if flip_cache:
        #     # # XXX: seems to be needed to avoid artifacts (see above).
        #     self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

        # print(f"update br: {self.path.boundingRect()}")
        # trigger redraw of path
        # do update before reverting to cache mode
        self.update()
        profiler('.update()')

        # XXX: lol brutal, the internals of `CurvePoint` (inherited by
        # our `LineDot`) required ``.getData()`` to work..
        self.xData = x
        self.yData = y
    def draw_last(
        self,
        x: np.ndarray,
        y: np.ndarray,

        x0, x_last = self._xrange = x[0], x[-1]
    ) -> None:
        x_last = x[-1]
        y_last = y[-1]

        # draw the "current" step graphic segment so it lines up with
@ -299,26 +661,64 @@ class FastAppendCurve(pg.PlotCurveItem):
|
|||
self._last_line = QLineF(
|
||||
x_last - 0.5, 0,
|
||||
x_last + 0.5, 0,
|
||||
# x_last, 0,
|
||||
# x_last, 0,
|
||||
)
|
||||
self._last_step_rect = QRectF(
|
||||
x_last - 0.5, 0,
|
||||
x_last + 0.5, y_last
|
||||
# x_last, 0,
|
||||
# x_last, y_last
|
||||
)
|
||||
# print(
|
||||
# f"path br: {self.path.boundingRect()}",
|
||||
# f"fast path br: {self.fast_path.boundingRect()}",
|
||||
# f"last rect br: {self._last_step_rect}",
|
||||
# )
|
||||
else:
|
||||
# print((x[-1], y_last))
|
||||
self._last_line = QLineF(
|
||||
x[-2], y[-2],
|
||||
x[-1], y_last
|
||||
x_last, y_last
|
||||
)
|
||||
|
||||
# trigger redraw of path
|
||||
# do update before reverting to cache mode
|
||||
self.prepareGeometryChange()
|
||||
self.update()
|
||||
|
||||
if flip_cache:
|
||||
# XXX: seems to be needed to avoid artifacts (see above).
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
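In the non-step case ``draw_last()`` reduces to caching a single ``QLineF`` from the second-to-last to the last datum which ``paint()`` can then stroke cheaply every refresh; in step mode it is a short horizontal level segment plus a fill rect up from the x-axis. A hedged sketch of that idea (the concept, not the exact geometry used above):

from PyQt5.QtCore import QLineF, QRectF

def last_datum_graphics(
    x,
    y,
    step_mode: bool = False,
    w: float = 0.5,  # half the step width in x-units
):
    x_last, y_last = x[-1], y[-1]
    if step_mode:
        # horizontal "current level" segment..
        line = QLineF(x_last - w, y_last, x_last + w, y_last)
        # ..plus a rect from the x-axis up to that level
        rect = QRectF(x_last - w, 0, 2 * w, y_last)
        return line, rect

    # plain line segment connecting the last two datums
    return QLineF(x[-2], y[-2], x_last, y_last), None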

    # XXX: lol brutal, the internals of `CurvePoint` (inherited by
    # our `LineDot`) required ``.getData()`` to work..
    def getData(self):
        return self._x, self._y

    # TODO: drop the above after ``Cursor`` re-work
    def get_arrays(self) -> tuple[np.ndarray, np.ndarray]:
        return self._x, self._y

    def clear(self):
        '''
        Clear internal graphics making object ready for full re-draw.

        '''
        # NOTE: original code from ``pg.PlotCurveItem``
        self.xData = None
        self.yData = None

        # XXX: previously, if not trying to leverage `.reserve()` allocs
        # then you might as well create a new one..
        # self.path = None

        # path reservation aware non-mem de-alloc cleaning
        if self.path:
            self.path.clear()

        if self.fast_path:
            # self.fast_path.clear()
            self.fast_path = None

        # self.disable_cache()
        # self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

    def reset_cache(self) -> None:
        self.disable_cache()
        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

    def disable_cache(self) -> None:
        '''

@ -328,7 +728,7 @@ class FastAppendCurve(pg.PlotCurveItem):

        # XXX: pretty annoying but, without this there's little
        # artefacts on the append updates to the curve...
        self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
        self.prepareGeometryChange()
        # self.prepareGeometryChange()

    def boundingRect(self):
        '''

@ -339,29 +739,65 @@ class FastAppendCurve(pg.PlotCurveItem):

        else:
            # dynamically override this method after initial
            # path is created to avoid requiring the above None check
            self.boundingRect = self._br
            return self._br()
            self.boundingRect = self._path_br
            return self._path_br()

    def _br(self):
    def _path_br(self):
        '''
        Post init ``.boundingRect()``.

        '''
        hb = self.path.controlPointRect()
        # hb = self.path.boundingRect()
        hb_size = hb.size()

        fp = self.fast_path
        if fp:
            fhb = fp.controlPointRect()
            hb_size = fhb.size() + hb_size
        # print(f'hb_size: {hb_size}')

        # if self._last_step_rect:
        #     hb_size += self._last_step_rect.size()

        # if self._line:
        #     br = self._last_step_rect.bottomRight()

        # tl = QPointF(
        #     # self._vr[0],
        #     # hb.topLeft().y(),
        #     # 0,
        #     # hb_size.height() + 1
        # )

        # if self._last_step_rect:
        #     br = self._last_step_rect.bottomRight()

        # else:
        #     hb_size += QSizeF(1, 1)
        w = hb_size.width() + 1
        h = hb_size.height() + 1

        # br = QPointF(
        #     self._vr[-1],
        #     # tl.x() + w,
        #     # tl.y() + h,
        # )

        br = QRectF(

            # top left
            # hb.topLeft()
            # tl,
            QPointF(hb.topLeft()),

            # br,
            # total size
            # QSizeF(hb_size)
            # hb_size,
            QSizeF(w, h)
        )
        self._br = br
        # print(f'bounding rect: {br}')
        return br
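Re-binding ``boundingRect`` to the fast implementation after the first path exists is a micro-optimization: Qt consults ``boundingRect()`` constantly during scene updates, the swap removes the ``None`` check from every subsequent call, and ``controlPointRect()`` is used because it is cheaper than an exact ``boundingRect()`` on a ``QPainterPath``. The pattern in isolation (``PathItem`` is a made-up minimal item, not this module's class):

from PyQt5 import QtCore, QtGui, QtWidgets

class PathItem(QtWidgets.QGraphicsItem):
    def __init__(self):
        super().__init__()
        self.path = None  # Optional[QtGui.QPainterPath]

    def boundingRect(self) -> QtCore.QRectF:
        if self.path is None:
            return QtGui.QPainterPath().controlPointRect()
        # after the first path exists, swap in the fast version so
        # later calls skip the None check entirely
        self.boundingRect = self._path_br
        return self._path_br()

    def _path_br(self) -> QtCore.QRectF:
        # cheaper than .boundingRect() since curve extrema aren't
        # evaluated exactly
        return self.path.controlPointRect()

    def paint(self, p, opt, widget=None) -> None:
        if self.path is not None:
            p.drawPath(self.path)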

@ -373,32 +809,44 @@ class FastAppendCurve(pg.PlotCurveItem):

    ) -> None:

        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
        # p.setRenderHint(p.Antialiasing, True)
        profiler = pg.debug.Profiler(
            msg=f'FastAppendCurve.paint(): `{self._name}`',
            disabled=not pg_profile_enabled(),
            gt=ms_slower_then,
        )
        self.prepareGeometryChange()

        if (
            self._step_mode
            and self._last_step_rect
        ):
            brush = self.opts['brush']
            brush = self._brush

            # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
            # p.drawRect(self._last_step_rect)
            p.fillRect(self._last_step_rect, brush)
            profiler('.fillRect()')

            # p.drawPath(self.path)
            # profiler('.drawPath()')
        if self._last_line:
            p.setPen(self.last_step_pen)
            p.drawLine(self._last_line)
            profiler('.drawLine()')
            p.setPen(self._pen)

        p.setPen(self.last_step_pen)
        p.drawLine(self._last_line)
        profiler('.drawLine()')
        path = self.path

        # else:
        p.setPen(self.opts['pen'])
        p.drawPath(self.path)
        profiler('.drawPath()')
        if path:
            p.drawPath(path)
            profiler(f'.drawPath(path): {path.capacity()}')

        fp = self.fast_path
        if fp:
            p.drawPath(fp)
            profiler('.drawPath(fast_path)')

        # TODO: try out new work from `pyqtgraph` main which should
        # repair horrid perf (pretty sure i did and it was still
        # horrible?):
        # https://github.com/pyqtgraph/pyqtgraph/pull/2032
        # if self._fill:
        #     brush = self.opts['brush']
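Note the profiler idiom used throughout this diff: checkpoints are named with a call per stage and only frames slower than a threshold get reported (the ``gt`` ms-cutoff kwarg is from this project's patched profiler, not stock ``pyqtgraph``). The general shape:

import pyqtgraph as pg

def hot_path():
    profiler = pg.debug.Profiler(
        msg='hot path',
        disabled=False,  # normally gated on a global profiling flag
        delayed=True,    # buffer output until .finish()
    )
    # ... stage one work ...
    profiler('stage one')
    # ... stage two work ...
    profiler('stage two')
    # flush the report (also happens on garbage collection)
    profiler.finish()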


@ -21,16 +21,19 @@ this module ties together quote and computational (fsp) streams with
graphics update methods via our custom ``pyqtgraph`` charting api.

'''
from dataclasses import dataclass
from functools import partial
import time
from typing import Optional
from typing import Optional, Any, Callable

import numpy as np
import tractor
import trio
import pyqtgraph as pg

from .. import brokers
from ..data.feed import open_feed
from ._axes import YAxisLabel
from ._chart import (
    ChartPlotWidget,
    LinkedSplits,

@ -49,12 +52,16 @@ from ._forms import (
    mk_order_pane_layout,
)
from .order_mode import open_order_mode
# from .._profile import (
#     pg_profile_enabled,
#     ms_slower_then,
# )
from ..log import get_logger

log = get_logger(__name__)

# TODO: load this from a config.toml!
_quote_throttle_rate: int = 6 + 16  # Hz
_quote_throttle_rate: int = 22  # Hz


# a working tick-type-classes template

@ -65,12 +72,20 @@ _tick_groups = {
}


# TODO: delegate this to each `Flow.maxmin()` which includes
# caching and further we should implement the following stream based
# approach, likely with ``numba``:
# https://arxiv.org/abs/cs/0610046
# https://github.com/lemire/pythonmaxmin
def chart_maxmin(
    chart: ChartPlotWidget,
    ohlcv_shm: ShmArray,
    vlm_chart: Optional[ChartPlotWidget] = None,

) -> tuple[
    tuple[int, int, int, int],
    float,
    float,
    float,

@ -79,34 +94,53 @@ def chart_maxmin(
    Compute max and min datums "in view" for range limits.

    '''
    # TODO: implement this
    # https://arxiv.org/abs/cs/0610046
    # https://github.com/lemire/pythonmaxmin

    array = chart._arrays[chart.name]
    ifirst = array[0]['index']

    last_bars_range = chart.bars_range()
    l, lbar, rbar, r = last_bars_range
    in_view = array[lbar - ifirst:rbar - ifirst + 1]
    out = chart.maxmin()

    if not in_view.size:
        log.warning('Resetting chart to data')
        chart.default_view()
    if out is None:
        return (last_bars_range, 0, 0, 0)

    mx, mn = np.nanmax(in_view['high']), np.nanmin(in_view['low'])

    # TODO: when we start using line charts, probably want to make
    # this an overloaded call on our ``DataView``
    # sym = chart.name
    # mx, mn = np.nanmax(in_view[sym]), np.nanmin(in_view[sym])
    mn, mx = out

    mx_vlm_in_view = 0
    if vlm_chart:
        mx_vlm_in_view = np.max(in_view['volume'])
        out = vlm_chart.maxmin()
        if out:
            _, mx_vlm_in_view = out

    return last_bars_range, mx, max(mn, 0), mx_vlm_in_view
    return (
        last_bars_range,
        mx,
        max(mn, 0),  # presuming price can't be negative?
        mx_vlm_in_view,
    )
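Both the old inlined scan and the new ``chart.maxmin()`` delegate boil down to a nan-aware high/low reduction over the slice of the shm struct array currently on screen. That core computation, self-contained:

import numpy as np

def in_view_maxmin(
    array: np.ndarray,  # struct array with 'index', 'high', 'low' fields
    lbar: int,
    rbar: int,
) -> tuple[float, float]:
    # map bar indices to array offsets via the first recorded index
    ifirst = array[0]['index']
    in_view = array[lbar - ifirst:rbar - ifirst + 1]
    if not in_view.size:
        return 0, 0

    # nan-aware so partially filled buffers don't poison the range
    return (
        float(np.nanmin(in_view['low'])),
        float(np.nanmax(in_view['high'])),
    )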


@dataclass
class DisplayState:
    '''
    Chart-local real-time graphics state container.

    '''
    quotes: dict[str, Any]

    maxmin: Callable
    ohlcv: ShmArray

    # high level chart handles
    linked: LinkedSplits
    chart: ChartPlotWidget
    vlm_chart: ChartPlotWidget

    # axis labels
    l1: L1Labels
    last_price_sticky: YAxisLabel
    vlm_sticky: YAxisLabel

    # misc state tracking
    vars: dict[str, Any]

    wap_in_history: bool = False


async def graphics_update_loop(

@ -147,10 +181,13 @@ async def graphics_update_loop(

    if vlm_chart:
        vlm_sticky = vlm_chart._ysticks['volume']
        vlm_view = vlm_chart.view

    maxmin = partial(chart_maxmin, chart, vlm_chart)
    chart.default_view()
    maxmin = partial(
        chart_maxmin,
        chart,
        ohlcv,
        vlm_chart,
    )
    last_bars_range: tuple[float, float]
    (
        last_bars_range,

@ -183,7 +220,7 @@ async def graphics_update_loop(
    tick_margin = 3 * tick_size

    chart.show()
    view = chart.view
    # view = chart.view
    last_quote = time.time()
    i_last = ohlcv.index

@ -210,7 +247,32 @@ async def graphics_update_loop(

    # async for quotes in iter_drain_quotes():

    ds = linked.display_state = DisplayState(**{
        'quotes': {},
        'linked': linked,
        'maxmin': maxmin,
        'ohlcv': ohlcv,
        'chart': chart,
        'last_price_sticky': last_price_sticky,
        'vlm_chart': vlm_chart,
        'vlm_sticky': vlm_sticky,
        'l1': l1,

        'vars': {
            'tick_margin': tick_margin,
            'i_last': i_last,
            'last_mx_vlm': last_mx_vlm,
            'last_mx': last_mx,
            'last_mn': last_mn,
        }
    })

    chart.default_view()

    # main real-time quotes update loop
    async for quotes in stream:

        ds.quotes = quotes
        quote_period = time.time() - last_quote
        quote_rate = round(
            1/quote_period, 1) if quote_period > 0 else float('inf')
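That is, the measured quote rate is just the reciprocal of the inter-arrival period, which can then be compared against the configured ``_quote_throttle_rate``. A tiny helper along those lines (the slack factor is an assumed example value):

def rate_ok(
    quote_rate: float,
    throttle_hz: float = 22,
    slack: float = 1.2,
) -> bool:
    # allow some headroom over the throttle before flagging a burst
    return quote_rate <= throttle_hz * slack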

@ -231,222 +293,324 @@ async def graphics_update_loop(

            chart.pause_all_feeds()
            continue

        for sym, quote in quotes.items():
            ic = chart.view._ic
            if ic:
                chart.pause_all_feeds()
                await ic.wait()
                chart.resume_all_feeds()

            # sync call to update all graphics/UX components.
            graphics_update_cycle(ds)


def graphics_update_cycle(
    ds: DisplayState,
    wap_in_history: bool = False,
    trigger_all: bool = False,  # flag used by prepend history updates

) -> None:
    # TODO: eventually optimize this whole graphics stack with ``numba``
    # hopefully XD

    chart = ds.chart

    profiler = pg.debug.Profiler(
        msg=f'Graphics loop cycle for: `{chart.name}`',
        disabled=True,  # not pg_profile_enabled(),
        gt=1/12 * 1e3,
        # gt=ms_slower_then,
    )

    # unpack multi-referenced components
    vlm_chart = ds.vlm_chart
    l1 = ds.l1
    ohlcv = ds.ohlcv
    array = ohlcv.array
    vars = ds.vars
    tick_margin = vars['tick_margin']

    update_uppx = 16

    for sym, quote in ds.quotes.items():

        # compute the first available graphic's x-units-per-pixel
        xpx = vlm_chart.view.x_uppx()

        # increment the view position by the sample offset.
        i_step = ohlcv.index
        i_diff = i_step - vars['i_last']
        vars['i_last'] = i_step

        (
            brange,
            mx_in_view,
            mn_in_view,
            mx_vlm_in_view,
        ) = ds.maxmin()
        (
            brange,
            mx_in_view,
            mn_in_view,
            mx_vlm_in_view,
        ) = maxmin()

        l, lbar, rbar, r = brange
        mx = mx_in_view + tick_margin
        mn = mn_in_view - tick_margin
        profiler('maxmin call')
        liv = r > i_step  # the last datum is in view

        # don't real-time "shift" the curve to the
        # left unless we get one of the following:
        if (
            (
                i_diff > 0  # no new sample step
                and xpx < 4  # chart is zoomed out very far
                and r >= i_step  # the last datum isn't in view
                and liv
            )
            or trigger_all
        ):
            # TODO: we should track and compute whether the last
            # pixel in a curve should show new data based on uppx
            # and then iff update curves and shift?
            chart.increment_view(steps=i_diff)

        # NOTE: vlm may be written by the ``brokerd`` backend
        # even though a tick sample is not emitted.
        # TODO: show dark trades differently
        # https://github.com/pikers/piker/issues/116
        array = ohlcv.array
        if vlm_chart:
            # always update y-label
            ds.vlm_sticky.update_from_data(
                *array[-1][['index', 'volume']]
            )

        # NOTE: this used to be implemented in a dedicated
        # "increment task": ``check_for_new_bars()`` but it doesn't
        # make sense to do a whole task switch when we can just do
        # this simple index-diff and all the fsp sub-curve graphics
        # are diffed on each draw cycle anyway; so updates to the
        # "curve" length is already automatic.
        i_step = ohlcv.index
        i_diff = i_step - i_last
        if i_diff > 0:
            chart.increment_view(
                steps=i_diff,
            )
            i_last = i_step

            if vlm_chart:
                vlm_chart.update_curve_from_array('volume', array)
                vlm_sticky.update_from_data(*array[-1][['index', 'volume']])

            if (
                (
                    xpx < update_uppx
                    or i_diff > 0
                    and liv
                )
                or trigger_all
            ):
                # TODO: make it so this doesn't have to be called
                # once the $vlm is up?
                vlm_chart.update_graphics_from_flow(
                    'volume',
                    # UGGGh, see ``maxmin()`` impl in `._fsp` for
                    # the overlayed plotitems... we need a better
                    # way to invoke a maxmin per overlay..
                    render=False,
                    # XXX: ^^^^ THIS IS SUPER IMPORTANT! ^^^^
                    # without this, since we disable the
                    # 'volume' (units) chart after the $vlm starts
                    # up we need to be sure to enable this
                    # auto-ranging otherwise there will be no handler
                    # connected to update accompanying overlay
                    # graphics..
                )

            if (
                mx_vlm_in_view != last_mx_vlm or
                mx_vlm_in_view > last_mx_vlm
                mx_vlm_in_view != vars['last_mx_vlm']
            ):
                # print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
                vlm_view._set_yrange(
                    yrange=(0, mx_vlm_in_view * 1.375)
                yrange = (0, mx_vlm_in_view * 1.375)
                vlm_chart.view._set_yrange(
                    yrange=yrange,
                )
                last_mx_vlm = mx_vlm_in_view
                vars['last_mx_vlm'] = mx_vlm_in_view

            for curve_name, flow in vlm_chart._flows.items():

                if not flow.render:
                    continue

                update_fsp_chart(
                    vlm_chart,
                    flow.shm,
                    flow,
                    curve_name,
                    array_key=curve_name,
                    do_append=xpx < update_uppx,
                )
                # is this even doing anything?
                # (pretty sure it's the real-time
                # resizing from last quote?)
                flow.plot.vb._set_yrange(
                fvb = flow.plot.vb
                fvb._set_yrange(
                    autoscale_linked_plots=False,
                    name=curve_name,
                )

        ticks_frame = quote.get('ticks', ())

        frames_by_type: dict[str, dict] = {}
        lasts = {}

        # build tick-type "frames" of tick sequences since
        # likely the tick arrival rate is higher than our
        # (throttled) quote stream rate.
        for tick in ticks_frame:
            price = tick.get('price')
            ticktype = tick.get('type')

            if ticktype == 'n/a' or price == -1:
                # okkk..
                continue

            # keys are entered in oldest-event-inserted-first order
            # since we iterate ``ticks_frame`` in standard order
            # above. in other words the order of the keys is the order
            # of tick events by type from the provider feed.
            frames_by_type.setdefault(ticktype, []).append(tick)

            # overwrites so the last tick per type is the entry
            lasts[ticktype] = tick
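The partitioning step above is a plain dict fold: every valid tick is bucketed by its type while a second mapping keeps only the most recent tick per type (dict insertion order preserves event order). A standalone version:

def partition_ticks(
    ticks_frame: list[dict],
) -> tuple[dict[str, list[dict]], dict[str, dict]]:
    frames_by_type: dict[str, list[dict]] = {}
    lasts: dict[str, dict] = {}

    for tick in ticks_frame:
        price = tick.get('price')
        ticktype = tick.get('type')
        if ticktype == 'n/a' or price == -1:
            # skip unclassified/invalid entries
            continue

        # per-type frames in arrival order
        frames_by_type.setdefault(ticktype, []).append(tick)
        # overwrite so only the most recent tick per type survives
        lasts[ticktype] = tick

    return frames_by_type, lasts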

        # from pprint import pformat
        # frame_counts = {
        #     typ: len(frame) for typ, frame in frames_by_type.items()
        # }
        # print(f'{pformat(frame_counts)}')
        # print(f'framed: {pformat(frames_by_type)}')
        # print(f'lasts: {pformat(lasts)}')

        # TODO: eventually we want to separate out the utrade (aka
        # dark vlm prices) here and show them as an additional
        # graphic.
        clear_types = _tick_groups['clears']

        # XXX: if we wanted to iterate in "latest" (i.e. most
        # current) tick first order as an optimization where we only
        # update from the last tick from each type class.
        # last_clear_updated: bool = False
        # for typ, tick in reversed(lasts.items()):

        # iterate in FIFO order per frame
        for typ, tick in lasts.items():
            # update ohlc sampled price bars
            if (
                xpx < update_uppx
                or i_diff > 0
                or trigger_all
            ):
                chart.update_graphics_from_flow(
                    chart.name,
                    do_append=xpx < update_uppx,
                )

            price = tick.get('price')
            size = tick.get('size')

        # iterate in FIFO order per tick-frame
        for typ, tick in lasts.items():

            price = tick.get('price')
            size = tick.get('size')

            # compute max and min prices (including bid/ask) from
            # tick frames to determine the y-range for chart
            # auto-scaling.
            # TODO: we need a streaming minmax algo here, see def above.
            if liv:
                mx = max(price + tick_margin, mx)
                mn = min(price - tick_margin, mn)

            if typ in clear_types:

                # XXX: if we only wanted to update graphics from the
                # "current"/"latest received" clearing price tick
                # once (see alt iteration order above).
                # if last_clear_updated:
                #     continue

                # last_clear_updated = True
                # we only want to update graphics from the *last*
                # tick event that falls under the "clearing price"
                # set.

                # update price sticky(s)
                end = array[-1]
                last_price_sticky.update_from_data(
                ds.last_price_sticky.update_from_data(
                    *end[['index', 'close']]
                )

                if wap_in_history:
                    # update vwap overlay line
                    chart.update_graphics_from_flow(
                        'bar_wap',
                    )

                # update ohlc sampled price bars
                chart.update_ohlc_from_array(
                    chart.name,
                    array,
                )

            # L1 book label-line updates
            # XXX: is this correct for ib?
            # if ticktype in ('trade', 'last'):
            # if ticktype in ('last',): # 'size'):
            if typ in ('last',): # 'size'):

                label = {
                    l1.ask_label.fields['level']: l1.ask_label,
                    l1.bid_label.fields['level']: l1.bid_label,
                }.get(price)

                if (
                    label is not None
                    and liv
                ):
                    label.update_fields(
                        {'level': price, 'size': size}
                    )

                if label is not None:
                    label.update_fields({'level': price, 'size': size})

                if wap_in_history:
                    # update vwap overlay line
                    chart.update_curve_from_array('bar_wap', ohlcv.array)

                    # TODO: on trades should we be knocking down
                    # the relevant L1 queue?
                    # label.size -= size

            # elif ticktype in ('ask', 'asize'):
            elif typ in _tick_groups['asks']:
                l1.ask_label.update_fields({'level': price, 'size': size})

            # elif ticktype in ('bid', 'bsize'):
            elif typ in _tick_groups['bids']:
                l1.bid_label.update_fields({'level': price, 'size': size})

            elif (
                typ in _tick_groups['asks']
                # TODO: instead we could check if the price is in the
                # y-view-range?
                and liv
            ):
                l1.ask_label.update_fields({'level': price, 'size': size})

            elif (
                typ in _tick_groups['bids']
                # TODO: instead we could check if the price is in the
                # y-view-range?
                and liv
            ):
                l1.bid_label.update_fields({'level': price, 'size': size})

        # check for y-range re-size
        if (
            (mx > last_mx) or (mn < last_mn)
            and not chart._static_yrange == 'axis'
        ):
            # print(f'new y range: {(mn, mx)}')
            view._set_yrange(
                yrange=(mn, mx),
            )

        # check for y-range re-size
        if (
            (mx > vars['last_mx']) or (mn < vars['last_mn'])
            and not chart._static_yrange == 'axis'
            and liv
        ):
            main_vb = chart.view
            if (
                main_vb._ic is None
                or not main_vb._ic.is_set()
            ):
                main_vb._set_yrange(
                    # TODO: we should probably scale
                    # the view margin based on the size
                    # of the true range? This way you can
                    # slap in orders outside the current
                    # L1 (only) book range.
                    # range_margin=0.1,
                    yrange=(mn, mx),
                )

        last_mx, last_mn = mx, mn
        vars['last_mx'], vars['last_mn'] = mx, mn
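Numerically the y-range update pads the computed (mn, mx) extrema by a percentage of the true range on each side before handing it to ``_set_yrange()``; a sketch of that padding (the 0.06 default below is an assumed illustration of the ``range_margin`` parameter seen further down):

def padded_yrange(
    ylow: float,
    yhigh: float,
    range_margin: float = 0.06,  # assumed default value
) -> tuple[float, float]:
    # stay within a % of the "true range" on both sides
    diff = yhigh - ylow
    return (
        ylow - diff * range_margin,
        yhigh + diff * range_margin,
    )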

        # run synchronous update on all derived fsp subplots
        for name, subchart in linked.subplots.items():
            update_fsp_chart(
                subchart,
                subchart._shm,

                # XXX: do we really need separate names here?
                name,
                array_key=name,
            )
            subchart.cv._set_yrange()

        # TODO: all overlays on all subplots..

        # run synchronous update on all derived overlays
        for curve_name, flow in chart._flows.items():
            update_fsp_chart(
                chart,
                flow.shm,
                curve_name,
                array_key=curve_name,
            )
            # chart.view._set_yrange()

    # loop end

        # run synchronous update on all linked flows
        for curve_name, flow in chart._flows.items():
            # TODO: should the "main" (aka source) flow be special?
            if curve_name == chart.data_key:
                continue

            update_fsp_chart(
                chart,
                flow,
                curve_name,
                array_key=curve_name,
            )


async def display_symbol_data(

@ -479,8 +643,10 @@ async def display_symbol_data(
    # clear_on_next=True,
    # group_key=loading_sym_key,
    # )
    fqsn = '.'.join((sym, provider))

    async with open_feed(
        ['.'.join((sym, provider))],
        [fqsn],
        loglevel=loglevel,

        # limit to at least display's FPS

@ -500,8 +666,8 @@ async def display_symbol_data(
        f'step:1s '
    )

    linkedsplits = godwidget.linkedsplits
    linkedsplits._symbol = symbol
    linked = godwidget.linkedsplits
    linked._symbol = symbol

    # generate order mode side-pane UI
    # A ``FieldsForm`` form to configure order entry

@ -511,38 +677,38 @@ async def display_symbol_data(
    godwidget.pp_pane = pp_pane

    # create main OHLC chart
    chart = linkedsplits.plot_ohlc_main(
    chart = linked.plot_ohlc_main(
        symbol,
        bars,
        ohlcv,
        sidepane=pp_pane,
    )
    chart.default_view()
    chart._feeds[symbol.key] = feed
    chart.setFocus()

    # plot historical vwap if available
    wap_in_history = False

    if brokermod._show_wap_in_history:
    # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
    # if brokermod._show_wap_in_history:

        if 'bar_wap' in bars.dtype.fields:
            wap_in_history = True
            chart.draw_curve(
                name='bar_wap',
                data=bars,
                add_label=False,
            )
    #     if 'bar_wap' in bars.dtype.fields:
    #         wap_in_history = True
    #         chart.draw_curve(
    #             name='bar_wap',
    #             shm=ohlcv,
    #             color='default_light',
    #             add_label=False,
    #         )

    # size view to data once at outset
    chart.cv._set_yrange()

    # TODO: a data view api that makes this less shit
    chart._shm = ohlcv

    # NOTE: we must immediately tell Qt to show the OHLC chart
    # to avoid a race where the subplots get added/shown to
    # the linked set *before* the main price chart!
    linkedsplits.show()
    linkedsplits.focus()
    linked.show()
    linked.focus()
    await trio.sleep(0)

    vlm_chart: Optional[ChartPlotWidget] = None

@ -552,7 +718,7 @@ async def display_symbol_data(
    if has_vlm(ohlcv):
        vlm_chart = await ln.start(
            open_vlm_displays,
            linkedsplits,
            linked,
            ohlcv,
        )

@ -560,7 +726,7 @@ async def display_symbol_data(
    # from an input config.
    ln.start_soon(
        start_fsp_displays,
        linkedsplits,
        linked,
        ohlcv,
        loading_sym_key,
        loglevel,

@ -569,7 +735,7 @@ async def display_symbol_data(
    # start graphics update loop after receiving first live quote
    ln.start_soon(
        graphics_update_loop,
        linkedsplits,
        linked,
        feed.stream,
        ohlcv,
        wap_in_history,

@ -587,17 +753,18 @@ async def display_symbol_data(
    # let Qt run to render all widgets and make sure the
    # sidepanes line up vertically.
    await trio.sleep(0)
    linkedsplits.resize_sidepanes()
    linked.resize_sidepanes()

    # NOTE: we pop the volume chart from the subplots set so
    # that it isn't double rendered in the display loop
    # above since we do a maxmin calc on the volume data to
    # determine if auto-range adjustments should be made.
    linkedsplits.subplots.pop('volume', None)
    # linked.subplots.pop('volume', None)

    # TODO: make this not so shit XD
    # close group status
    sbar._status_groups[loading_sym_key][1]()

    # let the app run.. bby
    # linked.graphics_cycle()
    await trio.sleep_forever()


@ -343,7 +343,7 @@ class SelectRect(QtGui.QGraphicsRectItem):

        nbars = ixmx - ixmn + 1

        chart = self._chart
        data = chart._arrays[chart.name][ixmn:ixmx]
        data = chart._flows[chart.name].shm.array[ixmn:ixmx]

        if len(data):
            std = data['close'].std()


File diff suppressed because it is too large

@ -72,12 +72,17 @@ def has_vlm(ohlcv: ShmArray) -> bool:

def update_fsp_chart(
    chart: ChartPlotWidget,
    shm: ShmArray,
    flow,
    graphics_name: str,
    array_key: Optional[str],
    **kwargs,

) -> None:

    shm = flow.shm
    if not shm:
        return

    array = shm.array
    last_row = try_read(array)

@ -89,10 +94,10 @@ def update_fsp_chart(
    # update graphics
    # NOTE: this does a length check internally which allows it
    # staying above the last row check below..
    chart.update_curve_from_array(
    chart.update_graphics_from_flow(
        graphics_name,
        array,
        array_key=array_key or graphics_name,
        **kwargs,
    )

    # XXX: re: ``array_key``: fsp func names must be unique meaning we

@ -102,9 +107,6 @@ def update_fsp_chart(
    # read from last calculated value and update any label
    last_val_sticky = chart._ysticks.get(graphics_name)
    if last_val_sticky:
        # array = shm.array[array_key]
        # if len(array):
        #     value = array[-1]
        last = last_row[array_key]
        last_val_sticky.update_from_data(-1, last)

@ -242,21 +244,18 @@ async def run_fsp_ui(

        chart.draw_curve(
            name=name,
            data=shm.array,
            shm=shm,
            overlay=True,
            color='default_light',
            array_key=name,
            separate_axes=conf.get('separate_axes', False),
            **conf.get('chart_kwargs', {})
        )
        # specially store ref to shm for lookup in display loop
        chart._flows[name].shm = shm

    else:
        # create a new sub-chart widget for this fsp
        chart = linkedsplits.add_plot(
            name=name,
            array=shm.array,
            shm=shm,

            array_key=name,
            sidepane=sidepane,

@ -268,11 +267,6 @@ async def run_fsp_ui(
            **conf.get('chart_kwargs', {})
        )

        # XXX: ONLY for sub-chart fsps, overlays have their
        # data looked up from the chart's internal array set.
        # TODO: we must get a data view api going STAT!!
        chart._shm = shm

        # should **not** be the same sub-chart widget
        assert chart.name != linkedsplits.chart.name

@ -283,7 +277,7 @@ async def run_fsp_ui(
    # first UI update, usually from shm pushed history
    update_fsp_chart(
        chart,
        shm,
        chart._flows[array_key],
        name,
        array_key=array_key,
    )

@ -426,6 +420,7 @@ class FspAdmin:
            ) as (ctx, last_index),
            ctx.open_stream() as stream,
        ):

            # register output data
            self._registry[
                (fqsn, ns_path)

@ -438,6 +433,17 @@ class FspAdmin:
            started.set()

            # wait for graceful shutdown signal
            async with stream.subscribe() as stream:
                async for msg in stream:
                    if msg == 'update':
                        # if the chart isn't hidden try to update
                        # the data on screen.
                        if not self.linked.isHidden():
                            log.info(f'Re-syncing graphics for fsp: {ns_path}')
                            self.linked.graphics_cycle(trigger_all=True)
                    else:
                        log.info(f'recved unexpected fsp engine msg: {msg}')

            await complete.wait()

    async def start_engine_task(

@ -610,7 +616,7 @@ async def open_vlm_displays(
    shm = ohlcv
    chart = linked.add_plot(
        name='volume',
        array=shm.array,
        shm=shm,

        array_key='volume',
        sidepane=sidepane,

@ -649,11 +655,6 @@ async def open_vlm_displays(
    # chart.hideAxis('right')
    # chart.showAxis('left')

    # XXX: ONLY for sub-chart fsps, overlays have their
    # data looked up from the chart's internal array set.
    # TODO: we must get a data view api going STAT!!
    chart._shm = shm

    # send back new chart to caller
    task_status.started(chart)

@ -668,9 +669,9 @@ async def open_vlm_displays(

    last_val_sticky.update_from_data(-1, value)

    vlm_curve = chart.update_curve_from_array(
    vlm_curve = chart.update_graphics_from_flow(
        'volume',
        shm.array,
        # shm.array,
    )

    # size view to data once at outset

@ -745,19 +746,14 @@ async def open_vlm_displays(
        'dark_trade_rate',
    ]

    # add custom auto range handler
    dvlm_pi.vb._maxmin = partial(
    group_mxmn = partial(
        maxmin,
        # keep both regular and dark vlm in view
        names=fields + dvlm_rate_fields,
    )

    # TODO: is there a way to "sync" the dual axes such that only
    # one curve is needed?
    # hide the original vlm curve since the $vlm one is now
    # displayed and the curves are effectively the same minus
    # liquidity events (well at least on low OHLC periods - 1s).
    vlm_curve.hide()
    # add custom auto range handler
    dvlm_pi.vb._maxmin = group_mxmn

    # use slightly less light (than bracket) gray
    # for volume from "main exchange" and a more "bluey"

@ -783,21 +779,22 @@ async def open_vlm_displays(
            color = 'bracket'

        curve, _ = chart.draw_curve(
            # name='dolla_vlm',
            name=name,
            data=shm.array,
            shm=shm,
            array_key=name,
            overlay=pi,
            color=color,
            step_mode=step_mode,
            style=style,
            pi=pi,
        )

        # TODO: we need a better API to do this..
        # specially store ref to shm for lookup in display loop
        # since only a placeholder of `None` is entered in
        # ``.draw_curve()``.
        chart._flows[name].shm = shm
        flow = chart._flows[name]
        assert flow.plot is pi

    chart_curves(
        fields,

@ -825,6 +822,19 @@ async def open_vlm_displays(
        fr_shm,
    )

    # TODO: is there a way to "sync" the dual axes such that only
    # one curve is needed?
    # hide the original vlm curve since the $vlm one is now
    # displayed and the curves are effectively the same minus
    # liquidity events (well at least on low OHLC periods - 1s).
    vlm_curve.hide()
    chart.removeItem(vlm_curve)
    vflow = chart._flows['volume']
    vflow.render = False

    # avoid range sorting on volume once disabled
    chart.view.disable_auto_yrange()

    # Trade rate overlay
    # XXX: requires an additional overlay for
    # a trades-per-period (time) y-range.

@ -864,7 +874,10 @@ async def open_vlm_displays(
        style='dash',
    )

    for pi in (dvlm_pi, tr_pi):
    for pi in (
        dvlm_pi,
        tr_pi,
    ):
        for name, axis_info in pi.axes.items():
            # lol this sux XD
            axis = axis_info['item']

@ -873,10 +886,10 @@ async def open_vlm_displays(

    # built-in vlm fsps
    for target, conf in {
        tina_vwap: {
            'overlay': 'ohlc',  # overlays with OHLCV (main) chart
            'anchor': 'session',
        },
        # tina_vwap: {
        #     'overlay': 'ohlc',  # overlays with OHLCV (main) chart
        #     'anchor': 'session',
        # },
    }.items():
        started = await admin.open_fsp_chart(
            target,


@ -33,7 +33,8 @@ import numpy as np
import trio

from ..log import get_logger
from ._style import _min_points_to_show
from .._profile import pg_profile_enabled, ms_slower_then
# from ._style import _min_points_to_show
from ._editors import SelectRect
from . import _event

@ -318,6 +319,7 @@ async def handle_viewmode_mouse(
    ):
        # when in order mode, submit execution
        # msg.event.accept()
        # breakpoint()
        view.order_mode.submit_order()

@ -356,13 +358,13 @@ class ChartView(ViewBox):
    ):
        super().__init__(
            parent=parent,
            name=name,
            # TODO: look into the default view padding
            # support that might replace some of our
            # ``ChartPlotWidget._set_yrange()``
            # defaultPadding=0.,
            **kwargs
        )

        # for "known y-range style"
        self._static_yrange = static_yrange
        self._maxmin = None

@ -384,6 +386,34 @@ class ChartView(ViewBox):
        self.order_mode: bool = False

        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self._ic = None

    def start_ic(
        self,
    ) -> None:
        '''
        Signal the beginning of a click-drag interaction
        to any interested task waiters.

        '''
        if self._ic is None:
            self.chart.pause_all_feeds()
            self._ic = trio.Event()

    def signal_ic(
        self,
        *args,

    ) -> None:
        '''
        Signal the end of a click-drag interaction
        to any waiters.

        '''
        if self._ic:
            self._ic.set()
            self._ic = None
            self.chart.resume_all_feeds()
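The ``_ic`` ("interaction complete") ``trio.Event`` is the handshake that lets the quote-driven display task park itself while the user is click-dragging: the mouse handlers create the event on press and set it on release, and the update loop awaits it before resuming draws. The same gate in a self-contained sketch (the class name is illustrative):

from typing import Optional
import trio

class InteractionGate:
    def __init__(self) -> None:
        self._ic: Optional[trio.Event] = None

    def start(self) -> None:
        # mouse press / drag start
        if self._ic is None:
            self._ic = trio.Event()

    def finish(self) -> None:
        # drag release: wake any parked waiters
        if self._ic:
            self._ic.set()
            self._ic = None

    async def wait_if_interacting(self) -> None:
        # called from the graphics update task each cycle
        if self._ic:
            await self._ic.wait()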

    @asynccontextmanager
    async def open_async_input_handler(

@ -435,7 +465,8 @@ class ChartView(ViewBox):
        axis=None,
        relayed_from: ChartView = None,
    ):
        '''Override "center-point" location for scrolling.
        '''
        Override "center-point" location for scrolling.

        This is an override of the ``ViewBox`` method simply changing
        the center of the zoom to be the y-axis.

@ -453,15 +484,18 @@ class ChartView(ViewBox):

        # don't zoom more than the min points setting
        l, lbar, rbar, r = chart.bars_range()
        vl = r - l
        # vl = r - l

        if ev.delta() > 0 and vl <= _min_points_to_show:
            log.debug("Max zoom bruh...")
            return
        # if ev.delta() > 0 and vl <= _min_points_to_show:
        #     log.debug("Max zoom bruh...")
        #     return

        if ev.delta() < 0 and vl >= len(chart._arrays[chart.name]) + 666:
            log.debug("Min zoom bruh...")
            return
        # if (
        #     ev.delta() < 0
        #     and vl >= len(chart._flows[chart.name].shm.array) + 666
        # ):
        #     log.debug("Min zoom bruh...")
        #     return

        # actual scaling factor
        s = 1.015 ** (ev.delta() * -1 / 20)  # self.state['wheelScaleFactor'])

@ -535,7 +569,23 @@ class ChartView(ViewBox):

            self._resetTarget()
            self.scaleBy(s, focal)

            # XXX: without this it seems as though sometimes
            # when zooming in from far out (and maybe vice versa?)
            # the signal isn't being fired enough since if you pan
            # just after you'll see further downsampling code run
            # (pretty noticeable on the OHLC ds curve) but with this
            # that never seems to happen? Only question is how much this
            # "double work" is causing latency when these missing event
            # fires don't happen?
            self.maybe_downsample_graphics()

            self.sigRangeChangedManually.emit(mask)

            # self._ic.set()
            # self._ic = None
            # self.chart.resume_all_feeds()

            ev.accept()

    def mouseDragEvent(

@ -618,6 +668,11 @@ class ChartView(ViewBox):
            # XXX: WHY
            ev.accept()

            self.start_ic()
            # if self._ic is None:
            #     self.chart.pause_all_feeds()
            #     self._ic = trio.Event()

            if axis == 1:
                self.chart._static_yrange = 'axis'

@ -635,6 +690,12 @@ class ChartView(ViewBox):

            self.sigRangeChangedManually.emit(self.state['mouseEnabled'])

            if ev.isFinish():
                self.signal_ic()
                # self._ic.set()
                # self._ic = None
                # self.chart.resume_all_feeds()

        # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
        elif button & QtCore.Qt.RightButton:

@ -685,7 +746,7 @@ class ChartView(ViewBox):

        # flag to prevent triggering sibling charts from the same linked
        # set from recursion errors.
        autoscale_linked_plots: bool = True,
        autoscale_linked_plots: bool = False,
        name: Optional[str] = None,
        # autoscale_overlays: bool = False,

@ -698,6 +759,12 @@ class ChartView(ViewBox):
        data set.

        '''
        profiler = pg.debug.Profiler(
            msg=f'`ChartView._set_yrange()`: `{self.name}`',
            disabled=not pg_profile_enabled(),
            gt=ms_slower_then,
            delayed=True,
        )
        set_range = True
        chart = self._chart

@ -723,34 +790,51 @@ class ChartView(ViewBox):

        # calculate max, min y values in viewable x-range from data.
        # Make sure min bars/datums on screen is adhered.
        else:
            br = bars_range or chart.bars_range()
        # else:
        # TODO: eventually we should point to the
        # ``FlowsTable`` (or wtv) which should perform
        # the group operations?

        # flow = chart._flows[name or chart.name]
        # br = bars_range or chart.bars_range()
        # profiler(f'got bars range: {br}')

        # TODO: maybe should be a method on the
        # chart widget/item?
        if autoscale_linked_plots:
            # avoid recursion by sibling plots
            linked = self.linkedsplits
            plots = list(linked.subplots.copy().values())
            main = linked.chart
            if main:
                plots.append(main)

            for chart in plots:
                if chart and not chart._static_yrange:
                    chart.cv._set_yrange(
                        bars_range=br,
                        autoscale_linked_plots=False,
                    )
        # if False:
        # if autoscale_linked_plots:
        #     # avoid recursion by sibling plots
        #     linked = self.linkedsplits
        #     plots = list(linked.subplots.copy().values())
        #     main = linked.chart
        #     if main:
        #         plots.append(main)

        #     for chart in plots:
        #         if chart and not chart._static_yrange:
        #             chart.cv._set_yrange(
        #                 # bars_range=br,
        #                 autoscale_linked_plots=False,
        #             )
        #     profiler('autoscaled linked plots')

        if set_range:

            yrange = self._maxmin()
            if yrange is None:
                return
            if not yrange:
                # XXX: only compute the mxmn range
                # if none is provided as input!
                yrange = self._maxmin()

            if yrange is None:
                log.warning(f'No yrange provided for {self.name}!?')
                print(f"WTF NO YRANGE {self.name}")
                return

            ylow, yhigh = yrange

            profiler(f'maxmin(): {yrange}')

            # view margins: stay within a % of the "true range"
            diff = yhigh - ylow
            ylow = ylow - (diff * range_margin)

@ -764,9 +848,13 @@ class ChartView(ViewBox):
                yMax=yhigh,
            )
            self.setYRange(ylow, yhigh)
            profiler(f'set limits: {(ylow, yhigh)}')

        profiler.finish()

    def enable_auto_yrange(
        vb: ChartView,
        self,
        src_vb: Optional[ChartView] = None,

    ) -> None:
        '''

@ -774,13 +862,105 @@ class ChartView(ViewBox):
        based on data contents and ``ViewBox`` state.

        '''
        vb.sigXRangeChanged.connect(vb._set_yrange)
        if src_vb is None:
            src_vb = self

        # splitter(s) resizing
        src_vb.sigResized.connect(self._set_yrange)

        # TODO: a smarter way to avoid calling this needlessly?
        # 2 things i can think of:
        # - register downsample-able graphics specially and only
        #   iterate those.
        # - only register this when certain downsampleable graphics are
        #   "added to scene".
        src_vb.sigRangeChangedManually.connect(
            self.maybe_downsample_graphics
        )

        # mouse wheel doesn't emit XRangeChanged
        vb.sigRangeChangedManually.connect(vb._set_yrange)
        vb.sigResized.connect(vb._set_yrange)  # splitter(s) resizing
        src_vb.sigRangeChangedManually.connect(self._set_yrange)

    def disable_auto_yrange(
        self,
    ) -> None:

        self._chart._static_yrange = 'axis'
    def disable_auto_yrange(self) -> None:

        self.sigResized.disconnect(
            self._set_yrange,
        )
        self.sigRangeChangedManually.disconnect(
            self.maybe_downsample_graphics
        )
        self.sigRangeChangedManually.disconnect(
            self._set_yrange,
        )

        # self.sigXRangeChanged.disconnect(self._set_yrange)
        # self.sigXRangeChanged.disconnect(
        #     self.maybe_downsample_graphics
        # )

    def x_uppx(self) -> float:
        '''
        Return the "number of x units" within a single
        pixel currently being displayed for relevant
        graphics items which are our children.

        '''
        graphics = [f.graphics for f in self._chart._flows.values()]
        if not graphics:
            return 0

        for graphic in graphics:
            xvec = graphic.pixelVectors()[0]
            if xvec:
                return xvec.x()
        else:
            return 0
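``pixelVectors()`` returns the pair of scene-space vectors spanning a single pixel, so the x-component of the first one is exactly the "x units per pixel" figure used for downsample gating: a uppx of 16 means 16 datums land on one horizontal pixel. Extracting it is just:

# pixelVectors() -> (x_vector, y_vector), each a Point or None
xvec = graphic.pixelVectors()[0]
uppx = xvec.x() if xvec else 0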

    def maybe_downsample_graphics(self):

        profiler = pg.debug.Profiler(
            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
            disabled=not pg_profile_enabled(),

            # XXX: important to avoid not seeing underlying
            # ``.update_graphics_from_flow()`` nested profiling likely
            # due to the way delaying works and garbage collection of
            # the profiler in the delegated method calls.
            delayed=False,
            # gt=3,
            gt=ms_slower_then,
        )

        # TODO: a faster single-loop-iterator way of doing this XD
        chart = self._chart
        linked = self.linkedsplits
        plots = linked.subplots | {chart.name: chart}
        for chart_name, chart in plots.items():
            for name, flow in chart._flows.items():

                if (
                    not flow.render

                    # XXX: super important to be aware of this.
                    # or not flow.graphics.isVisible()
                ):
                    continue

                # pass in no array which will read and render from the last
                # passed array (normally provided by the display loop.)
                chart.update_graphics_from_flow(
                    name,
                    use_vr=True,

                    # gets passed down into graphics obj
                    profiler=profiler,
                )

                profiler(f'range change updated {chart_name}:{name}')

        profiler.finish()
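The ``linked.subplots | {chart.name: chart}`` merge needs the py3.9+ dict-union operator; an equivalent spelled-out iterator over every renderable flow in the linked set (relying on the ``_flows``/``render`` attributes used above):

def iter_render_flows(linked, chart):
    plots = dict(linked.subplots)
    plots[chart.name] = chart
    for chart_name, c in plots.items():
        for name, flow in c._flows.items():
            if not flow.render:
                continue
            yield chart_name, name, flow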
|
||||
|
|
|
@ -20,7 +20,7 @@ Lines for orders, alerts, L2.
|
|||
"""
|
||||
from functools import partial
|
||||
from math import floor
|
||||
from typing import Tuple, Optional, List, Callable
|
||||
from typing import Optional, Callable
|
||||
|
||||
import pyqtgraph as pg
|
||||
from pyqtgraph import Point, functions as fn
|
||||
|
@ -29,10 +29,8 @@ from PyQt5.QtCore import QPointF
|
|||
|
||||
from ._annotate import qgo_draw_markers, LevelMarker
|
||||
from ._anchors import (
|
||||
marker_right_points,
|
||||
vbr_left,
|
||||
right_axis,
|
||||
# pp_tight_and_right, # wanna keep it straight in the long run
|
||||
gpath_pin,
|
||||
)
|
||||
from ..calc import humanize
|
||||
|
@ -104,8 +102,8 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
# list of labels anchored at one of the 2 line endpoints
|
||||
# inside the viewbox
|
||||
self._labels: List[Label] = []
|
||||
self._markers: List[(int, Label)] = []
|
||||
self._labels: list[Label] = []
|
||||
self._markers: list[(int, Label)] = []
|
||||
|
||||
# whenever this line is moved trigger label updates
|
||||
self.sigPositionChanged.connect(self.on_pos_change)
|
||||
|
@ -124,7 +122,7 @@ class LevelLine(pg.InfiniteLine):
|
|||
self._y_incr_mult = 1 / chart.linked.symbol.tick_size
|
||||
self._right_end_sc: float = 0
|
||||
|
||||
def txt_offsets(self) -> Tuple[int, int]:
|
||||
def txt_offsets(self) -> tuple[int, int]:
|
||||
return 0, 0
|
||||
|
||||
@property
|
||||
|
@ -315,17 +313,6 @@ class LevelLine(pg.InfiniteLine):
|
|||
# TODO: enter labels edit mode
|
||||
print(f'double click {ev}')
|
||||
|
||||
def right_point(
|
||||
self,
|
||||
) -> float:
|
||||
|
||||
chart = self._chart
|
||||
l1_len = chart._max_l1_line_len
|
||||
ryaxis = chart.getAxis('right')
|
||||
up_to_l1_sc = ryaxis.pos().x() - l1_len
|
||||
|
||||
return up_to_l1_sc
|
||||
|
||||
def paint(
|
||||
self,
|
||||
|
||||
|
@ -345,7 +332,7 @@ class LevelLine(pg.InfiniteLine):
|
|||
vb_left, vb_right = self._endPoints
|
||||
vb = self.getViewBox()
|
||||
|
||||
line_end, marker_right, r_axis_x = marker_right_points(self._chart)
|
||||
line_end, marker_right, r_axis_x = self._chart.marker_right_points()
|
||||
|
||||
if self.show_markers and self.markers:
|
||||
|
||||
|
@ -411,7 +398,7 @@ class LevelLine(pg.InfiniteLine):
|
|||
def scene_endpoint(self) -> QPointF:
|
||||
|
||||
if not self._right_end_sc:
|
||||
line_end, _, _ = marker_right_points(self._chart)
|
||||
line_end, _, _ = self._chart.marker_right_points()
|
||||
self._right_end_sc = line_end - 10
|
||||
|
||||
return QPointF(self._right_end_sc, self.scene_y())
|
||||
|
@ -422,23 +409,23 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
) -> QtWidgets.QGraphicsPathItem:
|
||||
|
||||
self._marker = path
|
||||
self._marker.setPen(self.currentPen)
|
||||
self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
|
||||
# add path to scene
|
||||
self.getViewBox().scene().addItem(path)
|
||||
|
||||
self._marker = path
|
||||
|
||||
rsc = self.right_point()
|
||||
|
||||
self._marker.setPen(self.currentPen)
|
||||
self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
|
||||
# place to just-left of L1 labels
|
||||
rsc = self._chart.pre_l1_xs()[0]
|
||||
path.setPos(QPointF(rsc, self.scene_y()))
|
||||
|
||||
return path
|
||||
|
||||
def hoverEvent(self, ev):
|
||||
"""Mouse hover callback.
|
||||
'''
|
||||
Mouse hover callback.
|
||||
|
||||
"""
|
||||
'''
|
||||
cur = self._chart.linked.cursor
|
||||
|
||||
# hovered
|
||||
|
@ -614,7 +601,8 @@ def order_line(
|
|||
**line_kwargs,
|
||||
|
||||
) -> LevelLine:
|
||||
'''Convenience routine to add a line graphic representing an order
|
||||
'''
|
||||
Convenience routine to add a line graphic representing an order
|
||||
execution submitted to the EMS via the chart's "order mode".
|
||||
|
||||
'''
|
||||
|
@ -689,7 +677,6 @@ def order_line(
|
|||
|
||||
return f'{account}: '
|
||||
|
||||
|
||||
label.fields = {
|
||||
'size': size,
|
||||
'size_digits': 0,
|
||||
|
|
|
@ -17,7 +17,11 @@
|
|||
Super fast OHLC sampling graphics types.
|
||||
|
||||
"""
|
||||
from typing import List, Optional, Tuple
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Optional,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
|
@ -27,30 +31,29 @@ from PyQt5.QtCore import QLineF, QPointF
|
|||
# from numba import types as ntypes
|
||||
# from ..data._source import numba_ohlc_dtype
|
||||
|
||||
from .._profile import pg_profile_enabled
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._style import hcolor
|
||||
from ..log import get_logger
|
||||
from ._curve import FastAppendCurve
|
||||
from ._compression import ohlc_flatten
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import LinkedSplits
|
||||
|
||||
|
||||
def _mk_lines_array(
|
||||
data: List,
|
||||
size: int,
|
||||
elements_step: int = 6,
|
||||
) -> np.ndarray:
|
||||
"""Create an ndarray to hold lines graphics info.
|
||||
|
||||
"""
|
||||
return np.zeros_like(
|
||||
data,
|
||||
shape=(int(size), elements_step),
|
||||
dtype=object,
|
||||
)
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
def lines_from_ohlc(
|
||||
def bar_from_ohlc_row(
|
||||
row: np.ndarray,
|
||||
w: float
|
||||
) -> Tuple[QLineF]:
|
||||
w: float = 0.43
|
||||
|
||||
) -> tuple[QLineF]:
|
||||
'''
|
||||
Generate the minimal ``QLineF`` lines to construct a single
|
||||
OHLC "bar" for use in the "last datum" of a series.
|
||||
|
||||
'''
|
||||
open, high, low, close, index = row[
|
||||
['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
|
@@ -84,7 +87,7 @@ def lines_from_ohlc(
 @njit(
     # TODO: for now need to construct this manually for readonly arrays, see
     # https://github.com/numba/numba/issues/4511
-    # ntypes.Tuple((float64[:], float64[:], float64[:]))(
+    # ntypes.tuple((float64[:], float64[:], float64[:]))(
     #     numba_ohlc_dtype[::1],  # contiguous
     #     int64,
     #     optional(float64),

@@ -95,10 +98,12 @@ def path_arrays_from_ohlc(
     data: np.ndarray,
     start: int64,
+    bar_gap: float64 = 0.43,
+
-) -> np.ndarray:
-    """Generate an array of lines objects from input ohlc data.
-
-    """
+) -> np.ndarray:
+    '''
+    Generate an array of lines objects from input ohlc data.
+
+    '''
     size = int(data.shape[0] * 6)

     x = np.zeros(

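`path_arrays_from_ohlc` sizes its output as `data.shape[0] * 6` because each bar contributes six vertices to one flat polyline. A hedged, pure-numpy sketch of that layout (no numba, helper name assumed) producing arrays consumable by `pg.functions.arrayToQPath`:

# Hedged sketch of the x/y/connect layout: per bar, 6 vertices trace
# left-arm open -> body low -> body high -> right-arm close, and the
# `connect` mask is zeroed on the last vertex so consecutive bars
# don't join into one continuous line.
import numpy as np

def sketch_path_arrays(ohlc: np.ndarray, w: float = 0.43):
    n = ohlc.shape[0]
    x = np.empty(n * 6)
    y = np.empty(n * 6)
    c = np.ones(n * 6, dtype=bool)

    for k, row in enumerate(ohlc):
        i = row['index']
        s = k * 6
        x[s:s + 6] = (i - w, i, i, i, i, i + w)
        y[s:s + 6] = (
            row['open'], row['open'],    # left arm
            row['low'], row['high'],     # body
            row['close'], row['close'],  # right arm
        )
        c[s + 5] = False  # break the path before the next bar

    return x, y, c

The two extra vertical hops (open to low, high to close) overlap the body line at x = i, so they cost vertices but not visual correctness.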
@@ -152,26 +157,51 @@ def path_arrays_from_ohlc(


 def gen_qpath(
-    data,
-    start,  # XXX: do we need this?
-    w,
+    data: np.ndarray,
+    start: int = 0,  # XXX: do we need this?
+    # 0.5 is no overlap between arms, 1.0 is full overlap
+    w: float = 0.43,
+    path: Optional[QtGui.QPainterPath] = None,
+
 ) -> QtGui.QPainterPath:

-    profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
+    path_was_none = path is None

-    x, y, c = path_arrays_from_ohlc(data, start, bar_gap=w)
+    profiler = pg.debug.Profiler(
+        msg='gen_qpath ohlc',
+        disabled=not pg_profile_enabled(),
+        gt=ms_slower_then,
+    )
+
+    x, y, c = path_arrays_from_ohlc(
+        data,
+        start,
+        bar_gap=w,
+    )
     profiler("generate stream with numba")

     # TODO: numba the internals of this!
-    path = pg.functions.arrayToQPath(x, y, connect=c)
+    path = pg.functions.arrayToQPath(
+        x,
+        y,
+        connect=c,
+        path=path,
+    )
+
+    # avoid mem allocs if possible
+    if path_was_none:
+        path.reserve(path.capacity())
+
     profiler("generate path with arrayToQPath")

     return path


 class BarItems(pg.GraphicsObject):
-    """Price range bars graphics rendered from a OHLC sequence.
-    """
+    '''
+    "Price range" bars graphics rendered from a OHLC sampled sequence.
+
+    '''
     sigPlotChanged = QtCore.pyqtSignal(object)

     # 0.5 is no overlap between arms, 1.0 is full overlap

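The reworked `gen_qpath` threads a previously allocated `QPainterPath` back through `arrayToQPath(..., path=path)` and pins its buffer with `reserve(capacity())`, so steady-state redraws can rewrite the element buffer in place instead of reallocating. A minimal sketch of the same reuse pattern; the `path=` keyword mirrors the call in this diff (piker tracks a patched pyqtgraph), stock releases may not accept it:

# Hedged sketch of the allocation-reuse pattern used above: keep one
# QPainterPath alive and hand it back to arrayToQPath() on each redraw.
import numpy as np
import pyqtgraph as pg
from PyQt5 import QtGui
from typing import Optional

_cached_path: Optional[QtGui.QPainterPath] = None

def regen_path(x: np.ndarray, y: np.ndarray, c: np.ndarray) -> QtGui.QPainterPath:
    global _cached_path
    first_call = _cached_path is None
    # `path=` assumes the patched arrayToQPath() used in this changeset
    _cached_path = pg.functions.arrayToQPath(x, y, connect=c, path=_cached_path)
    if first_call:
        # pin the current buffer size so later rewrites keep the alloc
        _cached_path.reserve(_cached_path.capacity())
    return _cached_path
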
@@ -179,17 +209,26 @@ class BarItems(pg.GraphicsObject):

     def __init__(
         self,
-        # scene: 'QGraphicsScene',  # noqa
+        linked: LinkedSplits,
         plotitem: 'pg.PlotItem',  # noqa
         pen_color: str = 'bracket',
         last_bar_color: str = 'bracket',
+
+        name: Optional[str] = None,
+
     ) -> None:
         super().__init__()

+        self.linked = linked
         # XXX: for the mega-lulz increasing width here increases draw
         # latency... so probably don't do it until we figure that out.
+        self._color = pen_color
         self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
         self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
+        self._name = name
+
+        self._ds_line_xy: Optional[
+            tuple[np.ndarray, np.ndarray]
+        ] = None

         # NOTE: this prevents redraws on mouse interaction which is
         # a huge boon for avg interaction latency.

@@ -200,50 +239,79 @@ class BarItems(pg.GraphicsObject):
         # that mode?
         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

         # not sure if this is actually impoving anything but figured it
         # was worth a shot:
         # self.path.reserve(int(100e3 * 6))

-        self.path = QtGui.QPainterPath()
-
+        self._pi = plotitem
+        self.path = QtGui.QPainterPath()
+        self.fast_path = QtGui.QPainterPath()

-        self._xrange: Tuple[int, int]
-        self._yrange: Tuple[float, float]
+        self._xrange: tuple[int, int]
+        # self._yrange: tuple[float, float]
+        self._vrange = None

         # TODO: don't render the full backing array each time
         # self._path_data = None
-        self._last_bar_lines: Optional[Tuple[QLineF, ...]] = None
+        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

         # track the current length of drawable lines within the larger array
         self.start_index: int = 0
         self.stop_index: int = 0

+        # downsampler-line state
+        self._in_ds: bool = False
+        self._ds_line: Optional[FastAppendCurve] = None
+        self._dsi: tuple[int, int] = 0, 0
+        self._xs_in_px: float = 0
+
     def draw_from_data(
         self,
-        data: np.ndarray,
+        ohlc: np.ndarray,
         start: int = 0,
+
     ) -> QtGui.QPainterPath:
-        """Draw OHLC datum graphics from a ``np.ndarray``.
+        '''
+        Draw OHLC datum graphics from a ``np.ndarray``.

         This routine is usually only called to draw the initial history.
-        """
-        hist, last = data[:-1], data[-1]

+        '''
+        hist, last = ohlc[:-1], ohlc[-1]
         self.path = gen_qpath(hist, start, self.w)

         # save graphics for later reference and keep track
         # of current internal "last index"
-        # self.start_index = len(data)
-        index = data['index']
+        # self.start_index = len(ohlc)
+        index = ohlc['index']
         self._xrange = (index[0], index[-1])
-        self._yrange = (
-            np.nanmax(data['high']),
-            np.nanmin(data['low']),
-        )
+        # self._yrange = (
+        #     np.nanmax(ohlc['high']),
+        #     np.nanmin(ohlc['low']),
+        # )

         # up to last to avoid double draw of last bar
-        self._last_bar_lines = lines_from_ohlc(last, self.w)
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

+        x, y = self._ds_line_xy = ohlc_flatten(ohlc)
+
+        # TODO: figuring out the most optimial size for the ideal
+        # curve-path by,
+        # - calcing the display's max px width `.screen()`
+        # - drawing a curve and figuring out it's capacity:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
+        # - reserving that cap for each curve-mapped-to-shm with
+
+        # - leveraging clearing when needed to redraw the entire
+        #   curve that does not release mem allocs:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#clear
+        curve = FastAppendCurve(
+            y=y,
+            x=x,
+            name='OHLC',
+            color=self._color,
+        )
+        curve.hide()
+        self._pi.addItem(curve)
+        self._ds_line = curve
+
+        # self._ds_xrange = (index[0], index[-1])

         # trigger render
         # https://doc.qt.io/qt-5/qgraphicsitem.html#update

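`draw_from_data` now also pre-builds the downsample line by flattening the OHLC struct-array into a plain (x, y) curve via `ohlc_flatten`. The `._compression` implementation isn't shown in this changeset, so the following is only a hedged sketch of the general idea; the 4-points-per-bar layout and the fractional offsets are assumptions:

# Hedged sketch of an OHLC "flattening" similar in spirit to
# ._compression.ohlc_flatten(): expand each bar into its 4 price
# points (o, h, l, c) spread across the bar's x-index so the result
# can be drawn as a single line curve.
import numpy as np

def sketch_ohlc_flatten(ohlc: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    index = ohlc['index']
    # 4 evenly spaced sub-steps per bar, one per price point (assumed)
    x = (index[:, None] + np.array([0.0, 0.25, 0.5, 0.75])).ravel()
    y = np.stack(
        [ohlc['open'], ohlc['high'], ohlc['low'], ohlc['close']],
        axis=1,
    ).ravel()
    return x, y
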
@@ -251,78 +319,305 @@ class BarItems(pg.GraphicsObject):

         return self.path

-    def update_from_array(
+    def x_uppx(self) -> int:
+        if self._ds_line:
+            return self._ds_line.x_uppx()
+        else:
+            return 0
+
+    # def update_from_array(
+    #     self,
+
+    #     # full array input history
+    #     ohlc: np.ndarray,
+
+    #     # pre-sliced array data that's "in view"
+    #     ohlc_iv: np.ndarray,
+
+    #     view_range: Optional[tuple[int, int]] = None,
+    #     profiler: Optional[pg.debug.Profiler] = None,
+
+    # ) -> None:
+    #     '''
+    #     Update the last datum's bar graphic from input data array.
+
+    #     This routine should be interface compatible with
+    #     ``pg.PlotCurveItem.setData()``. Normally this method in
+    #     ``pyqtgraph`` seems to update all the data passed to the
+    #     graphics object, and then update/rerender, but here we're
+    #     assuming the prior graphics havent changed (OHLC history rarely
+    #     does) so this "should" be simpler and faster.
+
+    #     This routine should be made (transitively) as fast as possible.
+
+    #     '''
+    #     profiler = profiler or pg.debug.Profiler(
+    #         disabled=not pg_profile_enabled(),
+    #         gt=ms_slower_then,
+    #         delayed=True,
+    #     )
+
+    #     # index = self.start_index
+    #     istart, istop = self._xrange
+    #     # ds_istart, ds_istop = self._ds_xrange
+
+    #     index = ohlc['index']
+    #     first_index, last_index = index[0], index[-1]
+
+    #     # length = len(ohlc)
+    #     # prepend_length = istart - first_index
+    #     # append_length = last_index - istop
+
+    #     # ds_prepend_length = ds_istart - first_index
+    #     # ds_append_length = last_index - ds_istop
+
+    #     flip_cache = False
+
+    #     x_gt = 16
+    #     if self._ds_line:
+    #         uppx = self._ds_line.x_uppx()
+    #     else:
+    #         uppx = 0
+
+    #     should_line = self._in_ds
+    #     if (
+    #         self._in_ds
+    #         and uppx < x_gt
+    #     ):
+    #         should_line = False
+
+    #     elif (
+    #         not self._in_ds
+    #         and uppx >= x_gt
+    #     ):
+    #         should_line = True
+
+    #     profiler('ds logic complete')
+
+    #     if should_line:
+    #         # update the line graphic
+    #         # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+    #         x, y = self._ds_line_xy = ohlc_flatten(ohlc)
+    #         x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+    #         profiler('flattening bars to line')
+
+    #         # TODO: we should be diffing the amount of new data which
+    #         # needs to be downsampled. Ideally we actually are just
+    #         # doing all the ds-ing in sibling actors so that the data
+    #         # can just be read and rendered to graphics on events of our
+    #         # choice.
+    #         # diff = do_diff(ohlc, new_bit)
+    #         curve = self._ds_line
+    #         curve.update_from_array(
+    #             x=x,
+    #             y=y,
+    #             x_iv=x_iv,
+    #             y_iv=y_iv,
+    #             view_range=None,  # hack
+    #             profiler=profiler,
+    #         )
+    #         profiler('updated ds line')
+
+    #         if not self._in_ds:
+    #             # hide bars and show line
+    #             self.hide()
+    #             # XXX: is this actually any faster?
+    #             # self._pi.removeItem(self)
+
+    #             # TODO: a `.ui()` log level?
+    #             log.info(
+    #                 f'downsampling to line graphic {self._name}'
+    #             )
+
+    #             # self._pi.addItem(curve)
+    #             curve.show()
+    #             curve.update()
+    #             self._in_ds = True
+
+    #         # stop here since we don't need to update bars path any more
+    #         # as we delegate to the downsample line with updates.
+
+    #     else:
+    #         # we should be in bars mode
+
+    #         if self._in_ds:
+    #             # flip back to bars graphics and hide the downsample line.
+    #             log.info(f'showing bars graphic {self._name}')
+
+    #             curve = self._ds_line
+    #             curve.hide()
+    #             # self._pi.removeItem(curve)
+
+    #             # XXX: is this actually any faster?
+    #             # self._pi.addItem(self)
+    #             self.show()
+    #             self._in_ds = False
+
+    #         # generate in_view path
+    #         self.path = gen_qpath(
+    #             ohlc_iv,
+    #             0,
+    #             self.w,
+    #             # path=self.path,
+    #         )
+
+    #         # TODO: to make the downsampling faster
+    #         # - allow mapping only a range of lines thus only drawing as
+    #         #   many bars as exactly specified.
+    #         # - move ohlc "flattening" to a shmarr
+    #         # - maybe move all this embedded logic to a higher
+    #         #   level type?
+
+    #         # if prepend_length:
+    #         #     # new history was added and we need to render a new path
+    #         #     prepend_bars = ohlc[:prepend_length]
+
+    #         # if ds_prepend_length:
+    #         #     ds_prepend_bars = ohlc[:ds_prepend_length]
+    #         #     pre_x, pre_y = ohlc_flatten(ds_prepend_bars)
+    #         #     fx = np.concatenate((pre_x, fx))
+    #         #     fy = np.concatenate((pre_y, fy))
+    #         #     profiler('ds line prepend diff complete')
+
+    #         # if append_length:
+    #         #     # generate new graphics to match provided array
+    #         #     # path appending logic:
+    #         #     # we need to get the previous "current bar(s)" for the time step
+    #         #     # and convert it to a sub-path to append to the historical set
+    #         #     # new_bars = ohlc[istop - 1:istop + append_length - 1]
+    #         #     append_bars = ohlc[-append_length - 1:-1]
+    #         #     # print(f'ohlc bars to append size: {append_bars.size}\n')
+
+    #         # if ds_append_length:
+    #         #     ds_append_bars = ohlc[-ds_append_length - 1:-1]
+    #         #     post_x, post_y = ohlc_flatten(ds_append_bars)
+    #         #     print(
+    #         #         f'ds curve to append sizes: {(post_x.size, post_y.size)}'
+    #         #     )
+    #         #     fx = np.concatenate((fx, post_x))
+    #         #     fy = np.concatenate((fy, post_y))
+
+    #         #     profiler('ds line append diff complete')
+
+    #     profiler('array diffs complete')
+
+    #     # does this work?
+    #     last = ohlc[-1]
+    #     # fy[-1] = last['close']
+
+    #     # # incremental update and cache line datums
+    #     # self._ds_line_xy = fx, fy
+
+    #     # maybe downsample to line
+    #     # ds = self.maybe_downsample()
+    #     # if ds:
+    #     #     # if we downsample to a line don't bother with
+    #     #     # any more path generation / updates
+    #     #     self._ds_xrange = first_index, last_index
+    #     #     profiler('downsampled to line')
+    #     #     return
+
+    #     # print(in_view.size)
+
+    #     # if self.path:
+    #     #     self.path = path
+    #     #     self.path.reserve(path.capacity())
+    #     #     self.path.swap(path)
+
+    #     # path updates
+    #     # if prepend_length:
+    #     #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
+    #     #     # y value not matching the first value from
+    #     #     # ohlc[prepend_length + 1] ???
+    #     #     prepend_path = gen_qpath(prepend_bars, 0, self.w)
+    #     #     old_path = self.path
+    #     #     self.path = prepend_path
+    #     #     self.path.addPath(old_path)
+    #     #     profiler('path PREPEND')
+
+    #     # if append_length:
+    #     #     append_path = gen_qpath(append_bars, 0, self.w)
+
+    #     #     self.path.moveTo(
+    #     #         float(istop - self.w),
+    #     #         float(append_bars[0]['open'])
+    #     #     )
+    #     #     self.path.addPath(append_path)
+
+    #     #     profiler('path APPEND')
+    #     #     fp = self.fast_path
+    #     #     if fp is None:
+    #     #         self.fast_path = append_path
+
+    #     #     else:
+    #     #         fp.moveTo(
+    #     #             float(istop - self.w), float(new_bars[0]['open'])
+    #     #         )
+    #     #         fp.addPath(append_path)
+
+    #     #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
+    #     #     flip_cache = True
+
+    #     self._xrange = first_index, last_index
+
+    #     # trigger redraw despite caching
+    #     self.prepareGeometryChange()
+
+    #     self.draw_last(last)
+
+    #     # # generate new lines objects for updatable "current bar"
+    #     # self._last_bar_lines = bar_from_ohlc_row(last, self.w)
+
+    #     # # last bar update
+    #     # i, o, h, l, last, v = last[
+    #     #     ['index', 'open', 'high', 'low', 'close', 'volume']
+    #     # ]
+    #     # # assert i == self.start_index - 1
+    #     # # assert i == last_index
+    #     # body, larm, rarm = self._last_bar_lines
+
+    #     # # XXX: is there a faster way to modify this?
+    #     # rarm.setLine(rarm.x1(), last, rarm.x2(), last)
+
+    #     # # writer is responsible for changing open on "first" volume of bar
+    #     # larm.setLine(larm.x1(), o, larm.x2(), o)
+
+    #     # if l != h:  # noqa
+
+    #     #     if body is None:
+    #     #         body = self._last_bar_lines[0] = QLineF(i, l, i, h)
+    #     #     else:
+    #     #         # update body
+    #     #         body.setLine(i, l, i, h)
+
+    #     # # XXX: pretty sure this is causing an issue where the bar has
+    #     # # a large upward move right before the next sample and the body
+    #     # # is getting set to None since the next bar is flat but the shm
+    #     # # array index update wasn't read by the time this code runs. Iow
+    #     # # we're doing this removal of the body for a bar index that is
+    #     # # now out of date / from some previous sample. It's weird
+    #     # # though because i've seen it do this to bars i - 3 back?
+
+    #     profiler('last bar set')
+
+    #     self.update()
+    #     profiler('.update()')
+
+    #     if flip_cache:
+    #         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+
+    #     # profiler.finish()

+    def draw_last(
         self,
-        array: np.ndarray,
-        just_history=False,
+        last: np.ndarray,
+
     ) -> None:
         """Update the last datum's bar graphic from input data array.

         This routine should be interface compatible with
         ``pg.PlotCurveItem.setData()``. Normally this method in
         ``pyqtgraph`` seems to update all the data passed to the
         graphics object, and then update/rerender, but here we're
         assuming the prior graphics havent changed (OHLC history rarely
         does) so this "should" be simpler and faster.

         This routine should be made (transitively) as fast as possible.
         """
-        # index = self.start_index
-        istart, istop = self._xrange
-
-        index = array['index']
-        first_index, last_index = index[0], index[-1]
-
-        # length = len(array)
-        prepend_length = istart - first_index
-        append_length = last_index - istop
-
-        flip_cache = False
-
-        # TODO: allow mapping only a range of lines thus
-        # only drawing as many bars as exactly specified.
-
-        if prepend_length:
-
-            # new history was added and we need to render a new path
-            new_bars = array[:prepend_length]
-            prepend_path = gen_qpath(new_bars, 0, self.w)
-
-            # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
-            # y value not matching the first value from
-            # array[prepend_length + 1] ???
-
-            # update path
-            old_path = self.path
-            self.path = prepend_path
-            self.path.addPath(old_path)
-
-            # trigger redraw despite caching
-            self.prepareGeometryChange()
-
-        if append_length:
-            # generate new lines objects for updatable "current bar"
-            self._last_bar_lines = lines_from_ohlc(array[-1], self.w)
-
-            # generate new graphics to match provided array
-            # path appending logic:
-            # we need to get the previous "current bar(s)" for the time step
-            # and convert it to a sub-path to append to the historical set
-            # new_bars = array[istop - 1:istop + append_length - 1]
-            new_bars = array[-append_length - 1:-1]
-            append_path = gen_qpath(new_bars, 0, self.w)
-            self.path.moveTo(float(istop - self.w), float(new_bars[0]['open']))
-            self.path.addPath(append_path)
-
-            # trigger redraw despite caching
-            self.prepareGeometryChange()
-            self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
-            flip_cache = True
-
-        self._xrange = first_index, last_index
+        # generate new lines objects for updatable "current bar"
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

         # last bar update
-        i, o, h, l, last, v = array[-1][
+        i, o, h, l, last, v = last[
             ['index', 'open', 'high', 'low', 'close', 'volume']
         ]
         # assert i == self.start_index - 1

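Although commented out, the block above preserves the bars-to-line switching rule: compare the x-axis units-per-pixel (`uppx`) against the `x_gt = 16` threshold, with the current mode (`self._in_ds`) acting as hysteresis so the graphic doesn't flap right at the boundary. Distilled into a hedged standalone sketch:

# Hedged sketch of the downsample toggle kept in the commented-out
# method above: when each screen pixel spans more than `x_gt` x-units
# (bars are sub-pixel) flip to the flattened line, and flip back once
# zoomed in again. `in_ds` carries the current mode between calls.
def should_use_line(
    uppx: float,      # x data-units per screen pixel
    in_ds: bool,      # currently showing the downsampled line?
    x_gt: float = 16,
) -> bool:
    if in_ds and uppx < x_gt:
        return False  # zoomed in enough: switch back to bars
    elif not in_ds and uppx >= x_gt:
        return True   # too dense for bars: switch to the line
    return in_ds      # no threshold crossed: keep current mode
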
@@ -351,11 +646,6 @@ class BarItems(pg.GraphicsObject):
             # now out of date / from some previous sample. It's weird
             # though because i've seen it do this to bars i - 3 back?

-        self.update()
-
-        if flip_cache:
-            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-
     def boundingRect(self):
         # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect

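`draw_last` now receives just the last row and mutates the cached `QLineF`s in place with `setLine()`, leaving every historical path untouched; the heavy prepend/append path surgery above is deleted outright. A hedged sketch of that per-tick update (the helper name is illustrative):

# Hedged sketch of the in-place "last bar" update: rather than
# regenerating any QPainterPath, mutate the three cached QLineF
# segments, which is cheap enough to run on every quote tick.
from PyQt5.QtCore import QLineF

def sketch_update_last_bar(
    lines: tuple[QLineF, QLineF, QLineF],  # (body, larm, rarm)
    row,  # structured last-datum row: index/open/high/low/close
) -> None:
    body, larm, rarm = lines
    i, o, h, l, c = (
        row['index'], row['open'], row['high'], row['low'], row['close'],
    )
    # right arm tracks the live close price
    rarm.setLine(rarm.x1(), c, rarm.x2(), c)
    # left arm pins the bar's open (set by the feed writer)
    larm.setLine(larm.x1(), o, larm.x2(), o)
    if l != h:  # noqa: E741 - only draw a body when there's a range
        body.setLine(i, l, i, h)
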
@@ -373,16 +663,31 @@ class BarItems(pg.GraphicsObject):
         # apparently this a lot faster says the docs?
         # https://doc.qt.io/qt-5/qpainterpath.html#controlPointRect
         hb = self.path.controlPointRect()
-        hb_tl, hb_br = hb.topLeft(), hb.bottomRight()
+        hb_tl, hb_br = (
+            hb.topLeft(),
+            hb.bottomRight(),
+        )
+
+        # fp = self.fast_path
+        # if fp:
+        #     fhb = fp.controlPointRect()
+        #     print((hb_tl, hb_br))
+        #     print(fhb)
+        #     hb_tl, hb_br = (
+        #         fhb.topLeft() + hb.topLeft(),
+        #         fhb.bottomRight() + hb.bottomRight(),
+        #     )

         # need to include last bar height or BR will be off
         mx_y = hb_br.y()
         mn_y = hb_tl.y()

-        body_line = self._last_bar_lines[0]
-        if body_line:
-            mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
-            mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
+        last_lines = self._last_bar_lines
+        if last_lines:
+            body_line = self._last_bar_lines[0]
+            if body_line:
+                mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
+                mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))

         return QtCore.QRectF(

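The `boundingRect` override builds its rect from `path.controlPointRect()`, which the Qt docs linked above describe as faster than `QPainterPath.boundingRect()`, then stretches the y-extents to also cover the mutable last-bar body. A condensed, hedged sketch of the pattern:

# Hedged sketch of the boundingRect pattern above: take the cheap
# controlPointRect() of the history path and widen its y-range with
# the last bar's body so the item never clips the live bar.
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import QLineF

def sketch_bounding_rect(
    path: QtGui.QPainterPath,
    body_line: QLineF,
) -> QtCore.QRectF:
    hb = path.controlPointRect()
    tl, br = hb.topLeft(), hb.bottomRight()

    mx_y = max(br.y(), body_line.y1(), body_line.y2())
    mn_y = min(tl.y(), body_line.y1(), body_line.y2())

    return QtCore.QRectF(tl.x(), mn_y, br.x() - tl.x(), mx_y - mn_y)
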
@@ -405,9 +710,16 @@ class BarItems(pg.GraphicsObject):
         p: QtGui.QPainter,
         opt: QtWidgets.QStyleOptionGraphicsItem,
         w: QtWidgets.QWidget
+
     ) -> None:

-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
+        if self._in_ds:
+            return
+
+        profiler = pg.debug.Profiler(
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )

         # p.setCompositionMode(0)

@@ -423,4 +735,8 @@ class BarItems(pg.GraphicsObject):

         p.setPen(self.bars_pen)
         p.drawPath(self.path)
-        profiler('draw history path')
+        profiler(f'draw history path: {self.path.capacity()}')
+
+        # if self.fast_path:
+        #     p.drawPath(self.fast_path)
+        #     profiler('draw fast path')

@@ -14,9 +14,10 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 Qt UI styling.
-"""
+
+'''
 from typing import Optional, Dict
 import math

@@ -202,8 +203,6 @@ _xaxis_at = 'bottom'
 # charting config
 CHART_MARGINS = (0, 0, 2, 2)
 _min_points_to_show = 6
-_bars_to_left_in_follow_mode = int(61*6)
-_bars_from_right_in_follow_mode = round(0.16 * _bars_to_left_in_follow_mode)
 _tina_mode = False

@@ -122,7 +122,8 @@ def optschain(config, symbol, date, rate, test):
 @cli.command()
 @click.option(
     '--profile',
-    is_flag=True,
+    '-p',
+    default=None,
     help='Enable pyqtgraph profiling'
 )
 @click.option(
@@ -133,9 +134,16 @@ def optschain(config, symbol, date, rate, test):
 @click.argument('symbol', required=True)
 @click.pass_obj
 def chart(config, symbol, profile, pdb):
-    """Start a real-time chartng UI
-    """
-    from .. import _profile
+    '''
+    Start a real-time chartng UI
+
+    '''
+    # eg. ``--profile 3`` reports profiling for anything slower then 3 ms.
+    if profile is not None:
+        from .. import _profile
+        _profile._pg_profile = True
+        _profile.ms_slower_then = float(profile)

     from ._app import _main

     if '.' not in symbol:
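`--profile` thus changes from a boolean flag to an optional float: the value is written into `_profile.ms_slower_then` and consumed by the `pg.debug.Profiler(gt=...)` calls added throughout this changeset, so only steps slower than the threshold get reported. A hedged sketch of that plumbing; the `gt` keyword mirrors the calls in this diff against piker's pinned pyqtgraph, stock releases may not accept it:

# Hedged sketch: a ms threshold set from the CLI gates profiler output,
# e.g. a hypothetical `chart --profile 3 <symbol>` would log only
# render steps slower than 3ms.
import pyqtgraph as pg

ms_slower_then: float = 3.0  # set from the --profile CLI value
_pg_profile: bool = True

def profiled_step() -> None:
    profiler = pg.debug.Profiler(
        disabled=not _pg_profile,
        gt=ms_slower_then,  # assumes the patched Profiler in this diff
    )
    # ... do some render work ...
    profiler('render step done')  # emitted only past the `gt` threshold
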
@@ -145,8 +153,6 @@ def chart(config, symbol, profile, pdb):
         ))
         return

-    # toggle to enable profiling
-    _profile._pg_profile = profile
-
     # global opts
     brokernames = config['brokers']

@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -30,11 +30,13 @@ orig_win_id = t.find_focused().window
 # for tws
 win_names: list[str] = [
     'Interactive Brokers',  # tws running in i3
-    'IB Gateway.',  # gw running in i3
+    'IB Gateway',  # gw running in i3
+    # 'IB',  # gw running in i3 (newer version?)
 ]

 for name in win_names:
-    results = t.find_named(name)
+    results = t.find_titled(name)
+    print(f'results for {name}: {results}')
     if results:
         con = results[0]
         print(f'Resetting data feed for {name}')

@@ -47,22 +49,32 @@ for name in win_names:
 # https://github.com/rr-/pyxdotool
 # https://github.com/ShaneHutter/pyxdotool
 # https://github.com/cphyc/pyxdotool
-subprocess.call([
-    'xdotool',
-    'windowactivate', '--sync', win_id,
-
-    # move mouse to bottom left of window (where there should
-    # be nothing to click).
-    'mousemove_relative', '--sync', str(w-4), str(h-4),
+# TODO: only run the reconnect (2nd) kc on a detected
+# disconnect?
+for key_combo, timeout in [
+    # only required if we need a connection reset.
+    # ('ctrl+alt+r', 12),
+    # data feed reset.
+    ('ctrl+alt+f', 6)
+]:
+    subprocess.call([
+        'xdotool',
+        'windowactivate', '--sync', win_id,

-    # NOTE: we may need to stick a `--retry 3` in here..
-    'click', '--window', win_id, '--repeat', '3', '1',
+        # move mouse to bottom left of window (where there should
+        # be nothing to click).
+        'mousemove_relative', '--sync', str(w-4), str(h-4),

-    # hackzorzes
-    'key', 'ctrl+alt+f',
-],
-    timeout=1,
-)
+        # NOTE: we may need to stick a `--retry 3` in here..
+        'click', '--window', win_id,
+        '--repeat', '3', '1',
+
+        # hackzorzes
+        'key', key_combo,
+        ],
+        timeout=timeout,
+    )

 # re-activate and focus original window
 subprocess.call([