Compare commits
153 Commits
310_plus ... mkts_backu
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | cc50932c4f | |
Tyler Goodlet | c62d3dd82c | |
Tyler Goodlet | 7d664c55ff | |
Tyler Goodlet | 024d3661a0 | |
Tyler Goodlet | 9befc1fb1a | |
Tyler Goodlet | 54a1397d2c | |
Tyler Goodlet | 08d7f925b9 | |
Tyler Goodlet | 25891c6e51 | |
Tyler Goodlet | 3a3baca9bc | |
Tyler Goodlet | 768d2d997f | |
Tyler Goodlet | 98da4342e7 | |
Tyler Goodlet | dd6e2604d3 | |
Tyler Goodlet | e2d91f274f | |
Tyler Goodlet | addb0a4928 | |
Tyler Goodlet | 098c4f25fc | |
Tyler Goodlet | 9c88b26d85 | |
Tyler Goodlet | 8686cf99fe | |
Tyler Goodlet | f9ec00e1ae | |
Tyler Goodlet | 25a3a123ec | |
Tyler Goodlet | 54466db554 | |
Tyler Goodlet | 6f06f646cf | |
Tyler Goodlet | 65d4c317c6 | |
Tyler Goodlet | 97439e882c | |
Tyler Goodlet | b5d566fed5 | |
Tyler Goodlet | d3adb6dff7 | |
Tyler Goodlet | 22c81eb5bf | |
Tyler Goodlet | 41a8c23e44 | |
Tyler Goodlet | 6bb1f06813 | |
Tyler Goodlet | 72de184c08 | |
Tyler Goodlet | 319a6fb66a | |
Tyler Goodlet | e7b1d77b08 | |
Tyler Goodlet | 3c5a799e97 | |
Tyler Goodlet | 6e86904032 | |
Tyler Goodlet | 544c6c3180 | |
Tyler Goodlet | 4d2b5f9196 | |
Tyler Goodlet | 4aaf5a1f8b | |
Tyler Goodlet | 0cb05ef868 | |
Tyler Goodlet | 0676f3271c | |
Tyler Goodlet | 34635c21a9 | |
Tyler Goodlet | 129ec9fc19 | |
Tyler Goodlet | d2b42a46e6 | |
Tyler Goodlet | fac1f86891 | |
Tyler Goodlet | 36b13012b4 | |
Tyler Goodlet | 9fcb1d3501 | |
Tyler Goodlet | 96182c37f1 | |
Tyler Goodlet | 7f350569df | |
Tyler Goodlet | 48ed07aa99 | |
Tyler Goodlet | 7e5c8f4417 | |
Tyler Goodlet | 01f06976ed | |
Tyler Goodlet | 8b89ba6111 | |
Tyler Goodlet | ba797fcbee | |
Tyler Goodlet | 3b96b52474 | |
Tyler Goodlet | 4af941566a | |
Tyler Goodlet | 01b594e828 | |
Tyler Goodlet | 197cad17a2 | |
Tyler Goodlet | fb2f1fa488 | |
Tyler Goodlet | 532da9c590 | |
Tyler Goodlet | e8c261279d | |
Tyler Goodlet | e9e76e0626 | |
Tyler Goodlet | df6f9b1c17 | |
Tyler Goodlet | 8e8c1c14ce | |
Tyler Goodlet | 4c6e5598f2 | |
Tyler Goodlet | 7a959e756d | |
Tyler Goodlet | c0d1facf3b | |
Tyler Goodlet | d03cd23571 | |
Tyler Goodlet | a8cb6c2056 | |
Tyler Goodlet | e9ed070cbf | |
Tyler Goodlet | cf457112dd | |
Tyler Goodlet | fa8e4f7c27 | |
Tyler Goodlet | 990417b172 | |
Tyler Goodlet | 5d09d8258f | |
Tyler Goodlet | 3e72b59658 | |
Tyler Goodlet | a3b282dffe | |
Tyler Goodlet | 23a368b5e5 | |
Tyler Goodlet | a4dd6c81dc | |
Tyler Goodlet | a2ef955690 | |
Tyler Goodlet | 6d9a94065d | |
Tyler Goodlet | c976bff40c | |
Tyler Goodlet | 11bda4f9b4 | |
Tyler Goodlet | 803c65bc88 | |
Tyler Goodlet | cf7163194c | |
Tyler Goodlet | afe41236ff | |
Tyler Goodlet | b4d35496f7 | |
Tyler Goodlet | c5be35dad4 | |
Tyler Goodlet | e33d0aac15 | |
Tyler Goodlet | 02ba7b6b96 | |
Tyler Goodlet | 5775c5fe71 | |
Tyler Goodlet | 820dfff08a | |
Tyler Goodlet | cf589c840d | |
Tyler Goodlet | bbaba71465 | |
Tyler Goodlet | 73aebdfa16 | |
Tyler Goodlet | d9862a4962 | |
Tyler Goodlet | de599233af | |
Tyler Goodlet | 855d02ef5a | |
Tyler Goodlet | 7fbd4a95e3 | |
Tyler Goodlet | 847c95d277 | |
Tyler Goodlet | 8af76322c9 | |
Tyler Goodlet | eb5a4f7eeb | |
Tyler Goodlet | e008f69505 | |
Tyler Goodlet | 6c8b79906b | |
Tyler Goodlet | 40e62c1a38 | |
Tyler Goodlet | bed47d3ae6 | |
Tyler Goodlet | f60d9dd79c | |
Tyler Goodlet | 4402b2dc73 | |
Tyler Goodlet | 6e37ab6bf9 | |
Tyler Goodlet | 88411a6a26 | |
Tyler Goodlet | a0c3d5f32f | |
Tyler Goodlet | 236df4b6d6 | |
Tyler Goodlet | a3ec0c16c6 | |
Tyler Goodlet | 51ced95962 | |
Tyler Goodlet | 3487f76147 | |
Tyler Goodlet | fa69fca311 | |
Tyler Goodlet | 57b3d2f7e4 | |
Tyler Goodlet | f9b799b53d | |
Tyler Goodlet | 35f7c3409a | |
Tyler Goodlet | 9c5f7a6bb9 | |
Tyler Goodlet | 86337430d8 | |
Tyler Goodlet | 8d09d63095 | |
Tyler Goodlet | df04ccb845 | |
Tyler Goodlet | ad0ace2528 | |
Tyler Goodlet | edd273d5d8 | |
Tyler Goodlet | cfc77a0a66 | |
Tyler Goodlet | 69b3120444 | |
Tyler Goodlet | 8662cde7ca | |
Tyler Goodlet | 73b3f7ead8 | |
Tyler Goodlet | c3509e7f93 | |
Tyler Goodlet | fea645423e | |
Guillermo Rodriguez | d215a69049 | |
Guillermo Rodriguez | a11cee82d0 | |
Guillermo Rodriguez | aba50515df | |
Tyler Goodlet | 1b1bf07f54 | |
Tyler Goodlet | 77a7b73260 | |
Tyler Goodlet | 4ad06e4cc0 | |
Tyler Goodlet | 3da081c67a | |
Tyler Goodlet | d56d1fc4c1 | |
Tyler Goodlet | 544578c67d | |
Tyler Goodlet | 01ea2b3110 | |
Tyler Goodlet | 2f02f71610 | |
Tyler Goodlet | b318ebc221 | |
Tyler Goodlet | d737adb1b8 | |
Tyler Goodlet | 75d7314493 | |
Tyler Goodlet | d7d824030d | |
Tyler Goodlet | 28436bcb2b | |
Tyler Goodlet | 692e310a98 | |
Tyler Goodlet | c60d523428 | |
Tyler Goodlet | 00d7bb089f | |
Tyler Goodlet | 3dc87e0426 | |
Tyler Goodlet | 49531a2da6 | |
Tyler Goodlet | 53641abc4b | |
Tyler Goodlet | b0e236fadf | |
Tyler Goodlet | ef0516a84b | |
Tyler Goodlet | bcd0895a12 | |
Tyler Goodlet | 81c69c54ec |
piker/_daemon.py (107 changed lines)
@@ -19,7 +19,7 @@ Structured, daemon tree service management.
 """
 from typing import Optional, Union, Callable, Any
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from collections import defaultdict

 from pydantic import BaseModel

@@ -35,10 +35,10 @@ log = get_logger(__name__)

 _root_dname = 'pikerd'

-_registry_addr = ('127.0.0.1', 6116)
+_registry_addr = ('127.0.0.1', 1616)
 _tractor_kwargs: dict[str, Any] = {
     # use a different registry addr then tractor's default
     'arbiter_addr': _registry_addr
 }
 _root_modules = [
     __name__,

@@ -91,14 +91,18 @@ class Services(BaseModel):
             log.info(
                 f'`pikerd` service {name} started with value {first}'
             )
-            # wait on any context's return value
-            ctx_res = await ctx.result()
+            try:
+                # wait on any context's return value
+                ctx_res = await ctx.result()

-            # wait on any error from the sub-actor
-            # NOTE: this will block indefinitely until cancelled
-            # either by error from the target context function or by
-            # being cancelled here by the surrounding cancel scope
-            return (await portal.result(), ctx_res)
+            except tractor.ContextCancelled:
+                return await self.cancel_service(name)
+
+            else:
+                # wait on any error from the sub-actor
+                # NOTE: this will block indefinitely until
+                # cancelled either by error from the target
+                # context function or by being cancelled here by
+                # the surrounding cancel scope
+                return (await portal.result(), ctx_res)

         cs, first = await self.service_n.start(open_context_in_task)

@@ -110,20 +114,23 @@ class Services(BaseModel):

     # TODO: per service cancellation by scope, we aren't using this
     # anywhere right?
-    # async def cancel_service(
-    #     self,
-    #     name: str,
-    # ) -> Any:
-    #     log.info(f'Cancelling `pikerd` service {name}')
-    #     cs, portal = self.service_tasks[name]
-    #     cs.cancel()
-    #     return await portal.cancel_actor()
+    async def cancel_service(
+        self,
+        name: str,
+    ) -> Any:
+        log.info(f'Cancelling `pikerd` service {name}')
+        cs, portal = self.service_tasks[name]
+        # XXX: not entirely sure why this is required,
+        # and should probably be better fine tuned in
+        # ``tractor``?
+        cs.cancel()
+        return await portal.cancel_actor()


 _services: Optional[Services] = None


-@asynccontextmanager
+@acm
 async def open_pikerd(
     start_method: str = 'trio',
     loglevel: Optional[str] = None,

@@ -178,7 +185,7 @@ async def open_pikerd(
     yield _services


-@asynccontextmanager
+@acm
 async def open_piker_runtime(
     name: str,
     enable_modules: list[str] = [],

@@ -219,7 +226,7 @@ async def open_piker_runtime(
     yield tractor.current_actor()


-@asynccontextmanager
+@acm
 async def maybe_open_runtime(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -242,7 +249,7 @@ async def maybe_open_runtime(
     yield


-@asynccontextmanager
+@acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -293,7 +300,36 @@ class Brokerd:
     locks = defaultdict(trio.Lock)


-@asynccontextmanager
+@acm
+async def find_service(
+    service_name: str,
+) -> Optional[tractor.Portal]:
+
+    log.info(f'Scanning for service `{service_name}`')
+    # attach to existing daemon by name if possible
+    async with tractor.find_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as maybe_portal:
+        yield maybe_portal
+
+
+async def check_for_service(
+    service_name: str,
+
+) -> bool:
+    '''
+    Service daemon "liveness" predicate.
+
+    '''
+    async with tractor.query_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as sockaddr:
+        return sockaddr
+
+
+@acm
 async def maybe_spawn_daemon(

     service_name: str,

@@ -303,7 +339,7 @@ async def maybe_spawn_daemon(
     **kwargs,

 ) -> tractor.Portal:
-    """
+    '''
     If no ``service_name`` daemon-actor can be found,
     spawn one in a local subactor and return a portal to it.

@@ -314,7 +350,7 @@ async def maybe_spawn_daemon(
     This can be seen as a service starting api for remote-actor
     clients.

-    """
+    '''
     if loglevel:
         get_console_log(loglevel)

@@ -323,19 +359,13 @@ async def maybe_spawn_daemon(
     lock = Brokerd.locks[service_name]
     await lock.acquire()

-    log.info(f'Scanning for existing {service_name}')
-    # attach to existing daemon by name if possible
-    async with tractor.find_actor(
-        service_name,
-        arbiter_sockaddr=_registry_addr,
-
-    ) as portal:
+    async with find_service(service_name) as portal:
         if portal is not None:
             lock.release()
             yield portal
             return

     log.warning(f"Couldn't find any existing {service_name}")

     # ask root ``pikerd`` daemon to spawn the daemon we need if
     # pikerd is not live we now become the root of the

@@ -372,6 +402,7 @@ async def maybe_spawn_daemon(
     async with tractor.wait_for_actor(service_name) as portal:
         lock.release()
         yield portal
+        await portal.cancel_actor()


 async def spawn_brokerd(

@@ -415,7 +446,7 @@ async def spawn_brokerd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_spawn_brokerd(

     brokername: str,

@@ -423,7 +454,9 @@ async def maybe_spawn_brokerd(
     **kwargs,

 ) -> tractor.Portal:
-    '''Helper to spawn a brokerd service.
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.

     '''
     async with maybe_spawn_daemon(

@@ -475,7 +508,7 @@ async def spawn_emsd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_open_emsd(

     brokername: str,
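For orientation, a minimal sketch of how a client task might use the `find_service` and `check_for_service` helpers added to `piker/_daemon.py` above, assuming a piker actor runtime (and thus the registry at `_registry_addr`) is already up; the `'brokerd.kraken'` service name is a hypothetical example:

```python
import trio

from piker._daemon import find_service, check_for_service


async def main() -> None:
    # liveness predicate: truthy (the registered sockaddr)
    # only when the daemon is already registered
    if await check_for_service('brokerd.kraken'):

        # attach to the existing daemon-actor by name
        async with find_service('brokerd.kraken') as portal:
            if portal is not None:
                print('found existing brokerd daemon')


trio.run(main)
```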
@@ -21,7 +21,10 @@ Profiling wrappers for internal libs.
 import time
 from functools import wraps

-_pg_profile: bool = True
+# NOTE: you can pass a flag to enable this:
+# ``piker chart <args> --profile``.
+_pg_profile: bool = False
+ms_slower_then: float = 10


 def pg_profile_enabled() -> bool:
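A hedged sketch of how such a module-level switch is typically consumed; the decorator below is illustrative only (not this module's real wrapper) and assumes `pg_profile_enabled` and `ms_slower_then` from the hunk above are in scope:

```python
import time
from functools import wraps


def maybe_profile(fn):
    '''
    Illustrative gate: time the call only when profiling is on
    and report it only when it crosses the ms threshold.

    '''
    @wraps(fn)
    def wrapped(*args, **kwargs):
        if not pg_profile_enabled():
            return fn(*args, **kwargs)

        start = time.time()
        result = fn(*args, **kwargs)
        elapsed_ms = (time.time() - start) * 1000

        # only report calls slower than the configured threshold
        if elapsed_ms > ms_slower_then:
            print(f'{fn.__name__} took {elapsed_ms:.1f}ms')

        return result

    return wrapped
```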
@@ -39,7 +39,9 @@ class NoData(BrokerError):
 def resproc(
     resp: asks.response_objects.Response,
     log: logging.Logger,
-    return_json: bool = True
+    return_json: bool = True,
+    log_resp: bool = False,
+
 ) -> asks.response_objects.Response:
     """Process response and return its json content.

@@ -52,7 +54,8 @@ def resproc(
     except json.decoder.JSONDecodeError:
         log.exception(f"Failed to process {resp}:\n{resp.text}")
         raise BrokerError(resp.text)
-    else:
+
+    if log_resp:
         log.debug(f"Received json contents:\n{colorize_json(json)}")

     return json if return_json else resp
@@ -18,8 +18,11 @@
 Binance backend

 """
-from contextlib import asynccontextmanager
-from typing import List, Dict, Any, Tuple, Union, Optional, AsyncGenerator
+from contextlib import asynccontextmanager as acm
+from typing import (
+    Any, Union, Optional,
+    AsyncGenerator, Callable,
+)
 import time

 import trio

@@ -88,7 +91,7 @@ class Pair(BaseModel):
     baseCommissionPrecision: int
     quoteCommissionPrecision: int

-    orderTypes: List[str]
+    orderTypes: list[str]

     icebergAllowed: bool
     ocoAllowed: bool

@@ -96,8 +99,8 @@ class Pair(BaseModel):
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool

-    filters: List[Dict[str, Union[str, int, float]]]
-    permissions: List[str]
+    filters: list[dict[str, Union[str, int, float]]]
+    permissions: list[str]


 @dataclass

@@ -145,7 +148,7 @@ class Client:
         self,
         method: str,
         params: dict,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = await self._sesh.get(
             path=f'/api/v3/{method}',
             params=params,

@@ -200,7 +203,7 @@ class Client:
         self,
         pattern: str,
         limit: int = None,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         if self._pairs is not None:
             data = self._pairs
         else:

@@ -273,7 +276,7 @@ class Client:
     return array


-@asynccontextmanager
+@acm
 async def get_client() -> Client:
     client = Client()
     await client.cache_symbols()

@@ -353,7 +356,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
     }


-def make_sub(pairs: List[str], sub_name: str, uid: int) -> Dict[str, str]:
+def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
     """Create a request subscription packet dict.

     https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams

@@ -368,6 +371,17 @@ def make_sub(pairs: List[str], sub_name: str, uid: int) -> Dict[str, str]:
     }


+@acm
+async def open_history_client(
+    symbol: str,
+
+) -> tuple[Callable, int]:
+
+    # TODO implement history getter for the new storage layer.
+    async with open_cached_client('binance') as client:
+        yield client
+
+
 async def backfill_bars(
     sym: str,
     shm: ShmArray,  # type: ignore # noqa

@@ -385,12 +399,12 @@ async def backfill_bars(
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
-    symbols: List[str],
+    symbols: list[str],
     feed_is_live: trio.Event,
     loglevel: str = None,

     # startup sync
-    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
     # XXX: required to propagate ``tractor`` loglevel to piker logging

@@ -427,10 +441,11 @@ async def stream_quotes(
         symbol: {
             'symbol_info': sym_infos[sym],
             'shm_write_opts': {'sum_tick_vml': False},
+            'fqsn': sym,
         },
     }

-    @asynccontextmanager
+    @acm
     async def subscribe(ws: wsproto.WSConnection):
         # setup subs

@@ -480,8 +495,7 @@ async def stream_quotes(
     # TODO: use ``anext()`` when it lands in 3.10!
     typ, quote = await msg_gen.__anext__()

-    first_quote = {quote['symbol'].lower(): quote}
-    task_status.started((init_msgs, first_quote))
+    task_status.started((init_msgs, quote))

     # signal to caller feed is ready for consumption
     feed_is_live.set()
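This backend (and the kraken one further down) gains an identical `open_history_client` stub. Note the mismatch the TODO flags: the annotation promises a `tuple[Callable, int]`, but for now the stub just yields the raw cached client. A hedged sketch of what calling it looks like today, written as if inside the backend module and assuming the `Client.bars()` method these backends expose elsewhere:

```python
# illustrative only: the yielded value is the cached backend
# client, not yet the (Callable, int) pair the annotation
# advertises, so treat it loosely until the storage layer lands.
async def load_recent_bars(symbol: str):
    async with open_history_client(symbol) as client:
        return await client.bars(symbol)
```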
@@ -142,15 +142,23 @@ async def symbol_search(
     brokermods: list[ModuleType],
     pattern: str,
     **kwargs,

 ) -> Dict[str, Dict[str, Dict[str, Any]]]:
-    """Return symbol info from broker.
-    """
+    '''
+    Return symbol info from broker.
+
+    '''
     results = []

-    async def search_backend(brokername: str) -> None:
+    async def search_backend(
+        brokermod: ModuleType
+    ) -> None:
+
+        brokername: str = mod.name
+
         async with maybe_spawn_brokerd(
-            brokername,
+            mod.name,
+            infect_asyncio=getattr(mod, '_infect_asyncio', False),
         ) as portal:

             results.append((
piker/brokers/ib.py (1021 changed lines; file diff suppressed because it is too large)
@@ -18,9 +18,9 @@
 Kraken backend.

 """
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from dataclasses import asdict, field
-from typing import List, Dict, Any, Tuple, Optional
+from typing import List, Dict, Any, Tuple, Optional, Callable
 import time

 from trio_typing import TaskStatus

@@ -271,7 +271,7 @@ class Client:
         raise SymbolNotFound(json['error'][0] + f': {symbol}')


-@asynccontextmanager
+@acm
 async def get_client() -> Client:
     client = Client()

@@ -385,6 +385,17 @@ def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
     }


+@acm
+async def open_history_client(
+    symbol: str,
+
+) -> tuple[Callable, int]:
+
+    # TODO implement history getter for the new storage layer.
+    async with open_cached_client('kraken') as client:
+        yield client
+
+
 async def backfill_bars(

     sym: str,

@@ -450,10 +461,11 @@ async def stream_quotes(
         symbol: {
             'symbol_info': sym_infos[sym],
             'shm_write_opts': {'sum_tick_vml': False},
+            'fqsn': sym,
         },
     }

-    @asynccontextmanager
+    @acm
     async def subscribe(ws: wsproto.WSConnection):
         # XXX: setup subs
         # https://docs.kraken.com/websockets/#message-subscribe

@@ -506,8 +518,7 @@ async def stream_quotes(

     topic, quote = normalize(ohlc_last)

-    first_quote = {topic: quote}
-    task_status.started((init_msgs, first_quote))
+    task_status.started((init_msgs, quote))

     # lol, only "closes" when they're margin squeezing clients ;P
     feed_is_live.set()
@@ -178,7 +178,9 @@ class Allocator(BaseModel):
             l_sub_pp = (self.currency_limit - live_cost_basis) / price

         else:
-            raise ValueError(f"Not valid size unit '{size}'")
+            raise ValueError(
+                f"Not valid size unit '{size_unit}'"
+            )

         # an entry (adding-to or starting a pp)
         if (

@@ -282,6 +284,14 @@ class Allocator(BaseModel):
         return round(prop * self.slots)


+_derivs = (
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+)
+
+
 def mk_allocator(

     symbol: Symbol,

@@ -290,7 +300,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': 'currency', #_size_units['currency'],
+        'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,

@@ -318,11 +328,9 @@ def mk_allocator(

     asset_type = symbol.type_key

-
     # specific configs by asset class / type
-
-    if asset_type in ('future', 'option', 'futures_option'):
+    if asset_type in _derivs:

         # since it's harder to know how currency "applies" in this case
         # given leverage properties
         alloc.size_unit = '# units'

@@ -345,7 +353,7 @@ def mk_allocator(
     if startup_size > alloc.units_limit:
         alloc.units_limit = startup_size

-    if asset_type in ('future', 'option', 'futures_option'):
+    if asset_type in _derivs:
         alloc.slots = alloc.units_limit

     return alloc
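The first hunk above carries the one piece of sizing arithmetic worth spelling out: `l_sub_pp` is the number of units still purchasable under the currency limit at the current price. A worked example using the `defaults` from this diff (the live cost basis is a hypothetical value):

```python
currency_limit = 5e3   # from the `defaults` dict above
live_cost_basis = 2e3  # hypothetical: capital already deployed
price = 100.0

# units still affordable before hitting the currency limit
l_sub_pp = (currency_limit - live_cost_basis) / price
assert l_sub_pp == 30.0
```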
@@ -18,7 +18,7 @@
 Orders and execution client API.

 """
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from typing import Dict
 from pprint import pformat
 from dataclasses import dataclass, field

@@ -27,7 +27,6 @@ import trio
 import tractor
 from tractor.trionics import broadcast_receiver

-from ..data._source import Symbol
 from ..log import get_logger
 from ._ems import _emsd_main
 from .._daemon import maybe_open_emsd

@@ -156,16 +155,19 @@ async def relay_order_cmds_from_sync_code(
     await to_ems_stream.send(cmd)


-@asynccontextmanager
+@acm
 async def open_ems(
-    broker: str,
-    symbol: Symbol,
+    fqsn: str,

-) -> (OrderBook, tractor.MsgStream, dict):
-    """Spawn an EMS daemon and begin sending orders and receiving
+) -> (
+    OrderBook,
+    tractor.MsgStream,
+    dict,
+):
+    '''
+    Spawn an EMS daemon and begin sending orders and receiving
     alerts.

     This EMS tries to reduce most broker's terrible order entry apis to
     a very simple protocol built on a few easy to grok and/or
     "rantsy" premises:

@@ -194,21 +196,22 @@ async def open_ems(
     - 'dark_executed', 'broker_executed'
     - 'broker_filled'

-    """
+    '''
     # wait for service to connect back to us signalling
     # ready for order commands
     book = get_orders()

+    from ..data._source import uncons_fqsn
+    broker, symbol, suffix = uncons_fqsn(fqsn)
+
     async with maybe_open_emsd(broker) as portal:

         async with (

             # connect to emsd
             portal.open_context(

                 _emsd_main,
-                broker=broker,
-                symbol=symbol.key,
+                fqsn=fqsn,

             ) as (ctx, (positions, accounts)),

@@ -218,7 +221,7 @@ async def open_ems(
         async with trio.open_nursery() as n:
             n.start_soon(
                 relay_order_cmds_from_sync_code,
-                symbol.key,
+                fqsn,
                 trades_stream
             )
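The recurring theme across these clearing-engine changes is the switch from `(broker, symbol)` pairs to a single "fully qualified symbol name" (fqsn) of the form `<name>.<venue>.<suffix>.<broker>` (per the comment in `_emsd_main` below). The real helper is `uncons_fqsn` from `piker.data._source`; its body isn't captured in this scrape, so the following round-trip sketch is an inferred approximation from its call sites, not the actual implementation:

```python
def uncons_fqsn_sketch(fqsn: str) -> tuple[str, str, str]:
    # inferred behavior: the broker is the last dotted token,
    # any middle tokens form a suffix/expiry, and the first
    # token is the bare symbol the backend itself understands.
    tokens = fqsn.split('.')
    broker = tokens[-1]
    symbol = tokens[0]
    suffix = '.'.join(tokens[1:-1])
    return broker, symbol, suffix


# e.g. a spot pair vs. a dated derivative (hypothetical values):
assert uncons_fqsn_sketch('xbtusd.kraken') == ('kraken', 'xbtusd', '')
assert uncons_fqsn_sketch('mes.globex.20210619.ib') == (
    'ib', 'mes', 'globex.20210619',
)
```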
@@ -20,7 +20,7 @@ In da suit parlances: "Execution management systems"
 """
 from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
-from math import isnan
+# from math import isnan
 from pprint import pformat
 import time
 from typing import AsyncIterator, Callable

@@ -113,8 +113,8 @@ class _DarkBook:

     # tracks most recent values per symbol each from data feed
     lasts: dict[
-        tuple[str, str],
-        float
+        str,
+        float,
     ] = field(default_factory=dict)

     # mapping of piker ems order ids to current brokerd order flow message

@@ -135,7 +135,7 @@ async def clear_dark_triggers(
     ems_client_order_stream: tractor.MsgStream,
     quote_stream: tractor.ReceiveMsgStream,  # noqa
     broker: str,
-    symbol: str,
+    fqsn: str,

     book: _DarkBook,

@@ -155,7 +155,6 @@ async def clear_dark_triggers(
     # start = time.time()
     for sym, quote in quotes.items():
         execs = book.orders.get(sym, {})
-
         for tick in iterticks(
             quote,
             # dark order price filter(s)

@@ -171,7 +170,7 @@ async def clear_dark_triggers(
             ttype = tick['type']

             # update to keep new cmds informed
-            book.lasts[(broker, symbol)] = price
+            book.lasts[sym] = price

             for oid, (
                 pred,

@@ -196,6 +195,7 @@ async def clear_dark_triggers(

                 action: str = cmd['action']
                 symbol: str = cmd['symbol']
+                bfqsn: str = symbol.replace(f'.{broker}', '')

                 if action == 'alert':
                     # nothing to do but relay a status

@@ -225,7 +225,7 @@ async def clear_dark_triggers(
                         # order-request and instead create a new one.
                         reqid=None,

-                        symbol=sym,
+                        symbol=bfqsn,
                         price=submit_price,
                         size=cmd['size'],
                     )

@@ -247,12 +247,9 @@ async def clear_dark_triggers(
                         oid=oid,  # ems order id
                         resp=resp,
                         time_ns=time.time_ns(),
-
-                        symbol=symbol,
+                        symbol=fqsn,
                         trigger_price=price,
-
                         broker_details={'name': broker},
-
                         cmd=cmd,  # original request message
                     ).dict()

@@ -265,12 +262,20 @@ async def clear_dark_triggers(
                         f'pred for {oid} was already removed!?'
                     )

-                await ems_client_order_stream.send(msg)
+                try:
+                    await ems_client_order_stream.send(msg)
+                except (
+                    trio.ClosedResourceError,
+                ):
+                    log.warning(
+                        f'client {ems_client_order_stream} stream is broke'
+                    )
+                    break

         else:  # condition scan loop complete
             log.debug(f'execs are {execs}')
             if execs:
-                book.orders[symbol] = execs
+                book.orders[fqsn] = execs

     # print(f'execs scan took: {time.time() - start}')

@@ -382,7 +387,8 @@ async def open_brokerd_trades_dialogue(
     task_status: TaskStatus[TradesRelay] = trio.TASK_STATUS_IGNORED,

 ) -> tuple[dict, tractor.MsgStream]:
-    '''Open and yield ``brokerd`` trades dialogue context-stream if none
+    '''
+    Open and yield ``brokerd`` trades dialogue context-stream if none
     already exists.

     '''

@@ -419,8 +425,7 @@ async def open_brokerd_trades_dialogue(
             # actor to simulate the real IPC load it'll have when also
             # pulling data from feeds
             open_trades_endpoint = paper.open_paperboi(
-                broker=broker,
-                symbol=symbol,
+                fqsn='.'.join([symbol, broker]),
                 loglevel=loglevel,
             )

@@ -458,12 +463,13 @@ async def open_brokerd_trades_dialogue(
             # locally cache and track positions per account.
             pps = {}
             for msg in positions:
+                log.info(f'loading pp: {msg}')
+
                 account = msg['account']
                 assert account in accounts

                 pps.setdefault(
-                    msg['symbol'],
+                    f'{msg["symbol"]}.{broker}',
                     {}
                 )[account] = msg

@@ -562,14 +568,28 @@ async def translate_and_relay_brokerd_events(

             # XXX: this will be useful for automatic strats yah?
             # keep pps per account up to date locally in ``emsd`` mem
-            relay.positions.setdefault(pos_msg['symbol'], {}).setdefault(
+            sym, broker = pos_msg['symbol'], pos_msg['broker']
+
+            relay.positions.setdefault(
+                # NOTE: translate to a FQSN!
+                f'{sym}.{broker}',
+                {}
+            ).setdefault(
                 pos_msg['account'], {}
             ).update(pos_msg)

             # fan-out-relay position msgs immediately by
             # broadcasting updates on all client streams
-            for client_stream in router.clients:
-                await client_stream.send(pos_msg)
+            for client_stream in router.clients.copy():
+                try:
+                    await client_stream.send(pos_msg)
+                except(
+                    trio.ClosedResourceError,
+                    trio.BrokenResourceError,
+                ):
+                    router.clients.remove(client_stream)
+                    log.warning(
+                        f'client for {client_stream} was already closed?')

             continue
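The broadcast fix above is a general trio pattern worth noting: iterate over a copy of the subscriber set so that removing a dead stream mid-loop never mutates the collection being iterated. A minimal standalone sketch (names are illustrative, not piker's):

```python
import trio


async def fanout(subscribers: set, msg: dict) -> None:
    # iterate a snapshot so we may safely discard dead streams
    for stream in subscribers.copy():
        try:
            await stream.send(msg)
        except (
            trio.ClosedResourceError,
            trio.BrokenResourceError,
        ):
            # drop the disconnected subscriber and keep going
            subscribers.discard(stream)
```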
@@ -839,11 +859,15 @@ async def process_client_order_cmds(

             msg = Order(**cmd)

-            sym = msg.symbol
+            fqsn = msg.symbol
             trigger_price = msg.price
             size = msg.size
             exec_mode = msg.exec_mode
             broker = msg.brokers[0]
+            # remove the broker part before creating a message
+            # to send to the specific broker since they probably
+            # aren't expectig their own name, but should they?
+            sym = fqsn.replace(f'.{broker}', '')

             if exec_mode == 'live' and action in ('buy', 'sell',):

@@ -901,7 +925,7 @@ async def process_client_order_cmds(
                 # price received from the feed, instead of being
                 # like every other shitty tina platform that makes
                 # the user choose the predicate operator.
-                last = dark_book.lasts[(broker, sym)]
+                last = dark_book.lasts[fqsn]
                 pred = mk_check(trigger_price, last, action)

                 spread_slap: float = 5

@@ -932,7 +956,7 @@ async def process_client_order_cmds(
                 # dark book entry if the order id already exists

                 dark_book.orders.setdefault(
-                    sym, {}
+                    fqsn, {}
                 )[oid] = (
                     pred,
                     tickfilter,

@@ -959,8 +983,8 @@ async def process_client_order_cmds(
 async def _emsd_main(

     ctx: tractor.Context,
-    broker: str,
-    symbol: str,
+    fqsn: str,
+
     _exec_mode: str = 'dark',  # ('paper', 'dark', 'live')
     loglevel: str = 'info',

@@ -1002,6 +1026,8 @@ async def _emsd_main(
     global _router
     assert _router

+    from ..data._source import uncons_fqsn
+    broker, symbol, suffix = uncons_fqsn(fqsn)
     dark_book = _router.get_dark_book(broker)

     # TODO: would be nice if in tractor we can require either a ctx arg,

@@ -1014,17 +1040,16 @@ async def _emsd_main(
     # spawn one task per broker feed
     async with (
         maybe_open_feed(
-            broker,
-            [symbol],
+            [fqsn],
             loglevel=loglevel,
-        ) as (feed, stream),
+        ) as (feed, quote_stream),
     ):

         # XXX: this should be initial price quote from target provider
-        first_quote = feed.first_quotes[symbol]
+        first_quote = feed.first_quotes[fqsn]

         book = _router.get_dark_book(broker)
-        last = book.lasts[(broker, symbol)] = first_quote['last']
+        book.lasts[fqsn] = first_quote['last']

         # XXX: ib is a cucker but we've fixed avoiding receiving any
         # `Nan`s in the backend during market hours (right?). this was

@@ -1053,8 +1078,8 @@ async def _emsd_main(

         # flatten out collected pps from brokerd for delivery
         pp_msgs = {
-            sym: list(pps.values())
-            for sym, pps in relay.positions.items()
+            fqsn: list(pps.values())
+            for fqsn, pps in relay.positions.items()
         }

         # signal to client that we're started and deliver

@@ -1071,9 +1096,9 @@ async def _emsd_main(

                 brokerd_stream,
                 ems_client_order_stream,
-                stream,
+                quote_stream,
                 broker,
-                symbol,
+                fqsn,  # form: <name>.<venue>.<suffix>.<broker>
                 book
             )

@@ -1089,7 +1114,7 @@ async def _emsd_main(
                 # relay.brokerd_dialogue,
                 brokerd_stream,

-                symbol,
+                fqsn,
                 feed,
                 dark_book,
                 _router,
@@ -32,6 +32,7 @@ from dataclasses import dataclass

 from .. import data
 from ..data._normalize import iterticks
+from ..data._source import uncons_fqsn
 from ..log import get_logger
 from ._messages import (
     BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,

@@ -446,7 +447,7 @@ async def trades_dialogue(

     ctx: tractor.Context,
     broker: str,
-    symbol: str,
+    fqsn: str,
     loglevel: str = None,

 ) -> None:

@@ -455,8 +456,7 @@ async def trades_dialogue(
     async with (

         data.open_feed(
-            broker,
-            [symbol],
+            [fqsn],
             loglevel=loglevel,
         ) as feed,

@@ -490,15 +490,16 @@ async def trades_dialogue(

 @asynccontextmanager
 async def open_paperboi(
-    broker: str,
-    symbol: str,
+    fqsn: str,
     loglevel: str,

 ) -> Callable:
-    '''Spawn a paper engine actor and yield through access to
+    '''
+    Spawn a paper engine actor and yield through access to
     its context.

     '''
+    broker, symbol, expiry = uncons_fqsn(fqsn)
     service_name = f'paperboi.{broker}'

     async with (

@@ -517,7 +518,7 @@ async def open_paperboi(
         async with portal.open_context(
             trades_dialogue,
             broker=broker,
-            symbol=symbol,
+            fqsn=fqsn,
             loglevel=loglevel,

         ) as (ctx, first):
@@ -16,29 +16,22 @@ from .. import config
 log = get_logger('cli')
 DEFAULT_BROKER = 'questrade'

-_config_dir = click.get_app_dir('piker')
-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
-_context_defaults = dict(
-    default_map={
-        # Questrade specific quote poll rates
-        'monitor': {
-            'rate': 3,
-        },
-        'optschain': {
-            'rate': 1,
-        },
-    }
-)


 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
 @click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
-def pikerd(loglevel, host, tl, pdb):
-    """Spawn the piker broker-daemon.
-    """
+@click.option(
+    '--tsdb',
+    is_flag=True,
+    help='Enable local ``marketstore`` instance'
+)
+def pikerd(loglevel, host, tl, pdb, tsdb):
+    '''
+    Spawn the piker broker-daemon.
+
+    '''
     from .._daemon import open_pikerd
     log = get_console_log(loglevel)

@@ -52,13 +45,33 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
     ))

     async def main():
-        async with open_pikerd(loglevel=loglevel, debug_mode=pdb):
+
+        async with (
+            open_pikerd(
+                loglevel=loglevel,
+                debug_mode=pdb,
+            ),  # normally delivers a ``Services`` handle
+            trio.open_nursery() as n,
+        ):
+            if tsdb:
+                # TODO:
+                # async with maybe_open_marketstored():
+
+                from piker.data._ahab import start_ahab
+                log.info('Spawning `marketstore` supervisor')
+                ctn_ready = await n.start(
+                    start_ahab,
+                    'marketstored',
+                )
+                await ctn_ready.wait()
+                log.info('`marketstore` container:{uid} up')
+
             await trio.sleep_forever()

     trio.run(main)


-@click.group(context_settings=_context_defaults)
+@click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
     default=[DEFAULT_BROKER],

@@ -87,8 +100,8 @@ def cli(ctx, brokers, loglevel, tl, configdir):
         'loglevel': loglevel,
         'tractorloglevel': None,
         'log': get_console_log(loglevel),
-        'confdir': _config_dir,
-        'wl_path': _watchlists_data_path,
+        'confdir': config._config_dir,
+        'wl_path': config._watchlists_data_path,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -17,6 +17,8 @@
 """
 Broker configuration mgmt.
 """
+import platform
+import sys
 import os
 from os.path import dirname
 import shutil

@@ -24,14 +26,100 @@ from typing import Optional

 from bidict import bidict
 import toml
-import click

 from .log import get_logger

 log = get_logger('broker-config')

-_config_dir = click.get_app_dir('piker')
+
+# taken from ``click`` since apparently they have some
+# super weirdness with sigint and sudo..no clue
+def get_app_dir(app_name, roaming=True, force_posix=False):
+    r"""Returns the config folder for the application. The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Win XP (roaming):
+      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
+    Win XP (not roaming):
+      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
+    Win 7 (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Win 7 (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no affect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+
+    def _posixify(name):
+        return "-".join(name.split()).lower()
+
+    # if WIN:
+    if platform.system() == 'Windows':
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+_config_dir = _click_config_dir = get_app_dir('piker')
+_parent_user = os.environ.get('SUDO_USER')
+
+if _parent_user:
+    non_root_user_dir = os.path.expanduser(
+        f'~{_parent_user}'
+    )
+    root = 'root'
+    _config_dir = (
+        non_root_user_dir +
+        _click_config_dir[
+            _click_config_dir.rfind(root) + len(root):
+        ]
+    )
+
 _file_name = 'brokers.toml'
+_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+_context_defaults = dict(
+    default_map={
+        # Questrade specific quote poll rates
+        'monitor': {
+            'rate': 3,
+        },
+        'optschain': {
+            'rate': 1,
+        },
+    }
+)


 def _override_config_dir(
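The `SUDO_USER` branch above rewrites a root-owned config path back to the invoking user's home directory. A worked example of the string surgery, with hypothetical paths:

```python
# suppose piker is run via sudo, so click-style resolution yields:
_click_config_dir = '/root/.config/piker'
_parent_user = 'alice'  # from the SUDO_USER env var

non_root_user_dir = '/home/alice'  # os.path.expanduser(f'~{_parent_user}')
root = 'root'

# everything after the literal 'root' is kept and re-rooted
# under the original (non-root) user's home:
rewritten = (
    non_root_user_dir
    + _click_config_dir[_click_config_dir.rfind(root) + len(root):]
)
assert rewritten == '/home/alice/.config/piker'
```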
@ -0,0 +1,348 @@
|
||||||
|
# piker: trading gear for hackers
|
||||||
|
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
'''
|
||||||
|
Supervisor for docker with included specific-image service helpers.
|
||||||
|
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
from typing import (
|
||||||
|
Optional,
|
||||||
|
# Any,
|
||||||
|
)
|
||||||
|
from contextlib import asynccontextmanager as acm
|
||||||
|
|
||||||
|
import trio
|
||||||
|
from trio_typing import TaskStatus
|
||||||
|
import tractor
|
||||||
|
import docker
|
||||||
|
import json
|
||||||
|
from docker.models.containers import Container
|
||||||
|
from docker.errors import DockerException, APIError
|
||||||
|
from requests.exceptions import ConnectionError, ReadTimeout
|
||||||
|
|
||||||
|
from ..log import get_logger, get_console_log
|
||||||
|
from .. import config
|
||||||
|
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
_config = '''
|
||||||
|
# piker's ``marketstore`` config.
|
||||||
|
|
||||||
|
# mount this config using:
|
||||||
|
# sudo docker run --mount \
|
||||||
|
# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
|
||||||
|
# 5993:5993 alpacamarkets/marketstore:latest
|
||||||
|
|
||||||
|
root_directory: data
|
||||||
|
listen_port: 5993
|
||||||
|
grpc_listen_port: 5995
|
||||||
|
log_level: debug
|
||||||
|
queryable: true
|
||||||
|
stop_grace_period: 0
|
||||||
|
wal_rotate_interval: 5
|
||||||
|
stale_threshold: 5
|
||||||
|
enable_add: true
|
||||||
|
enable_remove: false
|
||||||
|
|
||||||
|
triggers:
|
||||||
|
- module: ondiskagg.so
|
||||||
|
on: "*/1Sec/OHLCV"
|
||||||
|
config:
|
||||||
|
# filter: "nasdaq"
|
||||||
|
destinations:
|
||||||
|
- 1Min
|
||||||
|
- 5Min
|
||||||
|
- 15Min
|
||||||
|
- 1H
|
||||||
|
- 1D
|
||||||
|
|
||||||
|
- module: stream.so
|
||||||
|
on: '*/*/*'
|
||||||
|
# config:
|
||||||
|
# filter: "nasdaq"
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
class DockerNotStarted(Exception):
|
||||||
|
'Prolly you dint start da daemon bruh'
|
||||||
|
|
||||||
|
|
@acm
async def open_docker(
    url: Optional[str] = None,
    **kwargs,

) -> docker.DockerClient:

    client: Optional[docker.DockerClient] = None
    try:
        client = docker.DockerClient(
            base_url=url,
            **kwargs
        ) if url else docker.from_env(**kwargs)

        yield client

    except (
        DockerException,
        APIError,
    ) as err:

        def unpack_msg(err: Exception) -> str:
            args = getattr(err, 'args', None)
            if args:
                # flatten the (possibly nested) args into one
                # searchable string so the substring checks below work.
                return str(args)
            else:
                return str(err)

        # could be more specific so let's check if it's just perms.
        if err.args:
            errs = err.args
            for err in errs:
                msg = unpack_msg(err)
                if 'PermissionError' in msg:
                    raise DockerException('You dint run as root yo!')

                elif 'FileNotFoundError' in msg:
                    raise DockerNotStarted('Did you start da service sister?')

        # not perms?
        raise

    finally:
        if client:
            client.close()
            # client.api._custom_adapter.close()
            for c in client.containers.list():
                c.kill()

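A minimal usage sketch of `open_docker` from a trio task; with no `url` it falls back to `docker.from_env()`. Note the teardown in the `finally` block above kills every container the client can see, so this is demo-only:

    import trio

    async def list_containers() -> None:
        async with open_docker() as client:
            # standard docker-py container objects
            for cntr in client.containers.list():
                print(cntr.id, cntr.status)

    trio.run(list_containers)
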
@tractor.context
async def open_marketstored(
    ctx: tractor.Context,
    **kwargs,

) -> None:
    '''
    Start and supervise a marketstore instance with its config bind-mounted
    in from the piker config directory on the system.

    The equivalent cli cmd to this code is:

        sudo docker run --mount \
        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
        5993:5993 alpacamarkets/marketstore:latest

    '''
    log = get_console_log('info', name=__name__)

    async with open_docker() as client:

        # create a mount from user's local piker config dir into container
        config_dir_mnt = docker.types.Mount(
            target='/etc',
            source=config._config_dir,
            type='bind',
        )

        # create a user config subdir where the marketstore
        # backing filesystem database can be persisted.
        persistent_data_dir = os.path.join(
            config._config_dir, 'data',
        )
        if not os.path.isdir(persistent_data_dir):
            os.mkdir(persistent_data_dir)

        data_dir_mnt = docker.types.Mount(
            target='/data',
            source=persistent_data_dir,
            type='bind',
        )

        cntr: Container = client.containers.run(
            'alpacamarkets/marketstore:latest',
            # do we need this for cmds?
            # '-i',

            # '-p 5993:5993',
            ports={
                '5993/tcp': 5993,  # jsonrpc
                '5995/tcp': 5995,  # grpc
            },
            mounts=[config_dir_mnt, data_dir_mnt],
            detach=True,
            # stop_signal='SIGINT',
            init=True,
            # remove=True,
        )
        try:
            seen_so_far = set()

            async def process_logs_until(
                match: str,
                bp_on_msg: bool = False,
            ):
                logs = cntr.logs(stream=True)
                for entry in logs:
                    entry = entry.decode()

                    try:
                        record = json.loads(entry.strip())
                    except json.JSONDecodeError:
                        if 'Error' in entry:
                            raise RuntimeError(entry)
                        # not json and not an error; skip this line
                        # instead of falling through to an unbound
                        # ``record`` reference below.
                        continue

                    msg = record['msg']
                    level = record['level']
                    if msg and entry not in seen_so_far:
                        seen_so_far.add(entry)
                        if bp_on_msg:
                            await tractor.breakpoint()
                        getattr(log, level, log.error)(f'{msg}')

                    # if "launching tcp listener for all services..." in msg:
                    if match in msg:
                        return True

                    # do a checkpoint so we don't block if cancelled B)
                    await trio.sleep(0)

                return False

            with trio.move_on_after(0.5):
                found = await process_logs_until(
                    "launching tcp listener for all services...",
                )

            if not found and cntr not in client.containers.list():
                raise RuntimeError(
                    'Failed to start `marketstore`; check logs for deats'
                )

            await ctx.started(cntr.id)

            # block for the expected "teardown log msg"..
            await process_logs_until('exiting...',)

        except (
            BaseException,
            # trio.Cancelled,
            # KeyboardInterrupt,
        ):
            cntr.kill('SIGINT')
            with trio.move_on_after(0.5) as cs:
                cs.shield = True
                await process_logs_until('exiting...',)
            raise

        finally:
            try:
                cntr.wait(
                    timeout=0.5,
                    condition='not-running',
                )
            except (
                ReadTimeout,
                ConnectionError,
            ):
                cntr.kill()

async def start_ahab(
    service_name: str,
    task_status: TaskStatus[trio.Event] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Start a ``docker`` container supervisor with given service name.

    Currently the actor calling this task should normally be started
    with root permissions (until we decide to use something that doesn't
    require this, like docker's rootless mode or some wrapper project) but
    the root perms are de-escalated after the docker supervisor sub-actor
    is started.

    '''
    cn_ready = trio.Event()
    try:
        async with tractor.open_nursery(
            loglevel='runtime',
        ) as tn:

            portal = await tn.start_actor(
                service_name,
                enable_modules=[__name__]
            )

            # TODO: we have issues with this on teardown
            # where ``tractor`` tries to issue ``os.kill()``
            # and hits perms errors since the root process
            # doesn't any longer have root perms..

            # de-escalate root perms to the original user
            # after the docker supervisor actor is spawned.
            if config._parent_user:
                import pwd
                os.setuid(
                    pwd.getpwnam(
                        config._parent_user
                    )[2]  # named user's uid
                )

            task_status.started(cn_ready)

            async with portal.open_context(
                open_marketstored,
            ) as (ctx, first):

                assert str(first)
                # run till cancelled
                await trio.sleep_forever()

    # since we demoted root perms in this parent
    # we'll get a perms error on proc cleanup in
    # ``tractor`` nursery exit. just make sure
    # the child is terminated and don't raise the
    # error if so.

    # TODO: we could also consider adding
    # a ``tractor.ZombieDetected`` or something that we could raise
    # if we find the child didn't terminate.
    # await tractor.breakpoint()
    except PermissionError:
        log.warning('Failed to cancel root-permsed container')

    except (
        trio.MultiError,
    ) as err:
        for subexc in err.exceptions:
            if isinstance(subexc, PermissionError):
                log.warning('Failed to cancel root-permsed container')
                return
        else:
            raise


async def main():
    # NOTE: the service name here is an illustrative fix; the original
    # call omitted the required positional arg.
    await start_ahab('marketstored')
    await trio.sleep_forever()


if __name__ == '__main__':
    trio.run(main)

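To drive the supervisor from an existing trio nursery rather than `trio.run()`, a hedged sketch (service name illustrative):

    import trio

    async def supervise() -> None:
        async with trio.open_nursery() as n:
            # ``task_status.started()`` above hands back the readiness event
            ready: trio.Event = await n.start(start_ahab, 'marketstored')
            await trio.sleep_forever()
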
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -19,15 +19,19 @@ Sampling and broadcast machinery for (soft) real-time delivery of
 financial data flows.
 
 """
+from __future__ import annotations
+from collections import Counter
 import time
+from typing import TYPE_CHECKING
 
 import tractor
 import trio
 from trio_typing import TaskStatus
 
-from ._sharedmem import ShmArray
 from ..log import get_logger
 
+if TYPE_CHECKING:
+    from ._sharedmem import ShmArray
 
 log = get_logger(__name__)
 
@@ -133,18 +137,20 @@ async def increment_ohlc_buffer(
     # a given sample period.
     subs = sampler.subscribers.get(delay_s, ())
 
-    for ctx in subs:
+    for stream in subs:
         try:
-            await ctx.send_yield({'index': shm._last.value})
+            await stream.send({'index': shm._last.value})
         except (
            trio.BrokenResourceError,
            trio.ClosedResourceError
        ):
-            log.error(f'{ctx.chan.uid} dropped connection')
-            subs.remove(ctx)
+            log.error(
+                f'{stream._ctx.chan.uid} dropped connection'
+            )
+            subs.remove(stream)
 
 
-@tractor.stream
+@tractor.context
 async def iter_ohlc_periods(
     ctx: tractor.Context,
     delay_s: int,
@@ -158,18 +164,20 @@ async def iter_ohlc_periods(
     '''
     # add our subscription
     subs = sampler.subscribers.setdefault(delay_s, [])
-    subs.append(ctx)
+    await ctx.started()
+    async with ctx.open_stream() as stream:
+        subs.append(stream)
 
-    try:
-        # stream and block until cancelled
-        await trio.sleep_forever()
-    finally:
         try:
-            subs.remove(ctx)
-        except ValueError:
-            log.error(
-                f'iOHLC step stream was already dropped for {ctx.chan.uid}?'
-            )
+            # stream and block until cancelled
+            await trio.sleep_forever()
+        finally:
+            try:
+                subs.remove(stream)
+            except ValueError:
+                log.error(
+                    f'iOHLC step stream was already dropped {ctx.chan.uid}?'
+                )
 
 
 async def sample_and_broadcast(
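Under the new `@tractor.context` api the subscriber side now opens an explicit stream; a sketch of a consumer (portal acquisition assumed to happen elsewhere):

    import tractor

    async def consume_ohlc_steps(portal: tractor.Portal) -> None:
        async with (
            portal.open_context(
                iter_ohlc_periods,
                delay_s=1,
            ) as (ctx, first),
            ctx.open_stream() as stream,
        ):
            async for msg in stream:
                # each msg carries the latest shm "last" index
                print(msg['index'])
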
@@ -177,17 +185,19 @@ async def sample_and_broadcast(
     bus: '_FeedsBus',  # noqa
     shm: ShmArray,
     quote_stream: trio.abc.ReceiveChannel,
+    brokername: str,
     sum_tick_vlm: bool = True,
 
 ) -> None:
 
     log.info("Started shared mem bar writer")
 
+    overruns = Counter()
+
     # iterate stream delivered by broker
     async for quotes in quote_stream:
         # TODO: ``numba`` this!
-        for sym, quote in quotes.items():
+        for broker_symbol, quote in quotes.items():
 
             # TODO: in theory you can send the IPC msg *before* writing
             # to the sharedmem array to decrease latency, however, that
             # will require at least some way to prevent task switching
@@ -251,9 +261,15 @@ async def sample_and_broadcast(
             # end up triggering backpressure which will
             # eventually block this producer end of the feed and
             # thus other consumers still attached.
-            subs = bus._subscribers[sym.lower()]
+            subs = bus._subscribers[broker_symbol.lower()]
+
+            # NOTE: by default the broker backend doesn't append
+            # its own "name" into the fqsn schema (but maybe it
+            # should?) so we have to manually generate the correct
+            # key here.
+            bsym = f'{broker_symbol}.{brokername}'
+            lags: int = 0
 
-            lags = 0
             for (stream, tick_throttle) in subs:
 
                 try:
@@ -262,7 +278,9 @@ async def sample_and_broadcast(
                     # this is a send mem chan that likely
                     # pushes to the ``uniform_rate_send()`` below.
                     try:
-                        stream.send_nowait((sym, quote))
+                        stream.send_nowait(
+                            (bsym, quote)
+                        )
                     except trio.WouldBlock:
                         ctx = getattr(stream, '_ctx', None)
                         if ctx:
@@ -271,12 +289,22 @@ async def sample_and_broadcast(
                                 f'{ctx.channel.uid} !!!'
                             )
                         else:
+                            key = id(stream)
+                            overruns[key] += 1
                             log.warning(
                                 f'Feed overrun {bus.brokername} -> '
                                 f'feed @ {tick_throttle} Hz'
                             )
+                            if overruns[key] > 6:
+                                log.warning(
+                                    f'Dropping consumer {stream}'
+                                )
+                                await stream.aclose()
+                                raise trio.BrokenResourceError
                 else:
-                    await stream.send({sym: quote})
+                    await stream.send(
+                        {bsym: quote}
+                    )
 
                 if cs.cancelled_caught:
                     lags += 1
@@ -295,7 +323,7 @@ async def sample_and_broadcast(
                         '`brokerd`-quotes-feed connection'
                     )
                     if tick_throttle:
-                        assert stream.closed()
+                        assert stream._closed
 
                     # XXX: do we need to deregister here
                     # if it's done in the feed bus code?
@@ -399,7 +427,16 @@ async def uniform_rate_send(
         # rate timing exactly lul
         try:
             await stream.send({sym: first_quote})
-        except trio.ClosedResourceError:
+        except (
+            # NOTE: any of these can be raised by ``tractor``'s IPC
+            # transport-layer and we want to be highly resilient
+            # to consumers which crash or lose network connection.
+            # I.e. we **DO NOT** want to crash and propagate up to
+            # ``pikerd`` these kinds of errors!
+            trio.ClosedResourceError,
+            trio.BrokenResourceError,
+            ConnectionResetError,
+        ):
            # if the feed consumer goes down then drop
            # out of this rate limiter
            log.warning(f'{stream} closed')
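The broadcast loop above now re-keys each broker-native symbol with a `.<brokername>` suffix before fanning out to subscribers; schematically (names made up):

    broker_symbol, brokername = 'xbtusd', 'kraken'
    bsym = f'{broker_symbol}.{brokername}'
    assert bsym == 'xbtusd.kraken'
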
@@ -19,7 +19,6 @@ NumPy compatible shared memory buffers for real-time IPC streaming.
 
 """
 from __future__ import annotations
-from dataclasses import dataclass, asdict
 from sys import byteorder
 from typing import Optional
 from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
@@ -30,7 +29,7 @@ if _USE_POSIX:
 
 import tractor
 import numpy as np
-from pydantic import BaseModel, validator
+from pydantic import BaseModel
 
 from ..log import get_logger
 from ._source import base_iohlc_dtype
@@ -152,7 +151,8 @@ def _make_token(
 
 
 class ShmArray:
-    """A shared memory ``numpy`` (compatible) array API.
+    '''
+    A shared memory ``numpy`` (compatible) array API.
 
     An underlying shared memory buffer is allocated based on
     a user specified ``numpy.ndarray``. This fixed size array
@@ -162,7 +162,7 @@ class ShmArray:
     ``SharedInt`` interfaces) values such that multiple processes can
     interact with the same array using a synchronized-index.
 
-    """
+    '''
     def __init__(
         self,
         shmarr: np.ndarray,
@@ -209,7 +209,8 @@ class ShmArray:
 
     @property
     def array(self) -> np.ndarray:
-        '''Return an up-to-date ``np.ndarray`` view of the
+        '''
+        Return an up-to-date ``np.ndarray`` view of the
         so-far-written data to the underlying shm buffer.
 
         '''
@@ -238,19 +239,21 @@ class ShmArray:
         self,
         data: np.ndarray,
 
+        field_map: Optional[dict[str, str]] = None,
         prepend: bool = False,
         start: Optional[int] = None,
 
     ) -> int:
-        '''Ring buffer like "push" to append data
+        '''
+        Ring buffer like "push" to append data
         into the buffer and return updated "last" index.
 
         NB: no actual ring logic yet to give a "loop around" on overflow
         condition, lel.
 
         '''
-        self._post_init = True
         length = len(data)
-        index = start or self._last.value
+        index = start if start is not None else self._last.value
 
         if prepend:
             index = self._first.value - length
@@ -258,15 +261,20 @@ class ShmArray:
             if index < 0:
                 raise ValueError(
                     f'Array size of {self._len} was overrun during prepend.\n'
-                    'You have passed {abs(index)} too many datums.'
+                    f'You have passed {abs(index)} too many datums.'
                 )
 
         end = index + length
 
-        fields = self._write_fields
+        if field_map:
+            src_names, dst_names = zip(*field_map.items())
+        else:
+            dst_names = src_names = self._write_fields
 
         try:
-            self._array[fields][index:end] = data[fields][:]
+            self._array[
+                list(dst_names)
+            ][index:end] = data[list(src_names)][:]
 
             # NOTE: there was a race here between updating
             # the first and last indices and when the next reader
@@ -281,9 +289,13 @@ class ShmArray:
             else:
                 self._last.value = end
 
+            self._post_init = True
             return end
 
         except ValueError as err:
+            if field_map:
+                raise
+
             # should raise if diff detected
             self.diff_err_fields(data)
             raise err
@@ -339,7 +351,7 @@ class ShmArray:
 # how much is probably dependent on lifestyle
 _secs_in_day = int(60 * 60 * 24)
 # we try for 3 times but only on a run-every-other-day kinda week.
-_default_size = 3 * _secs_in_day
+_default_size = 6 * _secs_in_day
 
 
 def open_shm_array(
@@ -392,7 +404,24 @@ def open_shm_array(
         )
     )
 
-    last.value = first.value = int(_secs_in_day)
+    # start the "real-time" updated section after 5-days worth of 1s
+    # sampled OHLC. this allows appending up to a days worth from
+    # tick/quote feeds before having to flush to a (tsdb) storage
+    # backend, and looks something like,
+    #   -------------------------
+    #   |                   |  i
+    #   _________________________
+    #   <-----------------> <--->
+    #        history        real-time
+    #
+    # Once fully "prepended", the history section will leave the
+    # ``ShmArray._start.value: int = 0`` and the yet-to-be written
+    # real-time section will start at ``ShmArray.index: int``.

+    # this sets the index to 5/6 of the length of the buffer
+    # leaving a "days worth of second samples" for the real-time
+    # section.
+    last.value = first.value = int(5*_secs_in_day)
 
     shmarr = ShmArray(
         array,
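A sketch of pushing tsdb-schema records through the new `field_map` support, mirroring the column mapping used by the feed module later in this changeset (`tsdb_records` is a stand-in for a numpy structured array read from storage):

    shm.push(
        tsdb_records,   # assumed: structured array w/ marketstore names
        prepend=True,   # write into the leading "history" section
        field_map={
            'Epoch': 'time',
            'Open': 'open',
            'High': 'high',
            'Low': 'low',
            'Close': 'close',
            'Volume': 'volume',
        },
    )
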
@@ -17,12 +17,12 @@
 """
 numpy data source conversion helpers.
 """
+from __future__ import annotations
 from typing import Any
 import decimal
 
 import numpy as np
-import pandas as pd
-from pydantic import BaseModel, validate_arguments
+from pydantic import BaseModel
 
 # from numba import from_dtype
 
 
@@ -91,31 +91,113 @@ def ohlc_zeros(length: int) -> np.ndarray:
     return np.zeros(length, dtype=base_ohlc_dtype)
 
 
+def uncons_fqsn(fqsn: str) -> tuple[str, str, str]:
+    '''
+    Unpack a fully-qualified-symbol-name to ``tuple``.
+
+    '''
+    venue = ''
+    suffix = ''
+
+    # TODO: probably reverse the order of all this XD
+    tokens = fqsn.split('.')
+    if len(tokens) < 3:
+        # probably crypto
+        symbol, broker = tokens
+        return (
+            broker,
+            symbol,
+            '',
+        )
+
+    elif len(tokens) > 3:
+        symbol, venue, suffix, broker = tokens
+    else:
+        symbol, venue, broker = tokens
+        suffix = ''
+
+    # head, _, broker = fqsn.rpartition('.')
+    # symbol, _, suffix = head.rpartition('.')
+    return (
+        broker,
+        '.'.join([symbol, venue]),
+        suffix,
+    )
+
+
 class Symbol(BaseModel):
-    """I guess this is some kinda container thing for dealing with
+    '''
+    I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
 
-    Yah, i guess dats what it izz.
-    """
+    '''
     key: str
-    type_key: str  # {'stock', 'forex', 'future', ... etc.}
-    tick_size: float
-    lot_tick_size: float  # "volume" precision as min step value
-    tick_size_digits: int
-    lot_size_digits: int
+    tick_size: float = 0.01
+    lot_tick_size: float = 0.0  # "volume" precision as min step value
+    tick_size_digits: int = 2
+    lot_size_digits: int = 0
+    suffix: str = ''
     broker_info: dict[str, dict[str, Any]] = {}
 
     # specifies a "class" of financial instrument
     # ex. stock, future, option, bond etc.
 
+    # @validate_arguments
+    @classmethod
+    def from_broker_info(
+        cls,
+        broker: str,
+        symbol: str,
+        info: dict[str, Any],
+        suffix: str = '',
+
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:
+
+        tick_size = info.get('price_tick_size', 0.01)
+        lot_tick_size = info.get('lot_tick_size', 0.0)
+
+        return Symbol(
+            key=symbol,
+            tick_size=tick_size,
+            lot_tick_size=lot_tick_size,
+            tick_size_digits=float_digits(tick_size),
+            lot_size_digits=float_digits(lot_tick_size),
+            suffix=suffix,
+            broker_info={broker: info},
+        )
+
+    @classmethod
+    def from_fqsn(
+        cls,
+        fqsn: str,
+        info: dict[str, Any],
+
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:
+        broker, key, suffix = uncons_fqsn(fqsn)
+        return cls.from_broker_info(
+            broker,
+            key,
+            info=info,
+            suffix=suffix,
+        )
+
+    @property
+    def type_key(self) -> str:
+        return list(self.broker_info.values())[0]['asset_type']
+
     @property
     def brokers(self) -> list[str]:
         return list(self.broker_info.keys())
 
     def nearest_tick(self, value: float) -> float:
-        """Return the nearest tick value based on mininum increment.
+        '''
+        Return the nearest tick value based on minimum increment.
 
-        """
+        '''
         mult = 1 / self.tick_size
         return round(value * mult) / mult
 
@@ -131,92 +213,27 @@ class Symbol(BaseModel):
             self.key,
         )
 
+    def tokens(self) -> tuple[str]:
+        broker, key = self.front_feed()
+        if self.suffix:
+            return (key, self.suffix, broker)
+        else:
+            return (key, broker)
+
+    def front_fqsn(self) -> str:
+        tokens = self.tokens()
+        fqsn = '.'.join(tokens)
+        return fqsn
+
     def iterfqsns(self) -> list[str]:
-        return [
-            mk_fqsn(self.key, broker)
-            for broker in self.broker_info.keys()
-        ]
+        keys = []
+        for broker in self.broker_info.keys():
+            fqsn = mk_fqsn(self.key, broker)
+            if self.suffix:
+                fqsn += f'.{self.suffix}'
+            keys.append(fqsn)
+
+        return keys
 
-
-@validate_arguments
-def mk_symbol(
-
-    key: str,
-    type_key: str,
-    tick_size: float = 0.01,
-    lot_tick_size: float = 0,
-    broker_info: dict[str, Any] = {},
-
-) -> Symbol:
-    '''
-    Create and return an instrument description for the
-    "symbol" named as ``key``.
-
-    '''
-    return Symbol(
-        key=key,
-        type_key=type_key,
-        tick_size=tick_size,
-        lot_tick_size=lot_tick_size,
-        tick_size_digits=float_digits(tick_size),
-        lot_size_digits=float_digits(lot_tick_size),
-        broker_info=broker_info,
-    )
-
-
-def from_df(
-
-    df: pd.DataFrame,
-    source=None,
-    default_tf=None
-
-) -> np.recarray:
-    """Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``.
-
-    """
-    df.reset_index(inplace=True)
-
-    # hackery to convert field names
-    date = 'Date'
-    if 'date' in df.columns:
-        date = 'date'
-
-    # convert to POSIX time
-    df[date] = [d.timestamp() for d in df[date]]
-
-    # try to rename from some camel case
-    columns = {
-        'Date': 'time',
-        'date': 'time',
-        'Open': 'open',
-        'High': 'high',
-        'Low': 'low',
-        'Close': 'close',
-        'Volume': 'volume',
-
-        # most feeds are providing this over session anchored
-        'vwap': 'bar_wap',
-
-        # XXX: ib_insync calls this the "wap of the bar"
-        # but no clue what it actually is...
-        # https://github.com/pikers/piker/issues/119#issuecomment-729120988
-        'average': 'bar_wap',
-    }
-
-    df = df.rename(columns=columns)
-
-    for name in df.columns:
-        # if name not in base_ohlc_dtype.names[1:]:
-        if name not in base_ohlc_dtype.names:
-            del df[name]
-
-    # TODO: it turns out column access on recarrays is actually slower:
-    # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
-    # it might make sense to make these structured arrays?
-    array = df.to_records(index=False)
-    _nan_to_closest_num(array)
-
-    return array
-
-
 def _nan_to_closest_num(array: np.ndarray):
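Given the tokenizing logic in `uncons_fqsn()` above, the unpacking behaves like so (instrument names are illustrative):

    assert uncons_fqsn('xbtusd.kraken') == ('kraken', 'xbtusd', '')
    assert uncons_fqsn('mnq.globex.ib') == ('ib', 'mnq.globex', '')
    assert uncons_fqsn('mnq.globex.20220617.ib') == (
        'ib', 'mnq.globex', '20220617',
    )
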
@@ -16,26 +16,34 @@
 
 """
 marketstore cli.
 
 """
-from typing import List
 from functools import partial
 from pprint import pformat
 
+from anyio_marketstore import open_marketstore_client
 import trio
 import tractor
 import click
+import numpy as np
 
 from .marketstore import (
     get_client,
-    stream_quotes,
+    # stream_quotes,
     ingest_quote_stream,
-    _url,
+    # _url,
     _tick_tbk_ids,
     mk_tbk,
 )
 from ..cli import cli
 from .. import watchlists as wl
 from ..log import get_logger
+from ._sharedmem import (
+    maybe_open_shm_array,
+)
+from ._source import (
+    base_iohlc_dtype,
+)
 
 
 log = get_logger(__name__)
@@ -49,51 +57,58 @@ log = get_logger(__name__)
 )
 @click.argument('names', nargs=-1)
 @click.pass_obj
-def ms_stream(config: dict, names: List[str], url: str):
-    """Connect to a marketstore time bucket stream for (a set of) symbols(s)
+def ms_stream(
+    config: dict,
+    names: list[str],
+    url: str,
+) -> None:
+    '''
+    Connect to a marketstore time bucket stream for (a set of) symbol(s)
     and print to console.
-    """
+
+    '''
     async def main():
-        async for quote in stream_quotes(symbols=names):
-            log.info(f"Received quote:\n{quote}")
+        # async for quote in stream_quotes(symbols=names):
+        #     log.info(f"Received quote:\n{quote}")
+        ...
 
     trio.run(main)
 
 
-@cli.command()
-@click.option(
-    '--url',
-    default=_url,
-    help='HTTP URL of marketstore instance'
-)
-@click.argument('names', nargs=-1)
-@click.pass_obj
-def ms_destroy(config: dict, names: List[str], url: str) -> None:
-    """Destroy symbol entries in the local marketstore instance.
-    """
-    async def main():
-        nonlocal names
-        async with get_client(url) as client:
-
-            if not names:
-                names = await client.list_symbols()
-
-            # default is to wipe db entirely.
-            answer = input(
-                "This will entirely wipe you local marketstore db @ "
-                f"{url} of the following symbols:\n {pformat(names)}"
-                "\n\nDelete [N/y]?\n")
-
-            if answer == 'y':
-                for sym in names:
-                    # tbk = _tick_tbk.format(sym)
-                    tbk = tuple(sym, *_tick_tbk_ids)
-                    print(f"Destroying {tbk}..")
-                    await client.destroy(mk_tbk(tbk))
-            else:
-                print("Nothing deleted.")
-
-    tractor.run(main)
+# @cli.command()
+# @click.option(
+#     '--url',
+#     default=_url,
+#     help='HTTP URL of marketstore instance'
+# )
+# @click.argument('names', nargs=-1)
+# @click.pass_obj
+# def ms_destroy(config: dict, names: list[str], url: str) -> None:
+#     """Destroy symbol entries in the local marketstore instance.
+#     """
+#     async def main():
+#         nonlocal names
+#         async with get_client(url) as client:
+#
+#             if not names:
+#                 names = await client.list_symbols()
+#
+#             # default is to wipe db entirely.
+#             answer = input(
+#                 "This will entirely wipe your local marketstore db @ "
+#                 f"{url} of the following symbols:\n {pformat(names)}"
+#                 "\n\nDelete [N/y]?\n")
+#
+#             if answer == 'y':
+#                 for sym in names:
+#                     # tbk = _tick_tbk.format(sym)
+#                     tbk = tuple(sym, *_tick_tbk_ids)
+#                     print(f"Destroying {tbk}..")
+#                     await client.destroy(mk_tbk(tbk))
+#             else:
+#                 print("Nothing deleted.")
+#
+#     tractor.run(main)
 
 
 @cli.command()
@@ -102,41 +117,53 @@ def ms_destroy(config: dict, names: list[str], url: str) -> None:
     is_flag=True,
     help='Enable tractor logging')
 @click.option(
-    '--url',
-    default=_url,
-    help='HTTP URL of marketstore instance'
+    '--host',
+    default='localhost'
 )
-@click.argument('name', nargs=1, required=True)
+@click.option(
+    '--port',
+    default=5993
+)
+@click.argument('symbols', nargs=-1)
 @click.pass_obj
-def ms_shell(config, name, tl, url):
-    """Start an IPython shell ready to query the local marketstore db.
-    """
-    async def main():
-        async with get_client(url) as client:
-            query = client.query  # noqa
-            # TODO: write magics to query marketstore
-            from IPython import embed
-            embed()
-
-    tractor.run(main)
+def storesh(
+    config,
+    tl,
+    host,
+    port,
+    symbols: list[str],
+):
+    '''
+    Start an IPython shell ready to query the local marketstore db.
+
+    '''
+    from piker.data.marketstore import tsdb_history_update
+    from piker._daemon import open_piker_runtime
+
+    async def main():
+        nonlocal symbols
+
+        async with open_piker_runtime(
+            'storesh',
+            enable_modules=['piker.data._ahab'],
+        ):
+            symbol = symbols[0]
+            await tsdb_history_update(symbol)
+
+    trio.run(main)
 
 
 @cli.command()
 @click.option('--test-file', '-t', help='Test quote stream file')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option(
-    '--url',
-    default=_url,
-    help='HTTP URL of marketstore instance'
-)
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
-def ingest(config, name, test_file, tl, url):
-    """Ingest real-time broker quotes and ticks to a marketstore instance.
-    """
+def ingest(config, name, test_file, tl):
+    '''
+    Ingest real-time broker quotes and ticks to a marketstore instance.
+
+    '''
     # global opts
-    brokermod = config['brokermod']
     loglevel = config['loglevel']
     tractorloglevel = config['tractorloglevel']
     # log = config['log']
@@ -145,15 +172,25 @@ def ingest(config, name, test_file, tl):
     watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins)
     symbols = watchlists[name]
 
-    tractor.run(
-        partial(
-            ingest_quote_stream,
-            symbols,
-            brokermod.name,
-            tries=1,
-            loglevel=loglevel,
-        ),
-        name='ingest_marketstore',
-        loglevel=tractorloglevel,
-        debug_mode=True,
-    )
+    grouped_syms = {}
+    for sym in symbols:
+        symbol, _, provider = sym.rpartition('.')
+        if provider not in grouped_syms:
+            grouped_syms[provider] = []
+
+        grouped_syms[provider].append(symbol)
+
+    async def entry_point():
+        async with tractor.open_nursery() as n:
+            for provider, symbols in grouped_syms.items():
+                await n.run_in_actor(
+                    ingest_quote_stream,
+                    name='ingest_marketstore',
+                    symbols=symbols,
+                    brokername=provider,
+                    tries=1,
+                    actorloglevel=loglevel,
+                    loglevel=tractorloglevel
+                )
+
+    tractor.run(entry_point)
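The `ingest` rewrite groups watchlist entries per provider via `str.rpartition`; a quick sketch of that grouping on made-up symbols:

    symbols = ['btcusdt.binance', 'ethusdt.binance', 'mnq.globex.ib']
    grouped_syms: dict[str, list[str]] = {}
    for sym in symbols:
        symbol, _, provider = sym.rpartition('.')
        grouped_syms.setdefault(provider, []).append(symbol)

    # -> {'binance': ['btcusdt', 'ethusdt'], 'ib': ['mnq.globex']}
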
@@ -20,6 +20,7 @@ Data feed apis and infra.
 This module is enabled for ``brokerd`` daemons.
 
 """
+from __future__ import annotations
 from dataclasses import dataclass, field
 from contextlib import asynccontextmanager
 from functools import partial
@@ -30,17 +31,21 @@ from typing import (
     Awaitable,
 )
 
+import pendulum
 import trio
 from trio.abc import ReceiveChannel
 from trio_typing import TaskStatus
 import tractor
 from pydantic import BaseModel
+import numpy as np
 
 from ..brokers import get_brokermod
 from .._cacheables import maybe_open_context
 from ..log import get_logger, get_console_log
 from .._daemon import (
     maybe_spawn_brokerd,
+    check_for_service,
 )
 from ._sharedmem import (
     maybe_open_shm_array,
@@ -50,9 +55,8 @@ from ._sharedmem import (
 from .ingest import get_ingestormod
 from ._source import (
     base_iohlc_dtype,
-    mk_symbol,
     Symbol,
-    mk_fqsn,
+    uncons_fqsn,
 )
 from ..ui import _search
 from ._sampling import (
@@ -125,7 +129,7 @@ class _FeedsBus(BaseModel):
 
     # def cancel_task(
     #     self,
-    #     task: trio.lowlevel.Task
+    #     task: trio.lowlevel.Task,
     # ) -> bool:
     #     ...
 
@@ -189,12 +193,26 @@ async def _setup_persistent_brokerd(
     await trio.sleep_forever()
 
 
+async def start_backfill(
+    mod: ModuleType,
+    fqsn: str,
+    shm: ShmArray,
+
+    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+
+) -> int:
+
+    return await mod.backfill_bars(
+        fqsn,
+        shm,
+        task_status=task_status,
+    )
+
+
 async def manage_history(
     mod: ModuleType,
-    shm: ShmArray,
     bus: _FeedsBus,
-    symbol: str,
-    we_opened_shm: bool,
+    fqsn: str,
     some_data_ready: trio.Event,
     feed_is_live: trio.Event,
 
@@ -208,48 +226,153 @@ async def manage_history(
     buffer.
 
     '''
-    task_status.started()
-
-    opened = we_opened_shm
+    # (maybe) allocate shm array for this broker/symbol which will
+    # be used for fast near-term history capture and processing.
+    shm, opened = maybe_open_shm_array(
+        key=fqsn,
+
+        # use any broker defined ohlc dtype:
+        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
+
+        # we expect the sub-actor to write
+        readonly=False,
+    )
     # TODO: history validation
-    # assert opened, f'Persistent shm for {symbol} was already open?!'
-    # if not opened:
-    #     raise RuntimeError("Persistent shm for sym was already open?!")
+    if not opened:
+        raise RuntimeError(
+            "Persistent shm for sym was already open?!"
+        )
 
-    if opened:
-        # ask broker backend for new history
+    log.info('Scanning for existing `marketstored`')
+
+    is_up = await check_for_service('marketstored')
+
+    # for now only do backfilling if no tsdb can be found
+    do_legacy_backfill = not is_up and opened
+
+    open_history_client = getattr(mod, 'open_history_client', None)
+
+    if is_up and opened and open_history_client:
+
+        log.info('Found existing `marketstored`')
+        from . import marketstore
+
+        async with marketstore.open_storage_client(
+            fqsn,
+        ) as storage:
+
+            tsdb_arrays = await storage.read_ohlcv(fqsn)
+
+            if not tsdb_arrays:
+                do_legacy_backfill = True
+
+            else:
+                log.info(f'Loaded tsdb history {tsdb_arrays}')
+
+                fastest = list(tsdb_arrays.values())[0]
+                times = fastest['Epoch']
+                first, last = times[0], times[-1]
+                first_tsdb_dt, last_tsdb_dt = map(
+                    pendulum.from_timestamp, [first, last]
+                )
+
+                # TODO: this should be used verbatim for the pure
+                # shm backfiller approach below.
+
+                def diff_history(
+                    array,
+                    start_dt,
+                    end_dt,
+
+                ) -> np.ndarray:
+
+                    s_diff = (last_tsdb_dt - start_dt).seconds
+
+                    # if we detect a partial frame's worth of data
+                    # that is new, slice out only that history and
+                    # write to shm.
+                    if s_diff > 0:
+                        assert last_tsdb_dt > start_dt
+                        selected = array['time'] > last_tsdb_dt.timestamp()
+                        to_push = array[selected]
+                        log.info(
+                            f'Pushing partial frame {to_push.size} to shm'
+                        )
+                        return to_push
+
+                    else:
+                        return array
+
+                # start history anal and load missing new data via backend.
+                async with open_history_client(fqsn) as hist:
+
+                    # get latest query's worth of history all the way
+                    # back to what is recorded in the tsdb
+                    array, start_dt, end_dt = await hist(end_dt='')
+                    to_push = diff_history(array, start_dt, end_dt)
+                    shm.push(to_push)
+
+                    # let caller unblock and deliver latest history frame
+                    task_status.started(shm)
+                    some_data_ready.set()
+
+                    # pull new history frames until we hit latest
+                    # already in the tsdb
+                    while start_dt > last_tsdb_dt:
+                        array, start_dt, end_dt = await hist(end_dt=start_dt)
+                        to_push = diff_history(array, start_dt, end_dt)
+                        shm.push(to_push, prepend=True)
+
+                # TODO: see if there's faster multi-field reads:
+                # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
+                # re-index with a `time` and index field
+                shm.push(
+                    fastest[-shm._first.value:],
+
+                    # insert the history pre a "days worth" of samples
+                    # to leave some real-time buffer space at the end.
+                    prepend=True,
+                    # start=shm._len - _secs_in_day,
+                    field_map={
+                        'Epoch': 'time',
+                        'Open': 'open',
+                        'High': 'high',
+                        'Low': 'low',
+                        'Close': 'close',
+                        'Volume': 'volume',
+                    },
+                )
+
+                # TODO: write new data to tsdb to be ready for the next
+                # read.
+
+    if do_legacy_backfill:
+        # do a legacy incremental backfill from the provider.
+        log.info('No existing `marketstored` found..')
+
+        bfqsn = fqsn.replace('.' + mod.name, '')
         # start history backfill task ``backfill_bars()`` is
         # a required backend func this must block until shm is
         # filled with first set of ohlc bars
-        cs = await bus.nursery.start(mod.backfill_bars, symbol, shm)
+        await bus.nursery.start(
+            start_backfill,
+            mod,
+            bfqsn,
+            shm,
+        )
 
-    # indicate to caller that feed can be delivered to
-    # remote requesting client since we've loaded history
-    # data that can be used.
-    some_data_ready.set()
+        # yield back after client connect with filled shm
+        task_status.started(shm)
 
-    # detect sample step size for sampled historical data
-    times = shm.array['time']
-    delay_s = times[-1] - times[times != times[-1]][-1]
-
-    # begin real-time updates of shm and tsb once the feed
-    # goes live.
-    await feed_is_live.wait()
-
-    if opened:
-        sampler.ohlcv_shms.setdefault(delay_s, []).append(shm)
-
-        # start shm incrementing for OHLC sampling at the current
-        # detected sampling period if one dne.
-        if sampler.incrementers.get(delay_s) is None:
-            cs = await bus.start_task(
-                increment_ohlc_buffer,
-                delay_s,
-            )
+        # indicate to caller that feed can be delivered to
+        # remote requesting client since we've loaded history
+        # data that can be used.
+        some_data_ready.set()
 
+    # history retrieval loop depending on user interaction and thus
+    # a small RPC-proto for remotely controlling what data is loaded
+    # for viewing.
     await trio.sleep_forever()
-    cs.cancel()
 
 
 async def allocate_persistent_feed(
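The `diff_history()` helper above only forwards rows strictly newer than the last timestamp recorded in the tsdb; in isolation the slicing amounts to (toy data):

    import numpy as np

    array = np.array(
        [(10.0, 1.0), (20.0, 1.1), (30.0, 1.2)],
        dtype=[('time', 'f8'), ('close', 'f8')],
    )
    last_tsdb_ts = 15.0
    to_push = array[array['time'] > last_tsdb_ts]  # rows @ t=20, 30
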
@@ -257,6 +380,7 @@ async def allocate_persistent_feed(
     brokername: str,
     symbol: str,
     loglevel: str,
+    start_stream: bool = True,
 
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 
@@ -279,20 +403,6 @@ async def allocate_persistent_feed(
     except ImportError:
         mod = get_ingestormod(brokername)
 
-    fqsn = mk_fqsn(brokername, symbol)
-
-    # (maybe) allocate shm array for this broker/symbol which will
-    # be used for fast near-term history capture and processing.
-    shm, opened = maybe_open_shm_array(
-        key=fqsn,
-
-        # use any broker defined ohlc dtype:
-        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
-
-        # we expect the sub-actor to write
-        readonly=False,
-    )
-
     # mem chan handed to broker backend so it can push real-time
     # quotes to this task for sampling and history storage (see below).
     send, quote_stream = trio.open_memory_channel(10)
@@ -301,30 +411,9 @@ async def allocate_persistent_feed(
     some_data_ready = trio.Event()
     feed_is_live = trio.Event()
 
-    # run 2 tasks:
-    # - a history loader / maintainer
-    # - a real-time streamer which consumers and sends new data to any
-    #   consumers as well as writes to storage backends (as configured).
-
-    # XXX: neither of these will raise but will cause an inf hang due to:
-    # https://github.com/python-trio/trio/issues/2258
-    # bus.nursery.start_soon(
-    # await bus.start_task(
-
-    await bus.nursery.start(
-        manage_history,
-        mod,
-        shm,
-        bus,
-        symbol,
-        opened,
-        some_data_ready,
-        feed_is_live,
-    )
-
     # establish broker backend quote stream by calling
     # ``stream_quotes()``, which is a required broker backend endpoint.
-    init_msg, first_quotes = await bus.nursery.start(
+    init_msg, first_quote = await bus.nursery.start(
         partial(
             mod.stream_quotes,
             send_chan=send,
@@ -333,11 +422,39 @@ async def allocate_persistent_feed(
             loglevel=loglevel,
         )
     )
+    # the broker-specific fully qualified symbol name,
+    # but ensure it is lower-cased for external use.
+    bfqsn = init_msg[symbol]['fqsn'].lower()
+    init_msg[symbol]['fqsn'] = bfqsn
+
+    # HISTORY, run 2 tasks:
+    # - a history loader / maintainer
+    # - a real-time streamer which consumes and sends new data to any
+    #   consumers as well as writes to storage backends (as configured).
+
+    # XXX: neither of these will raise but will cause an inf hang due to:
+    # https://github.com/python-trio/trio/issues/2258
+    # bus.nursery.start_soon(
+    # await bus.start_task(
+    shm = await bus.nursery.start(
+        manage_history,
+        mod,
+        bus,
+        '.'.join((bfqsn, brokername)),
+        some_data_ready,
+        feed_is_live,
+    )
+
     # we hand an IPC-msg compatible shm token to the caller so it
     # can read directly from the memory which will be written by
     # this task.
-    init_msg[symbol]['shm_token'] = shm.token
+    msg = init_msg[symbol]
+    msg['shm_token'] = shm.token
+
+    # true fqsn
+    fqsn = '.'.join((bfqsn, brokername))
+    # add a fqsn entry that includes the ``.<broker>`` suffix
+    init_msg[fqsn] = msg
 
     # TODO: pretty sure we don't need this? why not just leave 1s as
     # the fastest "sample period" since we'll probably always want that
@@ -350,12 +467,42 @@ async def allocate_persistent_feed(
     log.info(f'waiting on history to load: {fqsn}')
     await some_data_ready.wait()
 
-    bus.feeds[symbol.lower()] = (init_msg, first_quotes)
-    task_status.started((init_msg, first_quotes))
+    # append ``.<broker>`` suffix to each quote symbol
+    bsym = symbol + f'.{brokername}'
+    generic_first_quotes = {
+        bsym: first_quote,
+        fqsn: first_quote,
+    }
 
-    # backend will indicate when real-time quotes have begun.
+    bus.feeds[symbol] = bus.feeds[fqsn] = (
+        init_msg,
+        generic_first_quotes,
+    )
+    # for ambiguous names we simply apply the retrieved
+    # feed to that name (for now).
+
+    # task_status.started((init_msg, generic_first_quotes))
+    task_status.started()
+
+    if not start_stream:
+        await trio.sleep_forever()
+
+    # begin real-time updates of shm and tsb once the feed goes live and
+    # the backend will indicate when real-time quotes have begun.
     await feed_is_live.wait()
 
+    # start shm incrementer task for OHLC style sampling
+    # at the current detected step period.
+    times = shm.array['time']
+    delay_s = times[-1] - times[times != times[-1]][-1]
+
+    sampler.ohlcv_shms.setdefault(delay_s, []).append(shm)
+    if sampler.incrementers.get(delay_s) is None:
+        await bus.start_task(
+            increment_ohlc_buffer,
+            delay_s,
+        )
+
     sum_tick_vlm: bool = init_msg.get(
         'shm_write_opts', {}
     ).get('sum_tick_vlm', True)
@@ -366,10 +513,11 @@ async def allocate_persistent_feed(
         bus,
         shm,
         quote_stream,
+        brokername,
         sum_tick_vlm
     )
     finally:
-        log.warning(f'{symbol}@{brokername} feed task terminated')
+        log.warning(f'{fqsn} feed task terminated')
 
 
 @tractor.context
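Both the broker-ambiguous key and the fully-qualified one now map to the same feed entry; schematically (names and quote msg hypothetical):

    symbol, brokername = 'mnq.globex', 'ib'
    bfqsn = 'mnq.globex'                   # broker-provided fqsn
    fqsn = '.'.join((bfqsn, brokername))   # 'mnq.globex.ib'
    first_quote = {'last': 0.0}            # stand-in quote msg

    generic_first_quotes = {
        symbol + f'.{brokername}': first_quote,
        fqsn: first_quote,
    }
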
@@ -402,37 +550,27 @@ async def open_feed_bus(
     assert 'brokerd' in tractor.current_actor().name
 
     bus = get_feed_bus(brokername)
-    bus._subscribers.setdefault(symbol, [])
-    fqsn = mk_fqsn(brokername, symbol)
-
-    entry = bus.feeds.get(symbol)
 
     # if no cached feed for this symbol has been created for this
     # brokerd yet, start persistent stream and shm writer task in
     # service nursery
+    entry = bus.feeds.get(symbol)
     if entry is None:
-        if not start_stream:
-            raise RuntimeError(
-                f'No stream feed exists for {fqsn}?\n'
-                f'You may need a `brokerd` started first.'
-            )
-
-        # allocate a new actor-local stream bus which will persist for
-        # this `brokerd`.
+        # allocate a new actor-local stream bus which
+        # will persist for this `brokerd`.
         async with bus.task_lock:
-            init_msg, first_quotes = await bus.nursery.start(
+            await bus.nursery.start(
                 partial(
                     allocate_persistent_feed,
 
                     bus=bus,
                     brokername=brokername,
 
                     # here we pass through the selected symbol in native
                     # "format" (i.e. upper vs. lowercase depending on
                     # provider).
                     symbol=symbol,
 
                     loglevel=loglevel,
+                    start_stream=start_stream,
                 )
             )
     # TODO: we can remove this?
@@ -442,9 +580,30 @@ async def open_feed_bus(
     # subscriber
     init_msg, first_quotes = bus.feeds[symbol]
 
+    msg = init_msg[symbol]
+    bfqsn = msg['fqsn'].lower()
+
+    # true fqsn
+    fqsn = '.'.join([bfqsn, brokername])
+    assert fqsn in first_quotes
+    assert bus.feeds[fqsn]
+
+    # broker-ambiguous symbol (provided on cli - eg. mnq.globex.ib)
+    bsym = symbol + f'.{brokername}'
+    assert bsym in first_quotes
+
+    # we use the broker-specific fqsn (bfqsn) for
+    # the sampler subscription since the backend isn't (yet)
+    # expected to append its own name to the fqsn, so we filter
+    # on keys which *do not* include that name (e.g. .ib).
+    bus._subscribers.setdefault(bfqsn, [])
+
     # send this even to subscribers to existing feed?
     # deliver initial info message a first quote asap
-    await ctx.started((init_msg, first_quotes))
+    await ctx.started((
+        init_msg,
+        first_quotes,
+    ))
 
     if not start_stream:
         log.warning(f'Not opening real-time stream for {fqsn}')
@@ -454,12 +613,15 @@ async def open_feed_bus(
     async with (
         ctx.open_stream() as stream,
     ):
+        # re-send to trigger display loop cycle (necessary especially
+        # when the mkt is closed and no real-time messages are
+        # expected).
+        await stream.send({fqsn: first_quotes})
+
+        # open a bg task which receives quotes over a mem chan
+        # and only pushes them to the target actor-consumer at
+        # a max ``tick_throttle`` instantaneous rate.
         if tick_throttle:
 
-            # open a bg task which receives quotes over a mem chan
-            # and only pushes them to the target actor-consumer at
|
|
||||||
# a max ``tick_throttle`` instantaneous rate.
|
|
||||||
|
|
||||||
send, recv = trio.open_memory_channel(2**10)
|
send, recv = trio.open_memory_channel(2**10)
|
||||||
cs = await bus.start_task(
|
cs = await bus.start_task(
|
||||||
uniform_rate_send,
|
uniform_rate_send,
|
||||||
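Note: the throttle design here is producer-pushes-into-a-bounded-channel, background-task-drains-at-a-fixed-rate. A minimal sketch of that shape (illustrative only, not piker's actual `uniform_rate_send`):

    import trio

    async def rate_limited_relay(
        rate_hz: float,
        recv: trio.MemoryReceiveChannel,
        send_to_consumer,
    ) -> None:
        # drain any backlog down to the freshest quote, forward it,
        # then sleep one period so the consumer sees at most rate_hz
        # messages per second.
        period = 1 / rate_hz
        while True:
            quote = await recv.receive()
            try:
                while True:
                    quote = recv.receive_nowait()
            except trio.WouldBlock:
                pass
            await send_to_consumer(quote)
            await trio.sleep(period)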
@@ -472,12 +634,15 @@ async def open_feed_bus(
         else:
             sub = (stream, tick_throttle)

-        subs = bus._subscribers[symbol]
+        subs = bus._subscribers[bfqsn]
         subs.append(sub)

         try:
             uid = ctx.chan.uid

+            # ctrl protocol for start/stop of quote streams based on UI
+            # state (eg. don't need a stream when a symbol isn't being
+            # displayed).
             async for msg in stream:

                 if msg == 'pause':

@@ -502,7 +667,7 @@ async def open_feed_bus(
                 # n.cancel_scope.cancel()
                 cs.cancel()
             try:
-                bus._subscribers[symbol].remove(sub)
+                bus._subscribers[bfqsn].remove(sub)
             except ValueError:
                 log.warning(f'{sub} for {symbol} was already removed?')

@@ -519,19 +684,20 @@ async def open_sample_step_stream(
     # created for all practical purposes
     async with maybe_open_context(
         acm_func=partial(
-            portal.open_stream_from,
+            portal.open_context,
             iter_ohlc_periods,
         ),
         kwargs={'delay_s': delay_s},

-    ) as (cache_hit, istream):
-        if cache_hit:
-            # add a new broadcast subscription for the quote stream
-            # if this feed is likely already in use
-            async with istream.subscribe() as bistream:
-                yield bistream
-        else:
-            yield istream
+    ) as (cache_hit, (ctx, first)):
+        async with ctx.open_stream() as istream:
+            if cache_hit:
+                # add a new broadcast subscription for the quote stream
+                # if this feed is likely already in use
+                async with istream.subscribe() as bistream:
+                    yield bistream
+            else:
+                yield istream
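Note: the change above swaps a one-shot `open_stream_from` for a cached `open_context` plus an explicitly opened stream, so a second consumer of the same sample-step stream re-uses the first task's stream via a broadcast tap. The "first caller opens, later callers subscribe" shape, as a self-contained sketch (assuming a stream type with a `.subscribe()` broadcast method as used above):

    from contextlib import asynccontextmanager as acm

    @acm
    async def tap_or_open(cache_hit: bool, istream):
        # first consumer: use the underlying stream directly;
        # later consumers: tap a broadcast copy so every consumer
        # sees every message.
        if cache_hit:
            async with istream.subscribe() as bistream:
                yield bistream
        else:
            yield istream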
 @dataclass
@@ -627,10 +793,10 @@ async def install_brokerd_search(

 @asynccontextmanager
 async def open_feed(
-    brokername: str,
-    symbols: list[str],
-    loglevel: Optional[str] = None,
+    fqsns: list[str],
+
+    loglevel: Optional[str] = None,

     backpressure: bool = True,
     start_stream: bool = True,
     tick_throttle: Optional[float] = None,  # Hz

@@ -640,7 +806,10 @@ async def open_feed(
     Open a "data feed" which provides streamed real-time quotes.

     '''
-    sym = symbols[0].lower()
+    fqsn = fqsns[0].lower()
+
+    brokername, key, suffix = uncons_fqsn(fqsn)
+    bfqsn = fqsn.replace('.' + brokername, '')

     try:
         mod = get_brokermod(brokername)

@@ -661,7 +830,7 @@ async def open_feed(
         portal.open_context(
             open_feed_bus,
             brokername=brokername,
-            symbol=sym,
+            symbol=bfqsn,
             loglevel=loglevel,
             start_stream=start_stream,
             tick_throttle=tick_throttle,

@@ -678,9 +847,10 @@ async def open_feed(
     ):
         # we can only read from shm
         shm = attach_shm_array(
-            token=init_msg[sym]['shm_token'],
+            token=init_msg[bfqsn]['shm_token'],
             readonly=True,
         )
+        assert fqsn in first_quotes

         feed = Feed(
             name=brokername,

@@ -693,17 +863,15 @@ async def open_feed(
         )

         for sym, data in init_msg.items():

             si = data['symbol_info']
+            fqsn = data['fqsn'] + f'.{brokername}'
-            symbol = mk_symbol(
-                key=sym,
-                type_key=si.get('asset_type', 'forex'),
-                tick_size=si.get('price_tick_size', 0.01),
-                lot_tick_size=si.get('lot_tick_size', 0.0),
+            symbol = Symbol.from_fqsn(
+                fqsn,
+                info=si,
             )
-            symbol.broker_info[brokername] = si

+            # symbol.broker_info[brokername] = si
+            feed.symbols[fqsn] = symbol
             feed.symbols[sym] = symbol

         # cast shm dtype to list... can't member why we need this

@@ -727,26 +895,27 @@ async def open_feed(
 @asynccontextmanager
 async def maybe_open_feed(

-    brokername: str,
-    symbols: list[str],
+    fqsns: list[str],
     loglevel: Optional[str] = None,

     **kwargs,

-) -> (Feed, ReceiveChannel[dict[str, Any]]):
+) -> (
+    Feed,
+    ReceiveChannel[dict[str, Any]],
+):
     '''
     Maybe open a data to a ``brokerd`` daemon only if there is no
     local one for the broker-symbol pair, if one is cached use it wrapped
     in a tractor broadcast receiver.

     '''
-    sym = symbols[0].lower()
+    fqsn = fqsns[0]

     async with maybe_open_context(
         acm_func=open_feed,
         kwargs={
-            'brokername': brokername,
-            'symbols': [sym],
+            'fqsns': fqsns,
             'loglevel': loglevel,
             'tick_throttle': kwargs.get('tick_throttle'),

@@ -754,11 +923,12 @@ async def maybe_open_feed(
             'backpressure': kwargs.get('backpressure', True),
             'start_stream': kwargs.get('start_stream', True),
         },
-        key=sym,
+        key=fqsn,

     ) as (cache_hit, feed):

         if cache_hit:
-            log.info(f'Using cached feed for {brokername}.{sym}')
+            log.info(f'Using cached feed for {fqsn}')
             # add a new broadcast subscription for the quote stream
             # if this feed is likely already in use
             async with feed.stream.subscribe() as bstream:
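Note: with the reworked signatures above, callers now pass fully qualified symbol names instead of a separate broker + symbol pair. A hedged usage sketch (the fqsn `'mnq.globex.ib'` is the example format from this diff's own comments; a running brokerd stack is assumed):

    async def consume_one_feed():
        async with maybe_open_feed(
            ['mnq.globex.ib'],
            loglevel='info',
        ) as (feed, stream):
            async for quotes in stream:
                for fqsn, quote in quotes.items():
                    print(fqsn, quote)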
@@ -14,36 +14,58 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 ``marketstore`` integration.

 - client management routines
 - ticK data ingest routines
 - websocket client for subscribing to write triggers
 - todo: tick sequence stream-cloning for testing
-- todo: docker container management automation
-"""
-from contextlib import asynccontextmanager
-from typing import Dict, Any, List, Callable, Tuple
+'''
+from contextlib import asynccontextmanager as acm
+from pprint import pformat
+from typing import (
+    Any,
+    Optional,
+    Union,
+)
 import time
 from math import isnan

+from bidict import bidict
 import msgpack
+import pyqtgraph as pg
 import numpy as np
 import pandas as pd
-import pymarketstore as pymkts
 import tractor
 from trio_websocket import open_websocket_url
+from anyio_marketstore import (
+    open_marketstore_client,
+    MarketstoreClient,
+    Params,
+)
+import purerpc
+
+from .feed import maybe_open_feed
 from ..log import get_logger, get_console_log
-from ..data import open_feed


 log = get_logger(__name__)

-_tick_tbk_ids: Tuple[str, str] = ('1Sec', 'TICK')
+_tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
 _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)
-_url: str = 'http://localhost:5993/rpc'
+
+_tick_dt = [
+    # these two are required for as a "primary key"
+    ('Epoch', 'i8'),
+    ('Nanoseconds', 'i4'),
+    ('IsTrade', 'i1'),
+    ('IsBid', 'i1'),
+    ('Price', 'f4'),
+    ('Size', 'f4')
+]

 _quote_dt = [
     # these two are required for as a "primary key"
     ('Epoch', 'i8'),

@@ -61,6 +83,7 @@ _quote_dt = [
     # ('brokerd_ts', 'i64'),
     # ('VWAP', 'f4')
 ]

 _quote_tmp = {}.fromkeys(dict(_quote_dt).keys(), np.nan)
 _tick_map = {
     'Up': 1,

@@ -69,28 +92,39 @@ _tick_map = {
     None: np.nan,
 }

-class MarketStoreError(Exception):
-    "Generic marketstore client error"
+_ohlcv_dt = [
+    # these two are required for as a "primary key"
+    ('Epoch', 'i8'),
+    # ('Nanoseconds', 'i4'),
+
+    # ohlcv sampling
+    ('Open', 'f4'),
+    ('High', 'f4'),
+    ('Low', 'i8'),
+    ('Close', 'i8'),
+    ('Volume', 'f4'),
+]

-def err_on_resp(response: dict) -> None:
-    """Raise any errors found in responses from client request.
-    """
-    responses = response['responses']
-    if responses is not None:
-        for r in responses:
-            err = r['error']
-            if err:
-                raise MarketStoreError(err)
+def mk_tbk(keys: tuple[str, str, str]) -> str:
+    '''
+    Generate a marketstore table key from a tuple.
+    Converts,
+    ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"```
+
+    '''
+    return '/'.join(keys)


 def quote_to_marketstore_structarray(
-    quote: Dict[str, Any],
-    last_fill: str,
+    quote: dict[str, Any],
+    last_fill: Optional[float]

 ) -> np.array:
-    """Return marketstore writeable structarray from quote ``dict``.
-    """
+    '''
+    Return marketstore writeable structarray from quote ``dict``.
+
+    '''
     if last_fill:
         # new fill bby
         now = timestamp(last_fill)

@@ -101,7 +135,7 @@ def quote_to_marketstore_structarray(

     secs, ns = now / 10**9, now % 10**9

-    # pack into List[Tuple[str, Any]]
+    # pack into list[tuple[str, Any]]
     array_input = []

     # insert 'Epoch' entry first and then 'Nanoseconds'.

@@ -123,146 +157,379 @@ def quote_to_marketstore_structarray(
     return np.array([tuple(array_input)], dtype=_quote_dt)


-def timestamp(datestr: str) -> int:
-    """Return marketstore compatible 'Epoch' integer in nanoseconds
-    from a date formatted str.
-    """
-    return int(pd.Timestamp(datestr).value)
+def timestamp(date, **kwargs) -> int:
+    '''
+    Return marketstore compatible 'Epoch' integer in nanoseconds
+    from a date formatted str.
+
+    '''
+    return int(pd.Timestamp(date, **kwargs).value)


-def mk_tbk(keys: Tuple[str, str, str]) -> str:
-    """Generate a marketstore table key from a tuple.
-    Converts,
-        ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"```
-    """
-    return '{}/' + '/'.join(keys)
-
-
-class Client:
-    """Async wrapper around the alpaca ``pymarketstore`` sync client.
-
-    This will server as the shell for building out a proper async client
-    that isn't horribly documented and un-tested..
-    """
-    def __init__(self, url: str):
-        self._client = pymkts.Client(url)
-
-    async def _invoke(
-        self,
-        meth: Callable,
-        *args,
-        **kwargs,
-    ) -> Any:
-        return err_on_resp(meth(*args, **kwargs))
-
-    async def destroy(
-        self,
-        tbk: Tuple[str, str, str],
-    ) -> None:
-        return await self._invoke(self._client.destroy, mk_tbk(tbk))
-
-    async def list_symbols(
-        self,
-        tbk: str,
-    ) -> List[str]:
-        return await self._invoke(self._client.list_symbols, mk_tbk(tbk))
-
-    async def write(
-        self,
-        symbol: str,
-        array: np.ndarray,
-    ) -> None:
-        start = time.time()
-        await self._invoke(
-            self._client.write,
-            array,
-            _tick_tbk.format(symbol),
-            isvariablelength=True
-        )
-        log.debug(f"{symbol} write time (s): {time.time() - start}")
-
-    def query(
-        self,
-        symbol,
-        tbk: Tuple[str, str] = _tick_tbk_ids,
-    ) -> pd.DataFrame:
-        # XXX: causes crash
-        # client.query(pymkts.Params(symbol, '*', 'OHCLV'
-        result = self._client.query(
-            pymkts.Params(symbol, *tbk),
-        )
-        return result.first().df()
-
-
-@asynccontextmanager
+@acm
 async def get_client(
-    url: str = _url,
-) -> Client:
-    yield Client(url)
+    host: str = 'localhost',
+    port: int = 5995
+
+) -> MarketstoreClient:
+    '''
+    Load a ``anyio_marketstore`` grpc client connected
+    to an existing ``marketstore`` server.
+
+    '''
+    async with open_marketstore_client(
+        host,
+        port
+    ) as client:
+        yield client
+
+
+class MarketStoreError(Exception):
+    "Generic marketstore client error"
+
+
+# def err_on_resp(response: dict) -> None:
+#     """Raise any errors found in responses from client request.
+#     """
+#     responses = response['responses']
+#     if responses is not None:
+#         for r in responses:
+#             err = r['error']
+#             if err:
+#                 raise MarketStoreError(err)
+
+
+tf_in_1s = bidict({
+    1: '1Sec',
+    60: '1Min',
+    60*5: '5Min',
+    60*15: '15Min',
+    60*30: '30Min',
+    60*60: '1H',
+    60*60*24: '1D',
+})
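Note: `tf_in_1s` maps bar periods in seconds to marketstore timeframe strings; using a `bidict` gives the inverse lookup that the query-unpacking code below relies on. A quick demo:

    from bidict import bidict

    tf_in_1s = bidict({1: '1Sec', 60: '1Min'})

    # forward: seconds -> timeframe string
    assert tf_in_1s[60] == '1Min'
    # inverse: timeframe string -> seconds (used when keying results
    # by ``data_set.timeframe`` later in this diff)
    assert tf_in_1s.inverse['1Min'] == 60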
|
|
||||||
|
|
||||||
|
class Storage:
|
||||||
|
'''
|
||||||
|
High level storage api for both real-time and historical ingest.
|
||||||
|
|
||||||
|
'''
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
client: MarketstoreClient,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
# TODO: eventually this should be an api/interface type that
|
||||||
|
# ensures we can support multiple tsdb backends.
|
||||||
|
self.client = client
|
||||||
|
|
||||||
|
# series' cache from tsdb reads
|
||||||
|
self._arrays: dict[str, np.ndarray] = {}
|
||||||
|
|
||||||
|
async def list_keys(self) -> list[str]:
|
||||||
|
return await self.client.list_symbols()
|
||||||
|
|
||||||
|
async def search_keys(self, pattern: str) -> list[str]:
|
||||||
|
'''
|
||||||
|
Search for time series key in the storage backend.
|
||||||
|
|
||||||
|
'''
|
||||||
|
...
|
||||||
|
|
||||||
|
async def write_ticks(self, ticks: list) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def write_ohlcv(self, ohlcv: np.ndarray) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
async def read_ohlcv(
|
||||||
|
self,
|
||||||
|
fqsn: str,
|
||||||
|
timeframe: Optional[Union[int, str]] = None,
|
||||||
|
|
||||||
|
) -> tuple[
|
||||||
|
MarketstoreClient,
|
||||||
|
Union[dict, np.ndarray]
|
||||||
|
]:
|
||||||
|
client = self.client
|
||||||
|
syms = await client.list_symbols()
|
||||||
|
|
||||||
|
if fqsn not in syms:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
if timeframe is None:
|
||||||
|
log.info(f'starting {fqsn} tsdb granularity scan..')
|
||||||
|
# loop through and try to find highest granularity
|
||||||
|
for tfstr in tf_in_1s.values():
|
||||||
|
try:
|
||||||
|
log.info(f'querying for {tfstr}@{fqsn}')
|
||||||
|
result = await client.query(
|
||||||
|
Params(fqsn, tfstr, 'OHLCV',)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
except purerpc.grpclib.exceptions.UnknownError:
|
||||||
|
# XXX: this is already logged by the container and
|
||||||
|
# thus shows up through `marketstored` logs relay.
|
||||||
|
# log.warning(f'{tfstr}@{fqsn} not found')
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
else:
|
||||||
|
tfstr = tf_in_1s[timeframe]
|
||||||
|
result = await client.query(Params(fqsn, tfstr, 'OHLCV',))
|
||||||
|
|
||||||
|
# TODO: it turns out column access on recarrays is actually slower:
|
||||||
|
# https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
|
||||||
|
# it might make sense to make these structured arrays?
|
||||||
|
# Fill out a `numpy` array-results map
|
||||||
|
arrays = {}
|
||||||
|
for fqsn, data_set in result.by_symbols().items():
|
||||||
|
arrays.setdefault(fqsn, {})[
|
||||||
|
tf_in_1s.inverse[data_set.timeframe]
|
||||||
|
] = data_set.array
|
||||||
|
|
||||||
|
return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
|
||||||
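Note: the granularity scan above leans on Python's for/else: try each timeframe, `break` on the first table that exists, and fall through to the `else` clause (return empty) only when no `break` fired. A stripped-down sketch of that control flow:

    def scan(tables: dict, timeframes: list[str]):
        for tfstr in timeframes:
            result = tables.get(tfstr)
            if result is not None:
                break
        else:
            # only runs when the loop exhausted without a ``break``
            return {}
        return {tfstr: result}

    assert scan({'1Min': [1, 2]}, ['1Sec', '1Min']) == {'1Min': [1, 2]}
    assert scan({}, ['1Sec', '1Min']) == {}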
+    async def delete_ts(
+        self,
+        key: str,
+        timeframe: Optional[Union[int, str]] = None,
+
+    ) -> bool:
+
+        client = self.client
+        syms = await client.list_symbols()
+        print(syms)
+        # if key not in syms:
+        #     raise KeyError(f'`{fqsn}` table key not found?')
+
+        return await client.destroy(tbk=key)
+
+
+@acm
+async def open_storage_client(
+    fqsn: str,
+    period: Optional[Union[int, str]] = None,  # in seconds
+
+) -> tuple[Storage, dict[str, np.ndarray]]:
+    '''
+    Load a series by key and deliver in ``numpy`` struct array format.
+
+    '''
+    async with (
+        # eventually a storage backend endpoint
+        get_client() as client,
+    ):
+        # slap on our wrapper api
+        yield Storage(client)
+
+
+async def tsdb_history_update(
+    fqsn: str,
+
+) -> list[str]:
+
+    # TODO: real-time dedicated task for ensuring
+    # history consistency between the tsdb, shm and real-time feed..
+
+    # update sequence design notes:
+
+    # - load existing highest frequency data from mkts
+    #   * how do we want to offer this to the UI?
+    #     - lazy loading?
+    #     - try to load it all and expect graphics caching/diffing
+    #       to hide extra bits that aren't in view?
+
+    # - compute the diff between latest data from broker and shm
+    #   * use sql api in mkts to determine where the backend should
+    #     start querying for data?
+    #   * append any diff with new shm length
+    #   * determine missing (gapped) history by scanning
+    #   * how far back do we look?
+
+    # - begin rt update ingest and aggregation
+    #   * could start by always writing ticks to mkts instead of
+    #     worrying about a shm queue for now.
+    #   * we have a short list of shm queues worth groking:
+    #     - https://github.com/pikers/piker/issues/107
+    #   * the original data feed arch blurb:
+    #     - https://github.com/pikers/piker/issues/98
+    #
+    profiler = pg.debug.Profiler(
+        disabled=False,  # not pg_profile_enabled(),
+        delayed=False,
+    )
+
+    async with (
+        open_storage_client(fqsn) as storage,
+
+        maybe_open_feed(
+            [fqsn],
+            start_stream=False,
+
+        ) as (feed, stream),
+    ):
+        profiler(f'opened feed for {fqsn}')
+
+        symbol = feed.symbols.get(fqsn)
+        if symbol:
+            fqsn = symbol.front_fqsn()
+
+        syms = await storage.client.list_symbols()
+        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
+        profiler(f'listed symbols {syms}')
+
+        # diff db history with shm and only write the missing portions
+        ohlcv = feed.shm.array
+
+        # TODO: use pg profiler
+        tsdb_arrays = await storage.read_ohlcv(fqsn)
+
+        to_append = feed.shm.array
+        to_prepend = None
+
+        # hist diffing
+        if tsdb_arrays:
+            onesec = tsdb_arrays[1]
+            to_append = ohlcv[ohlcv['time'] > onesec['Epoch'][-1]]
+            to_prepend = ohlcv[ohlcv['time'] < onesec['Epoch'][0]]
+
+        profiler('Finished db arrays diffs')
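Note: the history diffing above is just boolean-mask slicing of the in-memory OHLCV against the tsdb's first and last epochs. A toy version with made-up timestamps:

    import numpy as np

    ohlcv = np.array(
        [(t, 1.0) for t in (10, 20, 30, 40)],
        dtype=[('time', 'i8'), ('close', 'f4')],
    )
    db_epochs = np.array([20, 30])  # tsdb already covers 20..30

    to_append = ohlcv[ohlcv['time'] > db_epochs[-1]]   # newer than db
    to_prepend = ohlcv[ohlcv['time'] < db_epochs[0]]   # older than db
    assert to_append['time'].tolist() == [40]
    assert to_prepend['time'].tolist() == [10]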
+        for array in [to_append, to_prepend]:
+            if array is None:
+                continue
+
+            log.info(
+                f'Writing datums {array.size} -> to tsdb from shm\n'
+            )
+
+            # build mkts schema compat array for writing
+            mkts_dt = np.dtype(_ohlcv_dt)
+            mkts_array = np.zeros(
+                len(array),
+                dtype=mkts_dt,
+            )
+            # copy from shm array (yes it's this easy):
+            # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
+            mkts_array[:] = array[[
+                'time',
+                'open',
+                'high',
+                'low',
+                'close',
+                'volume',
+            ]]
+
+            # write to db
+            resp = await storage.client.write(
+                mkts_array,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: will will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=True,
+            )
+
+            log.info(
+                f'Wrote {to_append.size} datums to tsdb\n'
+            )
+            profiler('Finished db writes')
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)
+
+        from tractor.trionics import ipython_embed
+        await ipython_embed()

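Note: the `mkts_array[:] = array[[...]]` copy above relies on numpy's structured-array assignment semantics: selecting a field subset and assigning maps fields *by position* (per the numpy docs link quoted in the diff), so `'time'` lands in `'Epoch'`, `'open'` in `'Open'`, and so on. A minimal demo:

    import numpy as np

    src = np.array(
        [(100, 1.0, 2.0)],
        dtype=[('time', 'i8'), ('open', 'f4'), ('close', 'f4')],
    )
    dst = np.zeros(
        1, dtype=[('Epoch', 'i8'), ('Open', 'f4'), ('Close', 'f4')],
    )
    dst[:] = src[['time', 'open', 'close']]
    assert dst['Epoch'][0] == 100 and dst['Open'][0] == 1.0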
 async def ingest_quote_stream(
-    symbols: List[str],
+    symbols: list[str],
     brokername: str,
     tries: int = 1,
     loglevel: str = None,

 ) -> None:
-    """Ingest a broker quote stream into marketstore in (sampled) tick format.
-    """
-    async with open_feed(
-        brokername,
-        symbols,
-        loglevel=loglevel,
-    ) as (first_quotes, qstream):
-
-        quote_cache = first_quotes.copy()
-
-        async with get_client() as ms_client:
-
-            # start ingest to marketstore
-            async for quotes in qstream:
-                log.info(quotes)
-                for symbol, quote in quotes.items():
-
-                    # remap tick strs to ints
-                    quote['tick'] = _tick_map[quote.get('tick', 'Equal')]
-
-                    # check for volume update (i.e. did trades happen
-                    # since last quote)
-                    new_vol = quote.get('volume', None)
-                    if new_vol is None:
-                        log.debug(f"No fills for {symbol}")
-                        if new_vol == quote_cache.get('volume'):
-                            # should never happen due to field diffing
-                            # on sender side
-                            log.error(
-                                f"{symbol}: got same volume as last quote?")
-
-                    quote_cache.update(quote)
-
-                    a = quote_to_marketstore_structarray(
-                        quote,
-                        # TODO: check this closer to the broker query api
-                        last_fill=quote.get('fill_time', '')
-                    )
-                    await ms_client.write(symbol, a)
+    '''
+    Ingest a broker quote stream into a ``marketstore`` tsdb.
+
+    '''
+    async with (
+        maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed,
+        get_client() as ms_client,
+    ):
+        async for quotes in feed.stream:
+            log.info(quotes)
+            for symbol, quote in quotes.items():
+                for tick in quote.get('ticks', ()):
+                    ticktype = tick.get('type', 'n/a')
+
+                    # techtonic tick write
+                    array = quote_to_marketstore_structarray({
+                        'IsTrade': 1 if ticktype == 'trade' else 0,
+                        'IsBid': 1 if ticktype in ('bid', 'bsize') else 0,
+                        'Price': tick.get('price'),
+                        'Size': tick.get('size')
+                    }, last_fill=quote.get('broker_ts', None))
+
+                    await ms_client.write(array, _tick_tbk)
+
+            # LEGACY WRITE LOOP (using old tick dt)
+            # quote_cache = {
+            #     'size': 0,
+            #     'tick': 0
+            # }
+
+            # async for quotes in qstream:
+            #     log.info(quotes)
+            #     for symbol, quote in quotes.items():
+
+            #         # remap tick strs to ints
+            #         quote['tick'] = _tick_map[quote.get('tick', 'Equal')]
+
+            #         # check for volume update (i.e. did trades happen
+            #         # since last quote)
+            #         new_vol = quote.get('volume', None)
+            #         if new_vol is None:
+            #             log.debug(f"No fills for {symbol}")
+            #         if new_vol == quote_cache.get('volume'):
+            #             # should never happen due to field diffing
+            #             # on sender side
+            #             log.error(
+            #                 f"{symbol}: got same volume as last quote?")
+
+            #         quote_cache.update(quote)
+
+            #         a = quote_to_marketstore_structarray(
+            #             quote,
+            #             # TODO: check this closer to the broker query api
+            #             last_fill=quote.get('fill_time', '')
+            #         )
+            #         await ms_client.write(symbol, a)


 async def stream_quotes(
-    symbols: List[str],
+    symbols: list[str],
     host: str = 'localhost',
     port: int = 5993,
     diff_cached: bool = True,
     loglevel: str = None,

 ) -> None:
-    """Open a symbol stream from a running instance of marketstore and
-    log to console.
-    """
+    '''
+    Open a symbol stream from a running instance of marketstore and
+    log to console.
+
+    '''
     # XXX: required to propagate ``tractor`` loglevel to piker logging
     get_console_log(loglevel or tractor.current_actor().loglevel)

-    tbks: Dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}
+    tbks: dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}

     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
         # send subs topics to server

@@ -271,7 +538,7 @@ async def stream_quotes(
         )
         log.info(resp)

-        async def recv() -> Dict[str, Any]:
+        async def recv() -> dict[str, Any]:
             return msgpack.loads((await ws.get_message()), encoding='utf-8')

         streams = (await recv())['streams']

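Note: the "techtonic" tick write above ultimately packs each tick dict into a one-row numpy struct array matching the `_tick_dt` schema introduced earlier in this diff (the exact packing lives in `quote_to_marketstore_structarray`; the row below is a hand-built sketch of the result):

    import numpy as np

    tick = {'type': 'trade', 'price': 4100.25, 'size': 2}
    row = np.array(
        [(1_650_000_000, 0, 1, 0, tick['price'], tick['size'])],
        dtype=[
            ('Epoch', 'i8'), ('Nanoseconds', 'i4'),
            ('IsTrade', 'i1'), ('IsBid', 'i1'),
            ('Price', 'f4'), ('Size', 'f4'),
        ],
    )
    assert row['IsTrade'][0] == 1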
@@ -37,6 +37,7 @@ from .. import data
 from ..data import attach_shm_array
 from ..data.feed import Feed
 from ..data._sharedmem import ShmArray
+from ..data._source import Symbol
 from ._api import (
     Fsp,
     _load_builtins,

@@ -75,8 +76,7 @@ async def filter_quotes_by_sym(

 async def fsp_compute(
-    ctx: tractor.Context,
-    symbol: str,
+    symbol: Symbol,
     feed: Feed,
     quote_stream: trio.abc.ReceiveChannel,

@@ -85,7 +85,7 @@ async def fsp_compute(

     func: Callable,

-    attach_stream: bool = False,
+    # attach_stream: bool = False,
     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,

 ) -> None:

@@ -95,13 +95,14 @@ async def fsp_compute(
         disabled=True
     )

+    fqsn = symbol.front_fqsn()
     out_stream = func(

         # TODO: do we even need this if we do the feed api right?
         # shouldn't a local stream do this before we get a handle
         # to the async iterable? it's that or we do some kinda
         # async itertools style?
-        filter_quotes_by_sym(symbol, quote_stream),
+        filter_quotes_by_sym(fqsn, quote_stream),

         # XXX: currently the ``ohlcv`` arg
         feed.shm,

@@ -125,8 +126,8 @@ async def fsp_compute(
     # each respective field.
     fields = getattr(dst.array.dtype, 'fields', None).copy()
     fields.pop('index')
-    # TODO: nptyping here!
-    history: Optional[np.ndarray] = None
+    history: Optional[np.ndarray] = None  # TODO: nptyping here!

     if fields and len(fields) > 1 and fields:
         if not isinstance(history_output, dict):
             raise ValueError(

@@ -191,40 +192,47 @@ async def fsp_compute(
     profiler(f'{func_name} pushed history')
     profiler.finish()

-    # TODO: UGH, what is the right way to do something like this?
-    if not ctx._started_called:
-        await ctx.started(index)
-
     # setup a respawn handle
     with trio.CancelScope() as cs:

+        # TODO: might be better to just make a "restart" method where
+        # the target task is spawned implicitly and then the event is
+        # set via some higher level api? At that poing we might as well
+        # be writing a one-cancels-one nursery though right?
         tracker = TaskTracker(trio.Event(), cs)
         task_status.started((tracker, index))

         profiler(f'{func_name} yield last index')

         # import time
         # last = time.time()

         try:
-            # rt stream
-            async with ctx.open_stream() as stream:
-                async for processed in out_stream:
-
-                    log.debug(f"{func_name}: {processed}")
-                    key, output = processed
-                    index = src.index
-                    dst.array[-1][key] = output
-
-                    # NOTE: for now we aren't streaming this to the consumer
-                    # stream latest array index entry which basically just acts
-                    # as trigger msg to tell the consumer to read from shm
-                    if attach_stream:
-                        await stream.send(index)
-
-                    # period = time.time() - last
-                    # hz = 1/period if period else float('nan')
-                    # if hz > 60:
-                    #     log.info(f'FSP quote too fast: {hz}')
-                    # last = time.time()
+            async for processed in out_stream:
+
+                log.debug(f"{func_name}: {processed}")
+                key, output = processed
+                index = src.index
+                dst.array[-1][key] = output
+
+                # NOTE: for now we aren't streaming this to the consumer
+                # stream latest array index entry which basically just acts
+                # as trigger msg to tell the consumer to read from shm
+                # TODO: further this should likely be implemented much
+                # like our `Feed` api where there is one background
+                # "service" task which computes output and then sends to
+                # N-consumers who subscribe for the real-time output,
+                # which we'll likely want to implement using local-mem
+                # chans for the fan out?
+                # if attach_stream:
+                #     await client_stream.send(index)
+
+                # period = time.time() - last
+                # hz = 1/period if period else float('nan')
+                # if hz > 60:
+                #     log.info(f'FSP quote too fast: {hz}')
+                # last = time.time()
         finally:
             tracker.complete.set()

@@ -235,8 +243,7 @@ async def cascade(
     ctx: tractor.Context,

     # data feed key
-    brokername: str,
-    symbol: str,
+    fqsn: str,

     src_shm_token: dict,
     dst_shm_token: tuple[str, np.dtype],

@@ -254,7 +261,10 @@ async def cascade(
     destination shm array buffer.

     '''
-    profiler = pg.debug.Profiler(delayed=False, disabled=False)
+    profiler = pg.debug.Profiler(
+        delayed=False,
+        disabled=False
+    )

     if loglevel:
         get_console_log(loglevel)

@@ -289,8 +299,7 @@ async def cascade(

     # open a data feed stream with requested broker
     async with data.feed.maybe_open_feed(
-        brokername,
-        [symbol],
+        [fqsn],

         # TODO throttle tick outputs from *this* daemon since
         # it'll emit tons of ticks due to the throttle only

@@ -299,6 +308,7 @@ async def cascade(
         # tick_throttle=60,

     ) as (feed, quote_stream):
+        symbol = feed.symbols[fqsn]

         profiler(f'{func}: feed up')

@@ -313,7 +323,6 @@ async def cascade(
         fsp_target = partial(

             fsp_compute,
-            ctx=ctx,
             symbol=symbol,
             feed=feed,
             quote_stream=quote_stream,

@@ -322,7 +331,7 @@ async def cascade(
             src=src,
             dst=dst,

-            # func_name=func_name,
+            # target
             func=func
         )

@@ -334,90 +343,113 @@ async def cascade(

         profiler(f'{func_name}: fsp up')

-        async def resync(tracker: TaskTracker) -> tuple[TaskTracker, int]:
-            # TODO: adopt an incremental update engine/approach
-            # where possible here eventually!
-            log.warning(f're-syncing fsp {func_name} to source')
-            tracker.cs.cancel()
-            await tracker.complete.wait()
-            return await n.start(fsp_target)
-
-        def is_synced(
-            src: ShmArray,
-            dst: ShmArray
-        ) -> tuple[bool, int, int]:
-            '''Predicate to dertmine if a destination FSP
-            output array is aligned to its source array.
-
-            '''
-            step_diff = src.index - dst.index
-            len_diff = abs(len(src.array) - len(dst.array))
-            return not (
-                # the source is likely backfilling and we must
-                # sync history calculations
-                len_diff > 2 or
-
-                # we aren't step synced to the source and may be
-                # leading/lagging by a step
-                step_diff > 1 or
-                step_diff < 0
-            ), step_diff, len_diff
-
-        async def poll_and_sync_to_step(
-
-            tracker: TaskTracker,
-            src: ShmArray,
-            dst: ShmArray,
-
-        ) -> tuple[TaskTracker, int]:
-
-            synced, step_diff, _ = is_synced(src, dst)
-            while not synced:
-                tracker, index = await resync(tracker)
-                synced, step_diff, _ = is_synced(src, dst)
+        # sync client
+        await ctx.started(index)
+
+        # XXX: rt stream with client which we MUST
+        # open here (and keep it open) in order to make
+        # incremental "updates" as history prepends take
+        # place.
+        async with ctx.open_stream() as client_stream:
+
+            # TODO: these likely should all become
+            # methods of this ``TaskLifetime`` or wtv
+            # abstraction..
+            async def resync(
+                tracker: TaskTracker,
+
+            ) -> tuple[TaskTracker, int]:
+                # TODO: adopt an incremental update engine/approach
+                # where possible here eventually!
+                log.warning(f're-syncing fsp {func_name} to source')
+                tracker.cs.cancel()
+                await tracker.complete.wait()
+                tracker, index = await n.start(fsp_target)
+
+                # always trigger UI refresh after history update,
+                # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
+                # ``piker.ui._display.trigger_update()``.
+                await client_stream.send('update')
+                return tracker, index
+
+            def is_synced(
+                src: ShmArray,
+                dst: ShmArray
+            ) -> tuple[bool, int, int]:
+                '''Predicate to dertmine if a destination FSP
+                output array is aligned to its source array.
+
+                '''
+                step_diff = src.index - dst.index
+                len_diff = abs(len(src.array) - len(dst.array))
+                return not (
+                    # the source is likely backfilling and we must
+                    # sync history calculations
+                    len_diff > 2 or
+
+                    # we aren't step synced to the source and may be
+                    # leading/lagging by a step
+                    step_diff > 1 or
+                    step_diff < 0
+                ), step_diff, len_diff
+
+            async def poll_and_sync_to_step(
+
+                tracker: TaskTracker,
+                src: ShmArray,
+                dst: ShmArray,
+
+            ) -> tuple[TaskTracker, int]:
+
+                synced, step_diff, _ = is_synced(src, dst)
+                while not synced:
+                    tracker, index = await resync(tracker)
+                    synced, step_diff, _ = is_synced(src, dst)

                 return tracker, step_diff
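Note: `is_synced` above declares the destination FSP buffer aligned when the write indices differ by at most one step and the buffer lengths are within two rows. A toy check mirroring that predicate, with plain ints standing in for the `ShmArray` handles:

    def is_synced(src_index, dst_index, src_len, dst_len):
        step_diff = src_index - dst_index
        len_diff = abs(src_len - dst_len)
        return not (len_diff > 2 or step_diff > 1 or step_diff < 0)

    assert is_synced(100, 100, 500, 500)      # perfectly aligned
    assert not is_synced(100, 98, 500, 500)   # dst lagging 2 steps
    assert not is_synced(100, 100, 500, 490)  # history length mismatch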
             s, step, ld = is_synced(src, dst)

             # detect sample period step for subscription to increment
             # signal
             times = src.array['time']
             delay_s = times[-1] - times[times != times[-1]][-1]

             # Increment the underlying shared memory buffer on every
             # "increment" msg received from the underlying data feed.
-            async with feed.index_stream(int(delay_s)) as istream:
+            async with feed.index_stream(
+                int(delay_s)
+            ) as istream:

                 profiler(f'{func_name}: sample stream up')
                 profiler.finish()

                 async for _ in istream:

                     # respawn the compute task if the source
                     # array has been updated such that we compute
                     # new history from the (prepended) source.
                     synced, step_diff, _ = is_synced(src, dst)
                     if not synced:
                         tracker, step_diff = await poll_and_sync_to_step(
                             tracker,
                             src,
                             dst,
                         )

                         # skip adding a last bar since we should already
                         # be step alinged
                         if step_diff == 0:
                             continue

                     # read out last shm row, copy and write new row
                     array = dst.array

                     # some metrics like vlm should be reset
                     # to zero every step.
                     if zero_on_step:
                         last = zeroed
                     else:
                         last = array[-1:].copy()

                     dst.push(last)

@@ -25,39 +25,10 @@ from PyQt5.QtCore import QPointF
 from PyQt5.QtWidgets import QGraphicsPathItem

 if TYPE_CHECKING:
-    from ._axes import PriceAxis
     from ._chart import ChartPlotWidget
     from ._label import Label


-def marker_right_points(
-    chart: ChartPlotWidget,  # noqa
-    marker_size: int = 20,
-
-) -> (float, float, float):
-    '''
-    Return x-dimension, y-axis-aware, level-line marker oriented scene
-    values.
-
-    X values correspond to set the end of a level line, end of
-    a paried level line marker, and the right most side of the "right"
-    axis respectively.
-
-    '''
-    # TODO: compute some sensible maximum value here
-    # and use a humanized scheme to limit to that length.
-    l1_len = chart._max_l1_line_len
-    ryaxis = chart.getAxis('right')
-
-    r_axis_x = ryaxis.pos().x()
-    up_to_l1_sc = r_axis_x - l1_len - 10
-
-    marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
-    line_end = marker_right - (6/16 * marker_size)
-
-    return line_end, marker_right, r_axis_x
-
-
 def vbr_left(
     label: Label,

@@ -26,8 +26,6 @@ from PyQt5.QtWidgets import QGraphicsPathItem
 from pyqtgraph import Point, functions as fn, Color
 import numpy as np

-from ._anchors import marker_right_points
-

 def mk_marker_path(

@@ -116,7 +114,7 @@ class LevelMarker(QGraphicsPathItem):

         self.get_level = get_level
         self._on_paint = on_paint
-        self.scene_x = lambda: marker_right_points(chart)[1]
+        self.scene_x = lambda: chart.marker_right_points()[1]
         self.level: float = 0
         self.keep_in_view = keep_in_view

@@ -169,7 +167,7 @@ class LevelMarker(QGraphicsPathItem):
         vr = view.state['viewRange']
         ymn, ymx = vr[1]

-        # _, marker_right, _ = marker_right_points(line._chart)
+        # _, marker_right, _ = line._chart.marker_right_points()
         x = self.scene_x()

         if self.style == '>|':  # short style, points "down-to" line

@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -19,10 +19,14 @@ High level chart-widget apis.

 '''
 from __future__ import annotations
-from typing import Optional
+from typing import Optional, TYPE_CHECKING

 from PyQt5 import QtCore, QtWidgets
-from PyQt5.QtCore import Qt
+from PyQt5.QtCore import (
+    Qt,
+    QLineF,
+    # QPointF,
+)
 from PyQt5.QtWidgets import (
     QFrame,
     QWidget,

@@ -52,8 +56,6 @@ from ._style import (
     CHART_MARGINS,
     _xaxis_at,
     _min_points_to_show,
-    _bars_from_right_in_follow_mode,
-    _bars_to_left_in_follow_mode,
 )
 from ..data.feed import Feed
 from ..data._source import Symbol

@@ -63,6 +65,8 @@ from ._interaction import ChartView
 from ._forms import FieldsForm
 from ._overlay import PlotItemOverlay

+if TYPE_CHECKING:
+    from ._display import DisplayState

 log = get_logger(__name__)

@@ -230,16 +234,23 @@ class GodWidget(QWidget):
             # chart is already in memory so just focus it
             linkedsplits.show()
             linkedsplits.focus()
+            linkedsplits.graphics_cycle()
             await trio.sleep(0)

             # resume feeds *after* rendering chart view asap
             chart.resume_all_feeds()

+            # TODO: we need a check to see if the chart
+            # last had the xlast in view, if so then shift so it's
+            # still in view, if the user was viewing history then
+            # do nothing yah?
+            chart.default_view()

         self.linkedsplits = linkedsplits
         symbol = linkedsplits.symbol
         if symbol is not None:
             self.window.setWindowTitle(
-                f'{symbol.key}@{symbol.brokers} '
+                f'{symbol.front_fqsn()} '
                 f'tick:{symbol.tick_size}'
             )

@@ -346,8 +357,19 @@ class LinkedSplits(QWidget):
         self.layout.setContentsMargins(0, 0, 0, 0)
         self.layout.addWidget(self.splitter)

+        # chart-local graphics state that can be passed to
+        # a ``graphic_update_cycle()`` call by any task wishing to
+        # update the UI for a given "chart instance".
+        self.display_state: Optional[DisplayState] = None
+
         self._symbol: Symbol = None

+    def graphics_cycle(self) -> None:
+        from . import _display
+        ds = self.display_state
+        if ds:
+            return _display.graphics_update_cycle(ds)
+
     @property
     def symbol(self) -> Symbol:
         return self._symbol

@@ -362,12 +384,15 @@ class LinkedSplits(QWidget):
         '''
         ln = len(self.subplots)

+        # proportion allocated to consumer subcharts
         if not prop:
-            # proportion allocated to consumer subcharts
-            if ln < 2:
-                prop = 1/3
-            elif ln >= 2:
-                prop = 3/8
+            prop = 3/8*5/8
+
+            # if ln < 2:
+            #     prop = 3/8*5/8
+            # elif ln >= 2:
+            #     prop = 3/8

         major = 1 - prop
         min_h_ind = int((self.height() * prop) / ln)

@@ -807,17 +832,72 @@ class ChartPlotWidget(pg.PlotWidget):
         return int(vr.left()), int(vr.right())

     def bars_range(self) -> tuple[int, int, int, int]:
-        """Return a range tuple for the bars present in view.
-        """
+        '''
+        Return a range tuple for the bars present in view.
+
+        '''
         l, r = self.view_range()
         array = self._arrays[self.name]
-        lbar = max(l, array[0]['index'])
-        rbar = min(r, array[-1]['index'])
+        start, stop = self._xrange = (
+            array[0]['index'],
+            array[-1]['index'],
+        )
+        lbar = max(l, start)
+        rbar = min(r, stop)
         return l, lbar, rbar, r

+    def curve_width_pxs(
+        self,
+    ) -> float:
+        _, lbar, rbar, _ = self.bars_range()
+        return self.view.mapViewToDevice(
+            QLineF(lbar, 0, rbar, 0)
+        ).length()
+
+    def pre_l1_xs(self) -> tuple[float, float]:
+        '''
+        Return the view x-coord for the value just before
+        the L1 labels on the y-axis as well as the length
+        of that L1 label from the y-axis.
+
+        '''
+        line_end, marker_right, yaxis_x = self.marker_right_points()
+        view = self.view
+        line = view.mapToView(
+            QLineF(line_end, 0, yaxis_x, 0)
+        )
+        return line.x1(), line.length()
+
+    def marker_right_points(
+        self,
+        marker_size: int = 20,
+
+    ) -> (float, float, float):
+        '''
+        Return x-dimension, y-axis-aware, level-line marker oriented scene
+        values.
+
+        X values correspond to set the end of a level line, end of
+        a paried level line marker, and the right most side of the "right"
+        axis respectively.
+
+        '''
+        # TODO: compute some sensible maximum value here
+        # and use a humanized scheme to limit to that length.
+        l1_len = self._max_l1_line_len
+        ryaxis = self.getAxis('right')
+
+        r_axis_x = ryaxis.pos().x()
+        up_to_l1_sc = r_axis_x - l1_len - 10
+
+        marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
+        line_end = marker_right - (6/16 * marker_size)
+
+        return line_end, marker_right, r_axis_x

def default_view(
|
def default_view(
|
||||||
self,
|
self,
|
||||||
index: int = -1,
|
steps_on_screen: Optional[int] = None
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
|
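To make the offset math in `marker_right_points()` concrete, here is the same computation with assumed (purely illustrative) scene values for the right-axis x position and L1 label length:

marker_size = 20
r_axis_x = 1200.0                                   # assumed axis position in scene px
l1_len = 100.0                                      # assumed max L1 label length
up_to_l1_sc = r_axis_x - l1_len - 10                # 1090.0
marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)  # 1035.0
line_end = marker_right - (6/16 * marker_size)          # 1027.5
assert (line_end, marker_right) == (1027.5, 1035.0)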
@@ -825,13 +905,38 @@ class ChartPlotWidget(pg.PlotWidget):

         '''
         try:
-            xlast = self._arrays[self.name][index]['index']
+            index = self._arrays[self.name]['index']
         except IndexError:
             log.warning(f'array for {self.name} not loaded yet?')
             return

-        begin = xlast - _bars_to_left_in_follow_mode
-        end = xlast + _bars_from_right_in_follow_mode
+        xfirst, xlast = index[0], index[-1]
+        l, lbar, rbar, r = self.bars_range()
+
+        marker_pos, l1_len = self.pre_l1_xs()
+        end = xlast + l1_len + 1
+
+        if (
+            rbar < 0
+            or l < xfirst
+            or (rbar - lbar) < 6
+        ):
+            # set a fixed bars count on screen that approx includes as
+            # many bars as possible before a downsample line is shown.
+            begin = xlast - round(6116 / 6)
+
+        else:
+            begin = end - (r - l)
+
+        # for debugging
+        # print(
+        #     f'bars range: {brange}\n'
+        #     f'xlast: {xlast}\n'
+        #     f'marker pos: {marker_pos}\n'
+        #     f'l1 len: {l1_len}\n'
+        #     f'begin: {begin}\n'
+        #     f'end: {end}\n'
+        # )

         # remove any custom user yrange settings
         if self._static_yrange == 'axis':
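The fallback branch above pins a fixed count of bars on screen; with the magic constant from the hunk the implied count works out as follows (the `xlast` value is hypothetical):

xlast = 10_000                      # hypothetical last shm index
begin = xlast - round(6116 / 6)     # 10_000 - 1019
assert xlast - begin == 1019        # ~1k bars kept in view by default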
@@ -844,6 +949,11 @@ class ChartPlotWidget(pg.PlotWidget):
                 padding=0,
             )
             view._set_yrange()
+        self.view.maybe_downsample_graphics()
+        try:
+            self.linked.graphics_cycle()
+        except IndexError:
+            pass

     def increment_view(
         self,
@@ -878,8 +988,10 @@ class ChartPlotWidget(pg.PlotWidget):

         '''
         graphics = BarItems(
+            self.linked,
             self.plotItem,
-            pen_color=self.pen_color
+            pen_color=self.pen_color,
+            name=name,
         )

         # adds all bar/candle graphics objects for each data point in
@@ -988,12 +1100,6 @@ class ChartPlotWidget(pg.PlotWidget):
             # on data reads and makes graphics rendering no faster
             # clipToView=True,

-            # TODO: see how this handles with custom ohlcv bars graphics
-            # and/or if we can implement something similar for OHLC graphics
-            # autoDownsample=True,
-            # downsample=60,
-            # downsampleMethod='subsample',
-
             **pdi_kwargs,
         )

@@ -1032,7 +1138,17 @@ class ChartPlotWidget(pg.PlotWidget):
         # (we need something that avoids clutter on x-axis).
         self._add_sticky(name, bg_color=color)

+        # NOTE: this is more or less the RENDER call that tells Qt to
+        # start showing the generated graphics-curves. This is kind of
+        # an edge-triggered call where once added any
+        # ``QGraphicsItem.update()`` calls are automatically displayed.
+        # Our internal graphics objects have their own "update from
+        # data" style method API that allows for real-time updates on
+        # the next render cycle; just note a lot of the real-time
+        # updates are implicit and require a bit of digging to
+        # understand.
         pi.addItem(curve)

         return curve, data_key

     # TODO: make this a ctx mngr
@@ -1064,29 +1180,34 @@ class ChartPlotWidget(pg.PlotWidget):
         )
         return last

-    def update_ohlc_from_array(
-        self,
-
-        graphics_name: str,
-        array: np.ndarray,
-        **kwargs,
-
-    ) -> pg.GraphicsObject:
-        '''
-        Update the named internal graphics from ``array``.
-
-        '''
-        self._arrays[self.name] = array
-        graphics = self._graphics[graphics_name]
-        graphics.update_from_array(array, **kwargs)
-        return graphics
-
-    def update_curve_from_array(
+    # def update_ohlc_from_array(
+    #     self,
+
+    #     graphics_name: str,
+    #     array: np.ndarray,
+    #     **kwargs,
+
+    # ) -> pg.GraphicsObject:
+    #     '''
+    #     Update the named internal graphics from ``array``.
+
+    #     '''
+    #     self._index = array['index'][0]
+    #     self._arrays[self.name] = array
+
+    #     graphics = self._graphics[graphics_name]
+    #     graphics.update_from_array(array, **kwargs)
+
+    #     return graphics
+
+    # def update_curve_from_array(
+    def update_graphics_from_array(
         self,

         graphics_name: str,
-        array: np.ndarray,
+
+        array: Optional[np.ndarray] = None,

         array_key: Optional[str] = None,

         **kwargs,

     ) -> pg.GraphicsObject:
@@ -1094,31 +1215,64 @@ class ChartPlotWidget(pg.PlotWidget):
         Update the named internal graphics from ``array``.

         '''
-        assert len(array)
+        if array is not None:
+            assert len(array)

         data_key = array_key or graphics_name

         if graphics_name not in self._flows:
-            self._arrays[self.name] = array
-        else:
+            data_key = self.name
+
+        if array is not None:
+            # write array to internal graphics table
             self._arrays[data_key] = array
+        else:
+            array = self._arrays[data_key]

-        curve = self._graphics[graphics_name]
+        # array key and graphics "name" might be different..
+        graphics = self._graphics[graphics_name]

-        # NOTE: back when we weren't implementing the curve graphics
-        # ourselves you'd have updates using this method:
-        # curve.setData(y=array[graphics_name], x=array['index'], **kwargs)
+        # compute "in-view" indices
+        l, lbar, rbar, r = self.bars_range()
+        indexes = array['index']
+        ifirst = indexes[0]
+        ilast = indexes[-1]

-        # NOTE: graphics **must** implement a diff based update
-        # operation where an internal ``FastUpdateCurve._xrange`` is
-        # used to determine if the underlying path needs to be
-        # pre/ap-pended.
-        curve.update_from_array(
-            x=array['index'],
-            y=array[data_key],
-            **kwargs
-        )
-        return curve
+        lbar_i = max(l, ifirst) - ifirst
+        rbar_i = min(r, ilast) - ifirst
+
+        in_view = array[lbar_i: rbar_i]
+
+        if not in_view.size:
+            return graphics
+
+        # TODO: we could do it this way as well no?
+        # to_draw = array[lbar - ifirst:(rbar - ifirst) + 1]
+
+        # start_index = self._index
+        # lbar = max(l, start_index) - start_index
+        # rbar = min(r, ohlc[-1]['index']) - start_index
+        if isinstance(graphics, BarItems):
+            graphics.update_from_array(
+                array,
+                in_view,
+                view_range=(lbar_i, rbar_i),
+
+                **kwargs,
+            )
+
+        else:
+            graphics.update_from_array(
+                x=array['index'],
+                y=array[data_key],
+
+                x_iv=in_view['index'],
+                y_iv=in_view[data_key],
+                view_range=(lbar_i, rbar_i),
+
+                **kwargs
+            )
+
+        return graphics

     # def _label_h(self, yhigh: float, ylow: float) -> float:
     #     # compute contents label "height" in view terms
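A small numeric check of the "in-view" index math added above: the view-range endpoints are clipped to the array's index span and then offset by the first index so they can slice the zero-based numpy array directly. All values below are illustrative:

import numpy as np

array = np.zeros(100, dtype=[('index', int), ('close', float)])
array['index'] = np.arange(500, 600)   # shm-style non-zero-based index

l, r = 550, 650                        # hypothetical view left/right
ifirst, ilast = array['index'][0], array['index'][-1]
lbar_i = max(l, ifirst) - ifirst       # 50
rbar_i = min(r, ilast) - ifirst        # 99
in_view = array[lbar_i: rbar_i]
assert len(in_view) == 49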
@@ -1149,6 +1303,9 @@ class ChartPlotWidget(pg.PlotWidget):

         # print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")

+    # TODO: pretty sure we can just call the cursor
+    # directly not? i don't see why we need special "signal proxies"
+    # for this lul..
     def enterEvent(self, ev):  # noqa
         # pg.PlotWidget.enterEvent(self, ev)
         self.sig_mouse_enter.emit(self)
@@ -1173,6 +1330,22 @@ class ChartPlotWidget(pg.PlotWidget):
         else:
             return ohlc['index'][-1]

+    def in_view(
+        self,
+        array: np.ndarray,
+
+    ) -> np.ndarray:
+        '''
+        Slice an input struct array providing only datums
+        "in view" of this chart.
+
+        '''
+        l, lbar, rbar, r = self.bars_range()
+        ifirst = array[0]['index']
+        # slice data by offset from the first index
+        # available in the passed datum set.
+        return array[lbar - ifirst:(rbar - ifirst) + 1]
+
     def maxmin(
         self,
         name: Optional[str] = None,
@@ -0,0 +1,346 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Graphics related downsampling routines for compressing to pixel
+limits on the display device.
+
+'''
+import math
+from typing import Optional
+
+import numpy as np
+from numpy.lib import recfunctions as rfn
+from numba import (
+    jit,
+    # float64, optional, int64,
+)
+
+from ..log import get_logger
+
+
+log = get_logger(__name__)
+
+
+def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
+    '''
+    Convert an OHLC struct-array containing 'high'/'low' columns
+    to a "joined" max/min 1-d array.
+
+    '''
+    index = ohlc['index']
+    hls = ohlc[[
+        'low',
+        'high',
+    ]]
+
+    mxmn = np.empty(2*hls.size, dtype=np.float64)
+    x = np.empty(2*hls.size, dtype=np.float64)
+    trace_hl(hls, mxmn, x, index[0])
+    x = x + index[0]
+
+    return mxmn, x
+
+
+@jit(
+    # TODO: the type annots..
+    # float64[:](float64[:],),
+    nopython=True,
+)
+def trace_hl(
+    hl: 'np.ndarray',
+    out: np.ndarray,
+    x: np.ndarray,
+    start: int,
+
+    # the "offset" values in the x-domain which
+    # place the 2 output points around each ``int``
+    # master index.
+    margin: float = 0.43,
+
+) -> None:
+    '''
+    "Trace" the outline of the high-low values of an ohlc sequence
+    as a line such that the maximum deviation (aka dispersion) between
+    bars is preserved.
+
+    This routine is expected to modify input arrays in-place.
+
+    '''
+    last_l = hl['low'][0]
+    last_h = hl['high'][0]
+
+    for i in range(hl.size):
+        row = hl[i]
+        l, h = row['low'], row['high']
+
+        up_diff = h - last_l
+        down_diff = last_h - l
+
+        if up_diff > down_diff:
+            out[2*i + 1] = h
+            out[2*i] = last_l
+        else:
+            out[2*i + 1] = l
+            out[2*i] = last_h
+
+        last_l = l
+        last_h = h
+
+        x[2*i] = int(i) - margin
+        x[2*i + 1] = int(i) + margin
+
+    return out
+
+
+def ohlc_flatten(
+    ohlc: np.ndarray,
+    use_mxmn: bool = False,
+
+) -> tuple[np.ndarray, np.ndarray]:
+    '''
+    Convert an OHLCV struct-array into a flat ready-for-line-plotting
+    1-d array that is 4 times the size with x-domain values distributed
+    evenly (by 0.5 steps) over each index.
+
+    '''
+    index = ohlc['index']
+
+    if use_mxmn:
+        flat, x = hl2mxmn(ohlc)
+    else:
+        flat = rfn.structured_to_unstructured(
+            ohlc[['open', 'high', 'low', 'close']]
+        ).flatten()
+
+        x = np.linspace(
+            start=index[0] - 0.5,
+            stop=index[-1] + 0.5,
+            num=len(flat),
+        )
+    return x, flat
+
+
+def ohlc_to_m4_line(
+    ohlc: np.ndarray,
+    px_width: int,
+
+    downsample: bool = False,
+    uppx: Optional[float] = None,
+    pretrace: bool = False,
+
+) -> tuple[np.ndarray, np.ndarray]:
+    '''
+    Convert an OHLC struct-array to a m4 downsampled 1-d array.
+
+    '''
+    xpts, flat = ohlc_flatten(
+        ohlc,
+        use_mxmn=pretrace,
+    )
+
+    if downsample:
+        bins, x, y = ds_m4(
+            xpts,
+            flat,
+            px_width=px_width,
+            uppx=uppx,
+            log_scale=bool(uppx)
+        )
+        x = np.broadcast_to(x[:, None], y.shape)
+        x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
+        y = y.flatten()
+
+        return x, y
+    else:
+        return xpts, flat
+
+
+def ds_m4(
+    x: np.ndarray,
+    y: np.ndarray,
+
+    # this is the width of the data in view
+    # in display-device-local pixel units.
+    px_width: int,
+    uppx: Optional[float] = None,
+    log_scale: bool = True,
+
+) -> tuple[int, np.ndarray, np.ndarray]:
+    '''
+    Downsample using the M4 algorithm.
+
+    This is more or less an OHLC style sampling of a line-style series.
+
+    '''
+    # NOTE: this method is a so called "visualization driven data
+    # aggregation" approach. It gives error-free line chart
+    # downsampling, see
+    # further scientific paper resources:
+    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
+
+    # Details on implementation of this algo are based in,
+    # https://github.com/pikers/piker/issues/109
+
+    # XXX: from infinite on downsampling viewable graphics:
+    # "one thing i remembered about the binning - if you are
+    # picking a range within your timeseries the start and end bin
+    # should be one more bin size outside the visual range, then
+    # you get better visual fidelity at the edges of the graph"
+    # "i didn't show it in the sample code, but it's accounted for
+    # in the start and end indices and number of bins"
+
+    # optionally log-scale down the "supposed pxs on screen"
+    # as the units-per-px (uppx) gets large.
+    if log_scale:
+        assert uppx, 'You must provide a `uppx` value to use log scaling!'
+
+        scaler = round(
+            max(
+                # NOTE: found that a 16x px width brought greater
+                # detail, likely due to dpi scaling?
+                # px_width=px_width * 16,
+                2**6 / (1 + math.log(uppx, 2)),
+                1
+            )
+        )
+        px_width *= scaler
+
+    assert px_width > 1  # width of screen in pxs?
+
+    # NOTE: if we didn't pre-slice the data to downsample
+    # you could in theory pass these as the slicing params,
+    # do we care though since we can always just pre-slice the
+    # input?
+    x_start = x[0]  # x value start/lowest in domain
+    x_end = x[-1]  # x end value/highest in domain
+
+    # XXX: always round up on the input pixels
+    px_width = math.ceil(px_width)
+
+    x_range = x_end - x_start
+
+    # ratio of indexed x-value to width of raster in pixels.
+    # this is more or less, uppx: units-per-pixel.
+    w = x_range / float(px_width)
+
+    # ensure we make more than enough
+    # frames (windows) for the output pixel
+    frames = px_width
+
+    # if we have more than an exact integer's
+    # (uniform quotient output) worth of datum-domain-points
+    # per windows-frame, add one more window to ensure
+    # we have room for all output down-samples.
+    pts_per_pixel, r = divmod(len(x), frames)
+    if r:
+        frames += 1
+
+    # call into ``numba``
+    nb, i_win, y_out = _m4(
+        x,
+        y,
+
+        frames,
+
+        # TODO: see func below..
+        # i_win,
+        # y_out,
+
+        # first index in x data to start at
+        x_start,
+        # window size for each "frame" of data to downsample (normally
+        # scaled by the ratio of pixels on screen to data in x-range).
+        w,
+    )
+
+    # filter out any overshoot in the input allocation arrays by
+    # removing zero-ed tail entries which should start at a certain
+    # index.
+    i_win = i_win[i_win != 0]
+    y_out = y_out[:i_win.size]
+
+    return nb, i_win, y_out
+
+
+@jit(
+    nopython=True,
+    nogil=True,
+)
+def _m4(
+
+    xs: np.ndarray,
+    ys: np.ndarray,
+
+    frames: int,
+
+    # TODO: using this approach by having the ``.zeros()`` alloc lines
+    # below, in pure python was causing segfaults and alloc crashes..
+    # we might need to see how it behaves with shm arrays and consider
+    # allocating them once at startup?
+
+    # pre-alloc array of x indices mapping to the start
+    # of each window used for downsampling in y.
+    # i_win: np.ndarray,
+    # pre-alloc array of output downsampled y values
+    # y_out: np.ndarray,
+
+    x_start: int,
+    step: float,
+
+) -> int:
+    # nbins = len(i_win)
+    # count = len(xs)
+
+    # these are pre-allocated and mutated by ``numba``
+    # code in-place.
+    y_out = np.zeros((frames, 4), ys.dtype)
+    i_win = np.zeros(frames, xs.dtype)
+
+    bincount = 0
+    x_left = x_start
+
+    # Find the first window's starting value which *includes* the
+    # first value in the x-domain array, i.e. the first
+    # "left-side-of-window" **plus** the downsampling step,
+    # creates a window which includes the first x **value**.
+    while xs[0] >= x_left + step:
+        x_left += step
+
+    # set all bins in the left-most entry to the starting left-most x value
+    # (aka a row broadcast).
+    i_win[bincount] = x_left
+    # set all y-values to the first value passed in.
+    y_out[bincount] = ys[0]
+
+    for i in range(len(xs)):
+        x = xs[i]
+        y = ys[i]
+        if x < x_left + step:   # the current window "step" is [bin, bin+1)
+            y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            y_out[bincount, 3] = y
+        else:
+            # Find the next bin
+            while x >= x_left + step:
+                x_left += step
+
+            bincount += 1
+            i_win[bincount] = x_left
+            y_out[bincount] = y
+
+    return bincount, i_win, y_out
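For readers new to M4: the core idea is to bucket the x-domain into pixel-width bins and keep only the (first, min, max, last) of y per bin, which is exactly what `_m4()` accumulates above. Below is a minimal pure-NumPy sketch of the same idea for reference only; the names are illustrative and this is not part of the new module's API:

import numpy as np

def m4_reference(
    x: np.ndarray,
    y: np.ndarray,
    px_width: int,
) -> tuple[np.ndarray, np.ndarray]:
    # bin index per sample, clipped so x[-1] lands in the final bin
    step = (x[-1] - x[0]) / px_width
    bins = np.minimum((x - x[0]) // step, px_width - 1).astype(int)

    frames = []
    lefts = []
    for b in np.unique(bins):
        ys = y[bins == b]
        # (first, min, max, last) per bin: the 4 points M4 keeps
        frames.append((ys[0], ys.min(), ys.max(), ys[-1]))
        lefts.append(x[0] + b * step)

    return np.array(lefts), np.array(frames)

xs = np.arange(1000, dtype=float)
ys = np.sin(xs / 20)
lefts, frames = m4_reference(xs, ys, px_width=100)
assert frames.shape == (100, 4)

The numba version in the hunk trades this O(n * bins) masking approach for a single in-order pass with pre-allocated output arrays, which is what makes it viable at real-time update rates.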
@@ -95,22 +95,24 @@ class LineDot(pg.CurvePoint):

     def event(
         self,
         ev: QtCore.QEvent,

-    ) -> None:
+    ) -> bool:
         if not isinstance(
             ev, QtCore.QDynamicPropertyChangeEvent
         ) or self.curve() is None:
             return False

+        # TODO: get rid of this ``.getData()`` and
+        # make a more pythonic api to retrieve backing
+        # numpy arrays...
         (x, y) = self.curve().getData()
         index = self.property('index')
         # first = self._plot._arrays['ohlc'][0]['index']
         # first = x[0]
         # i = index - first
         if index:
-            i = index - x[0]
+            i = round(index - x[0])
             if i > 0 and i < len(y):
                 newPos = (index, y[i])
                 QtWidgets.QGraphicsItem.setPos(self, *newPos)
@@ -293,7 +295,8 @@ class ContentsLabels:


 class Cursor(pg.GraphicsObject):
-    '''Multi-plot cursor for use on a ``LinkedSplits`` chart (set).
+    '''
+    Multi-plot cursor for use on a ``LinkedSplits`` chart (set).

     '''
     def __init__(
@@ -308,7 +311,7 @@ class Cursor(pg.GraphicsObject):

         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
-        self.plots: List['PlotChartWidget'] = []  # type: ignore # noqa
+        self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits
         self._datum_xy: tuple[int, float] = (0, 0)
@@ -405,6 +408,7 @@ class Cursor(pg.GraphicsObject):
             slot=self.mouseMoved,
             delay=_debounce_delay,
         )
+
         px_enter = pg.SignalProxy(
             plot.sig_mouse_enter,
             rateLimit=_mouse_rate_limit,
@@ -436,7 +440,10 @@ class Cursor(pg.GraphicsObject):
         if plot.linked.xaxis_chart is plot:
             xlabel = self.xaxis_label = XAxisLabel(
                 parent=self.plots[plot_index].getAxis('bottom'),
-                # parent=self.plots[plot_index].pi_overlay.get_axis(plot.plotItem, 'bottom'),
+                # parent=self.plots[plot_index].pi_overlay.get_axis(
+                #     plot.plotItem, 'bottom'
+                # ),
+
                 opacity=_ch_label_opac,
                 bg_color=self.label_color,
             )
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -23,6 +23,7 @@ from typing import Optional
 import numpy as np
 import pyqtgraph as pg
 from PyQt5 import QtGui, QtWidgets
+from PyQt5.QtWidgets import QGraphicsItem
 from PyQt5.QtCore import (
     Qt,
     QLineF,
@@ -31,8 +32,16 @@ from PyQt5.QtCore import (
     QPointF,
 )

-from .._profile import pg_profile_enabled
+from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
+from ._compression import (
+    # ohlc_to_m4_line,
+    ds_m4,
+)
+from ..log import get_logger
+
+
+log = get_logger(__name__)


 def step_path_arrays_from_1d(
@@ -94,8 +103,7 @@ _line_styles: dict[str, int] = {
 }


-# TODO: got a feeling that dropping this inheritance gets us even more speedups
-class FastAppendCurve(pg.PlotCurveItem):
+class FastAppendCurve(pg.GraphicsObject):
     '''
     A faster, append friendly version of ``pyqtgraph.PlotCurveItem``
     built for real-time data updates.
@@ -110,22 +118,44 @@ class FastAppendCurve(pg.GraphicsObject):
     '''
     def __init__(
         self,

+        x: np.ndarray,
+        y: np.ndarray,
         *args,

         step_mode: bool = False,
         color: str = 'default_lightest',
         fill_color: Optional[str] = None,
         style: str = 'solid',
         name: Optional[str] = None,
+        use_fpath: bool = True,

         **kwargs

     ) -> None:

+        # brutaaalll, see comments within..
+        self._y = self.yData = y
+        self._x = self.xData = x
+
+        self._name = name
+        self.path: Optional[QtGui.QPainterPath] = None
+
+        self.use_fpath = use_fpath
+        self.fast_path: Optional[QtGui.QPainterPath] = None
+
+        self._ds_cache: dict = {}
+
         # TODO: we can probably just dispense with the parent since
         # we're basically only using the pen setting now...
         super().__init__(*args, **kwargs)
-        self._name = name
-        self._xrange: tuple[int, int] = self.dataBounds(ax=0)
+        # self._xrange: tuple[int, int] = self.dataBounds(ax=0)
+        self._xrange: Optional[tuple[int, int]] = None
+
+        # self._last_draw = time.time()
+        self._in_ds: bool = False
+        self._last_uppx: float = 0

         # all history of curve is drawn in single px thickness
         pen = pg.mkPen(hcolor(color))
@@ -134,20 +164,20 @@ class FastAppendCurve(pg.GraphicsObject):
         if 'dash' in style:
             pen.setDashPattern([8, 3])

-        self.setPen(pen)
+        self._pen = pen

         # last segment is drawn in 2px thickness for emphasis
         # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
         self.last_step_pen = pg.mkPen(pen, width=2)

-        self._last_line: QLineF = None
-        self._last_step_rect: QRectF = None
+        self._last_line: Optional[QLineF] = None
+        self._last_step_rect: Optional[QRectF] = None

         # flat-top style histogram-like discrete curve
         self._step_mode: bool = step_mode

         # self._fill = True
-        self.setBrush(hcolor(fill_color or color))
+        self._brush = pg.functions.mkBrush(hcolor(fill_color or color))

         # TODO: one question still remaining is if this makes transform
         # interactions slower (such as zooming) and if so maybe if/when
@@ -158,13 +188,143 @@ class FastAppendCurve(pg.GraphicsObject):
         # only thing drawn is the "last" line segment which can
         # have a weird artifact where it won't be fully drawn to its
         # endpoint (something we saw on trade rate curves)
-        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+        self.setCacheMode(
+            QGraphicsItem.DeviceCoordinateCache
+        )
+
+        self.update()
+
+    # TODO: probably stick this in a new parent
+    # type which will contain our own version of
+    # what ``PlotCurveItem`` had in terms of base
+    # functionality? A `FlowGraphic` maybe?
+    def x_uppx(self) -> int:
+
+        px_vecs = self.pixelVectors()[0]
+        if px_vecs:
+            xs_in_px = px_vecs.x()
+            return round(xs_in_px)
+        else:
+            return 0
+
+    def px_width(self) -> float:
+
+        vb = self.getViewBox()
+        if not vb:
+            return 0
+
+        vr = self.viewRect()
+        l, r = int(vr.left()), int(vr.right())
+
+        if not self._xrange:
+            return 0
+
+        start, stop = self._xrange
+        lbar = max(l, start)
+        rbar = min(r, stop)
+
+        return round(vb.mapViewToDevice(
+            QLineF(lbar, 0, rbar, 0)
+        ).length())
+
+    # def should_ds_or_redraw(
+    #     self,
+
+    # ) -> tuple[bool, bool]:
+
+    #     uppx = self.x_uppx()
+    #     px_width = self.px_width()
+    #     if not px_width:
+    #         return False, False
+
+    #     # uppx_diff = abs(uppx - self._last_uppx)
+    #     uppx_diff = (uppx - self._last_uppx)
+    #     self._last_uppx = uppx
+
+    #     should_redraw: bool = False
+    #     should_ds: bool = self._in_ds
+
+    #     # print(uppx_diff)
+
+    #     if (
+    #         uppx <= 8
+    #     ):
+    #         # trigger redraw or original non-downsampled data
+    #         if self._in_ds:
+    #             print('REVERTING BACK TO SRC DATA')
+    #             # clear downsampled curve(s) and expect
+    #             # refresh of path segments.
+    #             should_redraw = True
+
+    #     elif (
+    #         uppx_diff >= 1
+    #         or uppx_diff <= -1
+    #         or self._step_mode and abs(uppx_diff) >= 1
+    #     ):
+    #         log.info(
+    #             f'{self._name} downsampler change: {self._last_uppx} -> {uppx}'
+    #         )
+    #         should_ds = {'px_width': px_width, 'uppx': uppx}
+    #         should_redraw = True
+
+    #     if should_ds:
+    #         should_ds = {'px_width': px_width, 'uppx': uppx}
+
+    #     return should_ds, should_redraw
+
+    def downsample(
+        self,
+        x,
+        y,
+        px_width,
+        uppx,
+
+    ) -> tuple[np.ndarray, np.ndarray]:
+
+        # downsample whenever more than 1 pixel per datum can be shown.
+        # always refresh data bounds until we get diffing
+        # working properly, see above..
+        bins, x, y = ds_m4(
+            x,
+            y,
+            px_width=px_width,
+            uppx=uppx,
+            log_scale=bool(uppx)
+        )
+        x = np.broadcast_to(x[:, None], y.shape)
+        # x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
+        x = (x + np.array([-0.5, 0, 0, 0.5])).flatten()
+        y = y.flatten()
+
+        # presumably?
+        # self._in_ds = True
+        return x, y
+
+    def maybe_downsample(
+        self,
+    ) -> None:
+        '''
+        Simple update call but with previously cached arrays data.
+
+        '''
+        # print('DS CALLED FROM INTERACTION?')
+        # presume this is a so called "interaction update", see
+        # ``ChartView.maybe_downsample_graphics()``.
+        self.update_from_array(self._x, self._y)
+
     def update_from_array(
         self,
+
+        # full array input history
         x: np.ndarray,
         y: np.ndarray,
+
+        # pre-sliced array data that's "in view"
+        x_iv: np.ndarray,
+        y_iv: np.ndarray,
+
+        view_range: Optional[tuple[int, int]] = None,
+
     ) -> QtGui.QPainterPath:
         '''
         Update curve from input 2-d data.
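A quick numeric sense-check of the uppx (units-per-pixel) quantity that drives the new downsample gating, together with the log-scale inflation from `ds_m4()` (the datum and pixel counts below are assumed for illustration):

import math

uppx = 6000 / 1000      # 6k datums spanning a ~1k px wide viewbox -> 6 uppx
scaler = round(max(2**6 / (1 + math.log(uppx, 2)), 1))
assert scaler == 18     # px_width gets inflated ~18x before m4 binning

At uppx <= 1 there is at least one pixel per datum and downsampling buys nothing, which is why the reversion branch in the body below kicks back to source data at small uppx values.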
@@ -173,38 +333,198 @@ class FastAppendCurve(pg.GraphicsObject):
         a length diff.

         '''
-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
+        profiler = pg.debug.Profiler(
+            msg=f'FastAppendCurve.update_from_array(): `{self._name}`',
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )
         flip_cache = False
+        draw_full_path = True

-        istart, istop = self._xrange
-        # print(f"xrange: {self._xrange}")
+        # XXX: lol brutal, the internals of `CurvePoint` (inherited by
+        # our `LineDot`) required ``.getData()`` to work..
+        self.xData = x
+        self.yData = y
+
+        # update internal array refs
+        self._x, self._y = x, y

         # compute the length diffs between the first/last index entry in
         # the input data and the last indexes we have on record from the
         # last time we updated the curve index.
-        prepend_length = istart - x[0]
-        append_length = x[-1] - istop
+        if self._xrange:
+            istart, istop = self._xrange
+        else:
+            self._xrange = istart, istop = x[0], x[-1]
+
+        prepend_length = int(istart - x[0])
+        append_length = int(x[-1] - istop)
+
+        # print(f"xrange: {self._xrange}")
+        if view_range:
+            li, ri = view_range
+            # x, y = x[lbar:rbar], y[lbar:rbar]
+            # x, y = x_iv, y_iv
+            profiler(f'view range slice {view_range}')
+
+            # if self._name == 'OHLC':
+            #     print(f'view range slice {view_range}')
+
+        # ds state checking
+        uppx = self.x_uppx()
+        px_width = self.px_width()
+        uppx_diff = (uppx - self._last_uppx)

         # step mode: draw flat top discrete "step"
         # over the index space for each datum.
         if self._step_mode:
-            x_out, y_out = step_path_arrays_from_1d(x[:-1], y[:-1])
+            # TODO: numba this bish
+            # x_out, y_out = step_path_arrays_from_1d(
+            #     x[:-1], y[:-1]
+            # )
+
+            x_iv_out, y_iv_out = step_path_arrays_from_1d(
+                x_iv[:-1], y_iv[:-1]
+            )
+
+            profiler('generated step arrays')

         else:
             # by default we only pull data up to the last (current) index
-            x_out, y_out = x[:-1], y[:-1]
+            # x_out, y_out = x[:-1], y[:-1]
+            x_iv_out, y_iv_out = x_iv[:-1], y_iv[:-1]
+            profiler('sliced array history')

-        if self.path is None or prepend_length > 0:
-            self.path = pg.functions.arrayToQPath(
-                x_out,
-                y_out,
-                connect='all',
-                finiteCheck=False,
-            )
-            profiler('generate fresh path')
+        # by default plan to draw the source output that's "in view"
+        x_to_path, y_to_path = x_iv_out, y_iv_out
+
+        ds_key = px_width, uppx
+
+        # always re-ds if we were dsed but the input range changes.
+        if self._in_ds:
+            # slice out the portion of the downsampled data that is
+            # "in view" and **only** draw a path for that.
+
+            entry = self._ds_cache.get(ds_key)
+            if entry:
+                x_ds_out, y_ds_out, first_i, last_i = entry
+
+                # if last_i == x[-1]:
+                log.info(
+                    f'{self._name} has cached ds {ds_key} -> {entry}'
+                )
+                prepend_length = int(first_i - ri)
+                append_length = int(ri - last_i)
+
+                # x_to_path = x_ds_out
+                # y_to_path = y_ds_out
+
+            # else:
+            #     log.warn(f'{self._name} ds updates unhandled!')
+            #     # DS only the new part?
+
+        # check for downsampling conditions
+        if (
+            # std m4 downsample conditions
+            uppx_diff >= 4
+            or uppx_diff <= -2
+            or self._step_mode and abs(uppx_diff) >= 2
+
+            # or self._in_ds and px_width > 1
+        ):
+            # if not uppx_diff >= 1:
+            log.info(
+                f'{self._name} sampler change: {self._last_uppx} -> {uppx}'
+            )
+            self._last_uppx = uppx
+            # should_ds = {'px_width': px_width, 'uppx': uppx}

             # if self._step_mode:
-            #     self.path.closeSubpath()
+            #     # TODO: numba this bish
+            #     x_out, y_out = step_path_arrays_from_1d(
+            #         x_iv[:-1], y_iv[:-1]
+            #     )
+            # else:
+            #     # by default we only pull data up to the last (current) index
+            #     x_out, y_out = x_iv[:-1], y_iv[:-1]
+
+            x_ds_out, y_ds_out = self.downsample(
+                x_iv_out,
+                y_iv_out,
+
+                px_width=px_width,
+                uppx=uppx,
+            )
+            profiler(
+                f'path downsample ds_key={ds_key}\n'
+                f'{x_iv_out.size}, {y_iv_out.size}'
+            )
+
+            # cache downsampled outputs
+            self._ds_cache[ds_key] = (
+                x_ds_out,
+                y_ds_out,
+                x[0],
+                x[-1],
+            )
+
+            x_to_path = x_ds_out
+            y_to_path = y_ds_out
+
+            self._in_ds = True
+
+        elif (
+            uppx <= 8
+            and self._in_ds
+        ):
+            # we should de-downsample back to our original
+            # source data so we clear our path data in prep
+            # to generate a new one from original source data.
+            if self.path:
+                self.path.clear()
+
+            if self.fast_path:
+                self.fast_path.clear()
+
+            log.info(f'DEDOWN -> {self._name}')
+            profiler('path reversion to non-ds data')
+
+            self._in_ds = False
+
+        # render path graphics
+        # log.info(
+        #     # f'{self._name}: last sizes {x_to_path.size}, {y_to_path.size}',
+        #     f'{self._name}: sizes {x_to_path.size}, {y_to_path.size}',
+        # )
+
+        self._last_topaths = x_to_path, y_to_path
+
+        no_path_yet = self.path is None
+
+        if draw_full_path:
+            self.path = pg.functions.arrayToQPath(
+                x_to_path,
+                y_to_path,
+                connect='all',
+                finiteCheck=False,
+                path=self.path,
+            )
+            profiler('generated FULL PATH -> {self._name}')
+
+            # reserve mem allocs see:
+            # - https://doc.qt.io/qt-5/qpainterpath.html#reserve
+            # - https://doc.qt.io/qt-5/qpainterpath.html#capacity
+            # - https://doc.qt.io/qt-5/qpainterpath.html#clear
+            # XXX: right now this is based on ad hoc checks on a
+            # hidpi 3840x2160 4k monitor but we should optimize for
+            # the target display(s) on the sys.
+            if no_path_yet:
+                self.path.reserve(int(500e3))
+
+        self._last_vr = view_range

         # TODO: get this piecewise prepend working - right now it's
         # giving heck on vwap...
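A toy sketch of the (px_width, uppx) keyed cache pattern used above: because the key is render geometry, any zoom change (new uppx) or widget resize (new px_width) misses the cache and forces a fresh m4 pass. Names here are illustrative only and mirror the `(x_ds_out, y_ds_out, x[0], x[-1])` tuple layout from the hunk:

from typing import Optional
import numpy as np

DsKey = tuple[int, int]  # (px_width, uppx)

class DsCache:
    def __init__(self) -> None:
        self._entries: dict[DsKey, tuple] = {}

    def get(self, key: DsKey) -> Optional[tuple]:
        # hits only while the rendered zoom/resize state is unchanged
        return self._entries.get(key)

    def put(
        self,
        key: DsKey,
        x: np.ndarray,
        y: np.ndarray,
        first_i: int,
        last_i: int,
    ) -> None:
        self._entries[key] = (x, y, first_i, last_i)

cache = DsCache()
cache.put((1000, 6), np.arange(4), np.arange(4), 0, 3)
assert cache.get((1000, 6)) is not None
assert cache.get((1000, 7)) is None  # zoom changed -> re-downsample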
@@ -223,74 +543,90 @@ class FastAppendCurve(pg.GraphicsObject):
         #     # self.path.moveTo(new_x[0], new_y[0])
         #     self.path.connectPath(old_path)

-        elif append_length > 0:
-            if self._step_mode:
-                new_x, new_y = step_path_arrays_from_1d(
-                    x[-append_length - 2:-1],
-                    y[-append_length - 2:-1],
-                )
-                # [1:] since we don't need the vertical line normally at
-                # the beginning of the step curve taking the first (x,
-                # y) poing down to the x-axis **because** this is an
-                # appended path graphic.
-                new_x = new_x[1:]
-                new_y = new_y[1:]
-
-            else:
-                # print(f"append_length: {append_length}")
-                new_x = x[-append_length - 2:-1]
-                new_y = y[-append_length - 2:-1]
-                # print((new_x, new_y))
-
-            append_path = pg.functions.arrayToQPath(
-                new_x,
-                new_y,
-                connect='all',
-                # finiteCheck=False,
-            )
-
-            path = self.path
+        # elif (
+        #     append_length > 0
+        # ):
+        #     if self._step_mode:
+        #         new_x, new_y = step_path_arrays_from_1d(
+        #             x[-append_length - 2:-1],
+        #             y[-append_length - 2:-1],
+        #         )
+        #         # [1:] since we don't need the vertical line normally at
+        #         # the beginning of the step curve taking the first (x,
+        #         # y) poing down to the x-axis **because** this is an
+        #         # appended path graphic.
+        #         new_x = new_x[1:]
+        #         new_y = new_y[1:]
+
+        #     else:
+        #         # print(f"append_length: {append_length}")
+        #         new_x = x[-append_length - 2:-1]
+        #         new_y = y[-append_length - 2:-1]
+        #         # print((new_x, new_y))
+
+        #     profiler('diffed append arrays')
+
+        #     if should_ds:
+        #         new_x, new_y = self.downsample(
+        #             new_x,
+        #             new_y,
+        #             **should_ds,
+        #         )
+        #         profiler(f'fast path downsample redraw={should_ds}')
+
+        #     append_path = pg.functions.arrayToQPath(
+        #         new_x,
+        #         new_y,
+        #         connect='all',
+        #         finiteCheck=False,
+        #         path=self.fast_path,
+        #     )
+
+        #     if self.use_fpath:
+        #         # an attempt at trying to make append-updates faster..
+        #         if self.fast_path is None:
+        #             self.fast_path = append_path
+        #             self.fast_path.reserve(int(6e3))
+        #         else:
+        #             self.fast_path.connectPath(append_path)
+        #             size = self.fast_path.capacity()
+        #             profiler(f'connected fast path w size: {size}')
+
+        #             # print(f"append_path br: {append_path.boundingRect()}")
+        #             # self.path.moveTo(new_x[0], new_y[0])
+        #             # path.connectPath(append_path)
+
+        #             # XXX: lol this causes a hang..
+        #             # self.path = self.path.simplified()
+        #     else:
+        #         size = self.path.capacity()
+        #         profiler(f'connected history path w size: {size}')
+        #         self.path.connectPath(append_path)

             # other merging ideas:
             # https://stackoverflow.com/questions/8936225/how-to-merge-qpainterpaths
-            if self._step_mode:
-                # path.addPath(append_path)
-                # path.closeSubpath()
-                self.path.connectPath(append_path)
+            # path.addPath(append_path)
+            # path.closeSubpath()

                 # TODO: try out new work from `pyqtgraph` main which
                 # should repair horrid perf:
                 # https://github.com/pyqtgraph/pyqtgraph/pull/2032
                 # ok, nope still horrible XD
                 # if self._fill:
                 #     # XXX: super slow set "union" op
                 #     self.path = self.path.united(append_path).simplified()

-                #     # path.addPath(append_path)
-                #     # path.closeSubpath()
-
-            else:
-                # print(f"append_path br: {append_path.boundingRect()}")
-                # self.path.moveTo(new_x[0], new_y[0])
-                # self.path.connectPath(append_path)
-                path.connectPath(append_path)
-
-            self.disable_cache()
-            flip_cache = True
-
-        if (
-            self._step_mode
-        ):
-            self.disable_cache()
-            flip_cache = True
-
-        # print(f"update br: {self.path.boundingRect()}")
-
-        # XXX: lol brutal, the internals of `CurvePoint` (inherited by
-        # our `LineDot`) required ``.getData()`` to work..
-        self.xData = x
-        self.yData = y
-
-        x0, x_last = self._xrange = x[0], x[-1]
+            #     self.disable_cache()
+            #     flip_cache = True
+
+        # XXX: do we need this any more?
+        # if (
+        #     self._step_mode
+        # ):
+        #     self.disable_cache()
+        #     flip_cache = True
+
+        x_last = x[-1]
         y_last = y[-1]

         # draw the "current" step graphic segment so it lines up with
@@ -304,6 +640,11 @@ class FastAppendCurve(pg.GraphicsObject):
                 x_last - 0.5, 0,
                 x_last + 0.5, y_last
             )
+            # print(
+            #     f"path br: {self.path.boundingRect()}",
+            #     f"fast path br: {self.fast_path.boundingRect()}",
+            #     f"last rect br: {self._last_step_rect}",
+            # )
         else:
             # print((x[-1], y_last))
             self._last_line = QLineF(
@@ -311,14 +652,50 @@ class FastAppendCurve(pg.GraphicsObject):
                 x[-1], y_last
             )

+        profiler('draw last segment')
+
         # trigger redraw of path
         # do update before reverting to cache mode
-        self.prepareGeometryChange()
+        # self.prepareGeometryChange()
         self.update()
+        profiler('.update()')

         if flip_cache:
             # XXX: seems to be needed to avoid artifacts (see above).
-            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+            self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
+
+    # XXX: lol brutal, the internals of `CurvePoint` (inherited by
+    # our `LineDot`) required ``.getData()`` to work..
+    def getData(self):
+        return self._x, self._y
+
+    # TODO: drop the above after ``Cursor`` re-work
+    def get_arrays(self) -> tuple[np.ndarray, np.ndarray]:
+        return self._x, self._y
+
+    def clear(self):
+        '''
+        Clear internal graphics making object ready for full re-draw.
+
+        '''
+        # NOTE: original code from ``pg.PlotCurveItem``
+        self.xData = None
+        self.yData = None
+
+        # XXX: previously, if not trying to leverage `.reserve()` allocs
+        # then you might as well create a new one..
+        # self.path = None
+
+        # path reservation aware non-mem de-alloc cleaning
+        if self.path:
+            self.path.clear()
+
+        if self.fast_path:
+            # self.fast_path.clear()
+            self.fast_path = None
+
+        # self.disable_cache()
+        # self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

     def disable_cache(self) -> None:
         '''
@@ -339,16 +716,21 @@ class FastAppendCurve(pg.GraphicsObject):
         else:
             # dynamically override this method after initial
             # path is created to avoid requiring the above None check
-            self.boundingRect = self._br
-            return self._br()
+            self.boundingRect = self._path_br
+            return self._path_br()

-    def _br(self):
+    def _path_br(self):
         '''
         Post init ``.boundingRect()```.

         '''
         hb = self.path.controlPointRect()
         hb_size = hb.size()

+        fp = self.fast_path
+        if fp:
+            fhb = fp.controlPointRect()
+            hb_size = fhb.size() + hb_size
         # print(f'hb_size: {hb_size}')

         w = hb_size.width() + 1
@@ -373,32 +755,44 @@ class FastAppendCurve(pg.GraphicsObject):

     ) -> None:

-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
-        # p.setRenderHint(p.Antialiasing, True)
+        profiler = pg.debug.Profiler(
+            msg=f'FastAppendCurve.paint(): `{self._name}`',
+            disabled=not pg_profile_enabled(),
+            # disabled=True,
+            gt=ms_slower_then,
+        )

         if (
             self._step_mode
             and self._last_step_rect
         ):
-            brush = self.opts['brush']
+            brush = self._brush

             # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
             # p.drawRect(self._last_step_rect)
             p.fillRect(self._last_step_rect, brush)
+            profiler('.fillRect()')

-        # p.drawPath(self.path)
-        # profiler('.drawPath()')
+        if self._last_line:
+            p.setPen(self.last_step_pen)
+            p.drawLine(self._last_line)
+            profiler('.drawLine()')
+            p.setPen(self._pen)

-        p.setPen(self.last_step_pen)
-        p.drawLine(self._last_line)
-        profiler('.drawLine()')
+        path = self.path

-        # else:
-        p.setPen(self.opts['pen'])
-        p.drawPath(self.path)
-        profiler('.drawPath()')
+        if path:
+            p.drawPath(path)
+            profiler('.drawPath(path)')
+
+        fp = self.fast_path
+        if fp:
+            p.drawPath(fp)
+            profiler('.drawPath(fast_path)')

-        # TODO: try out new work from `pyqtgraph` main which
-        # should repair horrid perf:
+        # TODO: try out new work from `pyqtgraph` main which should
+        # repair horrid perf (pretty sure i did and it was still
+        # horrible?):
         # https://github.com/pyqtgraph/pyqtgraph/pull/2032
         # if self._fill:
         #     brush = self.opts['brush']
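The `gt=ms_slower_then` pattern threaded through the profiler calls above only emits timing output when a paint/update cycle exceeds a millisecond threshold. A minimal stand-alone sketch of that idea follows; the names are illustrative and this is not the profiler's real API:

import time

def timed_section(label: str, threshold_ms: float = 10.0):
    class _Timer:
        def __enter__(self):
            self._t0 = time.perf_counter()
            return self
        def __exit__(self, *exc):
            elapsed_ms = (time.perf_counter() - self._t0) * 1e3
            if elapsed_ms > threshold_ms:
                print(f'{label}: {elapsed_ms:.2f}ms')  # only log slow cycles
    return _Timer()

with timed_section('paint cycle', threshold_ms=10):
    sum(range(100_000))  # stand-in for the Qt path drawing work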
@@ -21,16 +21,20 @@ this module ties together quote and computational (fsp) streams with
 graphics update methods via our custom ``pyqtgraph`` charting api.

 '''
+from dataclasses import dataclass
 from functools import partial
 import time
-from typing import Optional
+from typing import Optional, Any, Callable

 import numpy as np
 import tractor
 import trio
+import pyqtgraph as pg
+from PyQt5.QtCore import QLineF

 from .. import brokers
 from ..data.feed import open_feed
+from ._axes import YAxisLabel
 from ._chart import (
     ChartPlotWidget,
     LinkedSplits,
@@ -49,12 +53,16 @@ from ._forms import (
     mk_order_pane_layout,
 )
 from .order_mode import open_order_mode
+# from .._profile import (
+#     pg_profile_enabled,
+#     ms_slower_then,
+# )
 from ..log import get_logger

 log = get_logger(__name__)

 # TODO: load this from a config.toml!
-_quote_throttle_rate: int = 6 + 16  # Hz
+_quote_throttle_rate: int = 12  # Hz


 # a working tick-type-classes template
@@ -109,6 +117,33 @@ def chart_maxmin(
     return last_bars_range, mx, max(mn, 0), mx_vlm_in_view


+@dataclass
+class DisplayState:
+    '''
+    Chart-local real-time graphics state container.
+
+    '''
+    quotes: dict[str, Any]
+
+    maxmin: Callable
+    ohlcv: ShmArray
+
+    # high level chart handles
+    linked: LinkedSplits
+    chart: ChartPlotWidget
+    vlm_chart: ChartPlotWidget
+
+    # axis labels
+    l1: L1Labels
+    last_price_sticky: YAxisLabel
+    vlm_sticky: YAxisLabel
+
+    # misc state tracking
+    vars: dict[str, Any]
+
+    wap_in_history: bool = False
+
+
 async def graphics_update_loop(

     linked: LinkedSplits,
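The new `DisplayState` packages every per-chart mutable reference into one object so the hot update path can become a plain synchronous function instead of threading a dozen locals through the async loop. A sketch of that container pattern (illustrative names, not piker's API):

from dataclasses import dataclass, field
from typing import Any, Callable

@dataclass
class CycleState:
    quotes: dict[str, Any]
    maxmin: Callable
    vars: dict[str, Any] = field(default_factory=dict)

def update_cycle(ds: CycleState) -> None:
    # read mutable state, compute, write results back into `vars`
    brange, mx, mn, _ = ds.maxmin()
    ds.vars['last_mx'], ds.vars['last_mn'] = mx, mn

# build once, then mutate per stream iteration
state = CycleState(quotes={}, maxmin=lambda: ((0, 0, 0, 0), 1.0, 0.0, 0.0))
state.quotes = {'btcusdt': {'last': 42000.0}}
update_cycle(state)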
@@ -147,10 +182,8 @@ async def graphics_update_loop(

     if vlm_chart:
         vlm_sticky = vlm_chart._ysticks['volume']
-        vlm_view = vlm_chart.view

     maxmin = partial(chart_maxmin, chart, vlm_chart)
-    chart.default_view()
     last_bars_range: tuple[float, float]
     (
         last_bars_range,
@@ -183,7 +216,7 @@ async def graphics_update_loop(
     tick_margin = 3 * tick_size

     chart.show()
-    view = chart.view
+    # view = chart.view
     last_quote = time.time()
     i_last = ohlcv.index

@@ -210,8 +243,31 @@ async def graphics_update_loop(

     # async for quotes in iter_drain_quotes():

-    async for quotes in stream:
+    ds = linked.display_state = DisplayState(**{
+        'quotes': {},
+        'linked': linked,
+        'maxmin': maxmin,
+        'ohlcv': ohlcv,
+        'chart': chart,
+        'last_price_sticky': last_price_sticky,
+        'vlm_chart': vlm_chart,
+        'vlm_sticky': vlm_sticky,
+        'l1': l1,
+
+        'vars': {
+            'tick_margin': tick_margin,
+            'i_last': i_last,
+            'last_mx_vlm': last_mx_vlm,
+            'last_mx': last_mx,
+            'last_mn': last_mn,
+        }
+    })
+
+    chart.default_view()
+
+    # main loop
+    async for quotes in stream:
+        ds.quotes = quotes
         quote_period = time.time() - last_quote
         quote_rate = round(
             1/quote_period, 1) if quote_period > 0 else float('inf')
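Worked example of the rate bookkeeping in the loop above: at the new `_quote_throttle_rate = 12` Hz the expected inter-quote period is 1/12 s, roughly 83 ms, so a healthy stream reports a rate near 12.0:

import time

_quote_throttle_rate = 12  # Hz
last_quote = time.time()

def measure(now: float) -> float:
    # same arithmetic as the loop: rate = 1 / inter-quote period
    global last_quote
    quote_period = now - last_quote
    last_quote = now
    return round(1 / quote_period, 1) if quote_period > 0 else float('inf')

print(measure(last_quote + 1 / _quote_throttle_rate))  # -> ~12.0 Hz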
|
@ -232,53 +288,153 @@ async def graphics_update_loop(
|
||||||
chart.pause_all_feeds()
|
chart.pause_all_feeds()
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for sym, quote in quotes.items():
|
ic = chart.view._ic
|
||||||
|
if ic:
|
||||||
|
chart.pause_all_feeds()
|
||||||
|
await ic.wait()
|
||||||
|
chart.resume_all_feeds()
|
||||||
|
|
||||||
(
|
# sync call to update all graphics/UX components.
|
||||||
brange,
|
graphics_update_cycle(ds)
|
||||||
mx_in_view,
|
|
||||||
mn_in_view,
|
|
||||||
mx_vlm_in_view,
|
|
||||||
) = maxmin()
|
|
||||||
l, lbar, rbar, r = brange
|
|
||||||
mx = mx_in_view + tick_margin
|
|
||||||
mn = mn_in_view - tick_margin
|
|
||||||
|
|
||||||
# NOTE: vlm may be written by the ``brokerd`` backend
|
|
||||||
# event though a tick sample is not emitted.
|
|
||||||
# TODO: show dark trades differently
|
|
||||||
# https://github.com/pikers/piker/issues/116
|
|
||||||
array = ohlcv.array
|
|
||||||
|
|
||||||
# NOTE: this used to be implemented in a dedicated
|
def graphics_update_cycle(
|
||||||
# "increment tas": ``check_for_new_bars()`` but it doesn't
|
ds: DisplayState,
|
||||||
# make sense to do a whole task switch when we can just do
|
wap_in_history: bool = False,
|
||||||
# this simple index-diff and all the fsp sub-curve graphics
|
|
||||||
# are diffed on each draw cycle anyway; so updates to the
|
|
||||||
# "curve" length is already automatic.
|
|
||||||
|
|
||||||
# increment the view position by the sample offset.
|
) -> None:
|
||||||
i_step = ohlcv.index
|
|
||||||
i_diff = i_step - i_last
|
|
||||||
if i_diff > 0:
|
|
||||||
chart.increment_view(
|
|
||||||
steps=i_diff,
|
|
||||||
)
|
|
||||||
i_last = i_step
|
|
||||||
|
|
||||||
if vlm_chart:
|
# TODO: eventually optimize this whole graphics stack with ``numba``
|
||||||
vlm_chart.update_curve_from_array('volume', array)
|
# hopefully XD
|
||||||
vlm_sticky.update_from_data(*array[-1][['index', 'volume']])
|
|
||||||
|
profiler = pg.debug.Profiler(
|
||||||
|
disabled=True, # not pg_profile_enabled(),
|
||||||
|
gt=1/12 * 1e3,
|
||||||
|
)
|
||||||
|
|
||||||
|
# unpack multi-referenced components
|
||||||
|
chart = ds.chart
|
||||||
|
vlm_chart = ds.vlm_chart
|
||||||
|
l1 = ds.l1
|
||||||
|
|
||||||
|
ohlcv = ds.ohlcv
|
||||||
|
array = ohlcv.array
|
||||||
|
vars = ds.vars
|
||||||
|
tick_margin = vars['tick_margin']
|
||||||
|
|
||||||
|
update_uppx = 5
|
||||||
|
|
||||||
|
for sym, quote in ds.quotes.items():
|
||||||
|
brange, mx_in_view, mn_in_view, mx_vlm_in_view = ds.maxmin()
|
||||||
|
l, lbar, rbar, r = brange
|
||||||
|
mx = mx_in_view + tick_margin
|
||||||
|
mn = mn_in_view - tick_margin
|
||||||
|
profiler('maxmin call')
|
||||||
|
|
||||||
|
# compute the first available graphic's x-units-per-pixel
|
||||||
|
xpx = vlm_chart.view.xs_in_px()
|
||||||
|
|
||||||
|
in_view = chart.in_view(ohlcv.array)
|
||||||
|
|
||||||
|
if lbar != rbar:
|
||||||
|
# view box width in pxs
|
||||||
|
w = chart.view.boundingRect().width()
|
||||||
|
|
||||||
|
# TODO: a better way to get this?
|
||||||
|
# i would guess the esiest way is to just
|
||||||
|
# get the ``.boundingRect()`` of the curve
|
||||||
|
# in view but maybe there's something smarter?
|
||||||
|
# Currently we're just mapping the rbar, lbar to
|
||||||
|
# pixels via:
|
||||||
|
cw = chart.view.mapViewToDevice(QLineF(lbar, 0, rbar, 0)).length()
|
||||||
|
# is this faster?
|
||||||
|
# cw = chart.mapFromView(QLineF(lbar, 0 , rbar, 0)).length()
|
||||||
|
|
||||||
|
profiler(
|
||||||
|
f'view width pxs: {w}\n'
|
||||||
|
f'curve width pxs: {cw}\n'
|
||||||
|
f'sliced in view: {in_view.size}'
|
||||||
|
)
|
||||||
|
|
||||||
|
# compress bars to m4 line(s) if uppx is high enough
|
||||||
|
# if in_view.size > cw:
|
||||||
|
# from ._compression import ds_m4, hl2mxmn
|
||||||
|
|
||||||
|
# mxmn, x = hl2mxmn(in_view)
|
||||||
|
# profiler('hl tracer')
|
||||||
|
|
||||||
|
# nb, x, y = ds_m4(
|
||||||
|
# x=x,
|
||||||
|
# y=mxmn,
|
||||||
|
# # TODO: this needs to actually be the width
|
||||||
|
# # in pixels of the visible curve since we don't
|
||||||
|
# # want to downsample any 'zeros' around the curve,
|
||||||
|
# # just the values that make up the curve graphic,
|
||||||
|
# # i think?
|
||||||
|
# px_width=cw,
|
||||||
|
# )
|
||||||
|
# profiler(
|
||||||
|
# 'm4 downsampled\n'
|
||||||
|
# f' ds bins: {nb}\n'
|
||||||
|
# f' x.shape: {x.shape}\n'
|
||||||
|
# f' y.shape: {y.shape}\n'
|
||||||
|
# f' x: {x}\n'
|
||||||
|
# f' y: {y}\n'
|
||||||
|
# )
|
||||||
|
|
||||||
|
# assert y.size == mxmn.size
|
||||||
|
|
||||||
|
# NOTE: vlm may be written by the ``brokerd`` backend
|
||||||
|
# event though a tick sample is not emitted.
|
||||||
|
# TODO: show dark trades differently
|
||||||
|
# https://github.com/pikers/piker/issues/116
|
||||||
|
|
||||||
|
# NOTE: this used to be implemented in a dedicated
|
||||||
|
# "increment tas": ``check_for_new_bars()`` but it doesn't
|
||||||
|
# make sense to do a whole task switch when we can just do
|
||||||
|
# this simple index-diff and all the fsp sub-curve graphics
|
||||||
|
# are diffed on each draw cycle anyway; so updates to the
|
||||||
|
# "curve" length is already automatic.
|
||||||
|
|
||||||
|
# increment the view position by the sample offset.
|
||||||
|
i_step = ohlcv.index
|
||||||
|
i_diff = i_step - vars['i_last']
|
||||||
|
vars['i_last'] = i_step
|
||||||
|
|
||||||
|
# don't real-time "shift" the curve to the
|
||||||
|
# left under the following conditions:
|
||||||
|
if (
|
||||||
|
i_diff > 0 # no new sample step
|
||||||
|
and xpx < update_uppx # chart is zoomed out very far
|
||||||
|
and r >= i_step # the last datum isn't in view
|
||||||
|
):
|
||||||
|
# TODO: we should track and compute whether the last
|
||||||
|
# pixel in a curve should show new data based on uppx
|
||||||
|
# and then iff update curves and shift?
|
||||||
|
chart.increment_view(steps=i_diff)
|
||||||
|
|
||||||
|
if vlm_chart:
|
||||||
|
# always update y-label
|
||||||
|
ds.vlm_sticky.update_from_data(*array[-1][['index', 'volume']])
|
||||||
|
|
||||||
|
if (
|
||||||
|
# if zoomed out alot don't update the last "bar"
|
||||||
|
(xpx < update_uppx or i_diff > 0)
|
||||||
|
and r >= i_step
|
||||||
|
):
|
||||||
|
# TODO: make it so this doesn't have to be called
|
||||||
|
# once the $vlm is up?
|
||||||
|
vlm_chart.update_graphics_from_array('volume', array)
|
||||||
|
|
||||||
if (
|
if (
|
||||||
mx_vlm_in_view != last_mx_vlm or
|
mx_vlm_in_view != vars['last_mx_vlm']
|
||||||
mx_vlm_in_view > last_mx_vlm
|
or mx_vlm_in_view > vars['last_mx_vlm']
|
||||||
):
|
):
|
||||||
# print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
|
# print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
|
||||||
vlm_view._set_yrange(
|
vlm_chart.view._set_yrange(
|
||||||
yrange=(0, mx_vlm_in_view * 1.375)
|
yrange=(0, mx_vlm_in_view * 1.375)
|
||||||
)
|
)
|
||||||
last_mx_vlm = mx_vlm_in_view
|
vars['last_mx_vlm'] = mx_vlm_in_view
|
||||||
|
|
||||||
for curve_name, flow in vlm_chart._flows.items():
|
for curve_name, flow in vlm_chart._flows.items():
|
||||||
update_fsp_chart(
|
update_fsp_chart(
|
||||||
|
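The new cycle gates per-tick redraws on "uppx" (x-units per pixel): when one pixel spans more than `update_uppx` bars, redrawing the last bar on every tick cannot change any on-screen pixels, so the work is skipped. A sketch of that gate, matching the conditions in the diff (function name is illustrative):

update_uppx = 5

def should_update_last_bar(
    xpx: float,    # x-units rendered per pixel
    i_diff: int,   # new samples since last cycle
    r: float,      # right-most x currently in view
    i_step: int,   # index of the latest sample
) -> bool:
    return (
        (xpx < update_uppx or i_diff > 0)
        and r >= i_step  # the last datum is actually on screen
    )

assert should_update_last_bar(xpx=1.0, i_diff=0, r=100, i_step=99)
assert not should_update_last_bar(xpx=8.0, i_diff=0, r=100, i_step=99)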
@@ -293,161 +449,167 @@ async def graphics_update_loop(
                     name=curve_name,
                 )

         ticks_frame = quote.get('ticks', ())

         frames_by_type: dict[str, dict] = {}
         lasts = {}

         # build tick-type "frames" of tick sequences since
         # likely the tick arrival rate is higher than our
         # (throttled) quote stream rate.
         for tick in ticks_frame:
             price = tick.get('price')
             ticktype = tick.get('type')

             if ticktype == 'n/a' or price == -1:
                 # okkk..
                 continue

             # keys are entered in oldest-event-inserted-first order
             # since we iterate ``ticks_frame`` in standard order
             # above. in other words the order of the keys is the order
             # of tick events by type from the provider feed.
             frames_by_type.setdefault(ticktype, []).append(tick)

             # overwrites so the last tick per type is the entry
             lasts[ticktype] = tick

         # from pprint import pformat
         # frame_counts = {
         #     typ: len(frame) for typ, frame in frames_by_type.items()
         # }
         # print(f'{pformat(frame_counts)}')
         # print(f'framed: {pformat(frames_by_type)}')
         # print(f'lasts: {pformat(lasts)}')

         # TODO: eventually we want to separate out the utrade (aka
         # dark vlm prices) here and show them as an additional
         # graphic.
         clear_types = _tick_groups['clears']

         # XXX: if we wanted to iterate in "latest" (i.e. most
         # current) tick first order as an optimization where we only
         # update from the last tick from each type class.
         # last_clear_updated: bool = False
         # for typ, tick in reversed(lasts.items()):

-        # iterate in FIFO order per frame
-        for typ, tick in lasts.items():
-
-            price = tick.get('price')
-            size = tick.get('size')
-
-            # compute max and min prices (including bid/ask) from
-            # tick frames to determine the y-range for chart
-            # auto-scaling.
-            # TODO: we need a streaming minmax algo here, see def above.
-            mx = max(price + tick_margin, mx)
-            mn = min(price - tick_margin, mn)
-
-            if typ in clear_types:
-
-                # XXX: if we only wanted to update graphics from the
-                # "current"/"latest received" clearing price tick
-                # once (see alt iteration order above).
-                # if last_clear_updated:
-                #     continue
-
-                # last_clear_updated = True
-                # we only want to update graphics from the *last*
-                # tick event that falls under the "clearing price"
-                # set.
-
-                # update price sticky(s)
-                end = array[-1]
-                last_price_sticky.update_from_data(
-                    *end[['index', 'close']]
-                )
-
-                # update ohlc sampled price bars
-                chart.update_ohlc_from_array(
-                    chart.name,
-                    array,
-                )
-
-                if wap_in_history:
-                    # update vwap overlay line
-                    chart.update_curve_from_array('bar_wap', ohlcv.array)
-
-                # L1 book label-line updates
-                # XXX: is this correct for ib?
-                # if ticktype in ('trade', 'last'):
-                # if ticktype in ('last',): # 'size'):
-                if typ in ('last',): # 'size'):
-
-                    label = {
-                        l1.ask_label.fields['level']: l1.ask_label,
-                        l1.bid_label.fields['level']: l1.bid_label,
-                    }.get(price)
-
-                    if label is not None:
-                        label.update_fields({'level': price, 'size': size})
-
-                        # TODO: on trades should we be knocking down
-                        # the relevant L1 queue?
-                        # label.size -= size
-
-                # elif ticktype in ('ask', 'asize'):
-                elif typ in _tick_groups['asks']:
-                    l1.ask_label.update_fields({'level': price, 'size': size})
-
-                # elif ticktype in ('bid', 'bsize'):
-                elif typ in _tick_groups['bids']:
-                    l1.bid_label.update_fields({'level': price, 'size': size})
-
-            # check for y-range re-size
-            if (
-                (mx > last_mx) or (mn < last_mn)
-                and not chart._static_yrange == 'axis'
-            ):
-                # print(f'new y range: {(mn, mx)}')
-                view._set_yrange(
-                    yrange=(mn, mx),
-                    # TODO: we should probably scale
-                    # the view margin based on the size
-                    # of the true range? This way you can
-                    # slap in orders outside the current
-                    # L1 (only) book range.
-                    # range_margin=0.1,
-                )
-
-            last_mx, last_mn = mx, mn
-
-            # run synchronous update on all derived fsp subplots
-            for name, subchart in linked.subplots.items():
-                update_fsp_chart(
-                    subchart,
-                    subchart._shm,
-
-                    # XXX: do we really need separate names here?
-                    name,
-                    array_key=name,
-                )
-                subchart.cv._set_yrange()
-
-            # TODO: all overlays on all subplots..
-
-            # run synchronous update on all derived overlays
-            for curve_name, flow in chart._flows.items():
-                update_fsp_chart(
-                    chart,
-                    flow.shm,
-                    curve_name,
-                    array_key=curve_name,
-                )
-                # chart.view._set_yrange()
-
-    # loop end
+        # update ohlc sampled price bars
+        if (
+            xpx < update_uppx
+            or i_diff > 0
+        ):
+            chart.update_graphics_from_array(
+                chart.name,
+                array,
+            )
+
+        # iterate in FIFO order per frame
+        for typ, tick in lasts.items():
+
+            price = tick.get('price')
+            size = tick.get('size')
+
+            # compute max and min prices (including bid/ask) from
+            # tick frames to determine the y-range for chart
+            # auto-scaling.
+            # TODO: we need a streaming minmax algo here, see def above.
+            mx = max(price + tick_margin, mx)
+            mn = min(price - tick_margin, mn)
+
+            if typ in clear_types:
+
+                # XXX: if we only wanted to update graphics from the
+                # "current"/"latest received" clearing price tick
+                # once (see alt iteration order above).
+                # if last_clear_updated:
+                #     continue
+
+                # last_clear_updated = True
+                # we only want to update graphics from the *last*
+                # tick event that falls under the "clearing price"
+                # set.
+
+                # update price sticky(s)
+                end = array[-1]
+                ds.last_price_sticky.update_from_data(
+                    *end[['index', 'close']]
+                )
+
+                if wap_in_history:
+                    # update vwap overlay line
+                    chart.update_graphics_from_array(
+                        'bar_wap',
+                        array,
+                    )
+
+                # L1 book label-line updates
+                # XXX: is this correct for ib?
+                # if ticktype in ('trade', 'last'):
+                # if ticktype in ('last',): # 'size'):
+                if typ in ('last',): # 'size'):
+
+                    label = {
+                        l1.ask_label.fields['level']: l1.ask_label,
+                        l1.bid_label.fields['level']: l1.bid_label,
+                    }.get(price)
+
+                    if label is not None:
+                        label.update_fields(
+                            {'level': price, 'size': size}
+                        )
+
+                        # TODO: on trades should we be knocking down
+                        # the relevant L1 queue?
+                        # label.size -= size
+
+                # elif ticktype in ('ask', 'asize'):
+                elif typ in _tick_groups['asks']:
+                    l1.ask_label.update_fields({'level': price, 'size': size})
+
+                # elif ticktype in ('bid', 'bsize'):
+                elif typ in _tick_groups['bids']:
+                    l1.bid_label.update_fields({'level': price, 'size': size})
+
+            # check for y-range re-size
+            if (
+                (mx > vars['last_mx']) or (mn < vars['last_mn'])
+                and not chart._static_yrange == 'axis'
+            ):
+                # print(f'new y range: {(mn, mx)}')
+                chart.view._set_yrange(
+                    yrange=(mn, mx),
+                    # TODO: we should probably scale
+                    # the view margin based on the size
+                    # of the true range? This way you can
+                    # slap in orders outside the current
+                    # L1 (only) book range.
+                    # range_margin=0.1,
+                )
+
+            vars['last_mx'], vars['last_mn'] = mx, mn
+
+        # run synchronous update on all derived fsp subplots
+        for name, subchart in ds.linked.subplots.items():
+            update_fsp_chart(
+                subchart,
+                subchart._shm,
+
+                # XXX: do we really need separate names here?
+                name,
+                array_key=name,
+            )
+            subchart.cv._set_yrange()
+
+        # TODO: all overlays on all subplots..
+
+        # run synchronous update on all derived overlays
+        for curve_name, flow in chart._flows.items():
+            update_fsp_chart(
+                chart,
+                flow.shm,
+                curve_name,
+                array_key=curve_name,
+            )


 async def display_symbol_data(
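The "framing" step above groups the burst of ticks that arrived since the last (throttled) quote into per-type frames, keeping only the newest tick per type for single-shot graphic updates. A runnable sketch of that grouping:

ticks_frame = [
    {'type': 'bid', 'price': 99.5, 'size': 10},
    {'type': 'trade', 'price': 100.0, 'size': 1},
    {'type': 'trade', 'price': 100.5, 'size': 2},
]

frames_by_type: dict[str, list] = {}
lasts: dict[str, dict] = {}

for tick in ticks_frame:
    ticktype = tick.get('type')
    # full per-type history for this frame
    frames_by_type.setdefault(ticktype, []).append(tick)
    # overwritten on each hit so only the newest tick survives
    lasts[ticktype] = tick

assert lasts['trade']['price'] == 100.5
assert len(frames_by_type['trade']) == 2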
@@ -480,30 +642,29 @@ async def display_symbol_data(
     # clear_on_next=True,
     # group_key=loading_sym_key,
     # )

     async with open_feed(
-        provider,
-        [sym],
+        ['.'.join((sym, provider))],
         loglevel=loglevel,

         # limit to at least display's FPS
         # avoiding needless Qt-in-guest-mode context switches
         tick_throttle=_quote_throttle_rate,

     ) as feed:
         ohlcv: ShmArray = feed.shm
         bars = ohlcv.array
         symbol = feed.symbols[sym]
+        fqsn = symbol.front_fqsn()

         # load in symbol's ohlc data
         godwidget.window.setWindowTitle(
-            f'{symbol.key}@{symbol.brokers} '
+            f'{fqsn} '
             f'tick:{symbol.tick_size} '
             f'step:1s '
         )

-        linkedsplits = godwidget.linkedsplits
-        linkedsplits._symbol = symbol
+        linked = godwidget.linkedsplits
+        linked._symbol = symbol

         # generate order mode side-pane UI
         # A ``FieldsForm`` form to configure order entry
@@ -513,7 +674,7 @@ async def display_symbol_data(
         godwidget.pp_pane = pp_pane

         # create main OHLC chart
-        chart = linkedsplits.plot_ohlc_main(
+        chart = linked.plot_ohlc_main(
             symbol,
             bars,
             sidepane=pp_pane,
@@ -543,8 +704,8 @@ async def display_symbol_data(
         # NOTE: we must immediately tell Qt to show the OHLC chart
         # to avoid a race where the subplots get added/shown to
         # the linked set *before* the main price chart!
-        linkedsplits.show()
-        linkedsplits.focus()
+        linked.show()
+        linked.focus()
         await trio.sleep(0)

         vlm_chart: Optional[ChartPlotWidget] = None
@@ -554,7 +715,7 @@ async def display_symbol_data(
             if has_vlm(ohlcv):
                 vlm_chart = await ln.start(
                     open_vlm_displays,
-                    linkedsplits,
+                    linked,
                     ohlcv,
                 )

@@ -562,7 +723,7 @@ async def display_symbol_data(
             # from an input config.
             ln.start_soon(
                 start_fsp_displays,
-                linkedsplits,
+                linked,
                 ohlcv,
                 loading_sym_key,
                 loglevel,
@@ -571,7 +732,7 @@ async def display_symbol_data(
             # start graphics update loop after receiving first live quote
             ln.start_soon(
                 graphics_update_loop,
-                linkedsplits,
+                linked,
                 feed.stream,
                 ohlcv,
                 wap_in_history,
@@ -582,25 +743,26 @@ async def display_symbol_data(
                 open_order_mode(
                     feed,
                     chart,
-                    symbol,
-                    provider,
+                    fqsn,
                     order_mode_started
                 )
             ):
                 # let Qt run to render all widgets and make sure the
                 # sidepanes line up vertically.
                 await trio.sleep(0)
-                linkedsplits.resize_sidepanes()
+                linked.resize_sidepanes()

                 # NOTE: we pop the volume chart from the subplots set so
                 # that it isn't double rendered in the display loop
                 # above since we do a maxmin calc on the volume data to
                 # determine if auto-range adjustments should be made.
-                linkedsplits.subplots.pop('volume', None)
+                linked.subplots.pop('volume', None)

                 # TODO: make this not so shit XD
                 # close group status
                 sbar._status_groups[loading_sym_key][1]()

                 # let the app run.. bby
+                chart.default_view()
+                # linked.graphics_cycle()
                 await trio.sleep_forever()
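This hunk migrates the feed API from a separate `(provider, [sym])` pair to a single fully-qualified symbol name ("fqsn") key, built by folding the provider into the symbol string. A sketch matching the `'.'.join((sym, provider))` call shown in the diff (the exact fqsn format is assumed from that call):

def to_fqsn(sym: str, provider: str) -> str:
    # provider/broker suffix becomes part of the symbol key itself
    return '.'.join((sym, provider))

assert to_fqsn('xbtusd', 'kraken') == 'xbtusd.kraken'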
@@ -89,7 +89,7 @@ def update_fsp_chart(
     # update graphics
     # NOTE: this does a length check internally which allows it
     # staying above the last row check below..
-    chart.update_curve_from_array(
+    chart.update_graphics_from_array(
         graphics_name,
         array,
         array_key=array_key or graphics_name,
@@ -246,7 +246,6 @@ async def run_fsp_ui(
             overlay=True,
             color='default_light',
             array_key=name,
-            separate_axes=conf.get('separate_axes', False),
             **conf.get('chart_kwargs', {})
         )
         # specially store ref to shm for lookup in display loop
@@ -386,6 +385,7 @@ class FspAdmin:
         portal: tractor.Portal,
         complete: trio.Event,
         started: trio.Event,
+        fqsn: str,
         dst_shm: ShmArray,
         conf: dict,
         target: Fsp,
@@ -397,7 +397,6 @@ class FspAdmin:
         cluster and sleeps until signalled to exit.

         '''
-        brokername, sym = self.linked.symbol.front_feed()
         ns_path = str(target.ns_path)
         async with (
             portal.open_context(
@@ -406,8 +405,7 @@ class FspAdmin:
                 cascade,

                 # data feed key
-                brokername=brokername,
-                symbol=sym,
+                fqsn=fqsn,

                 # mems
                 src_shm_token=self.src_shm.token,
@@ -427,9 +425,10 @@ class FspAdmin:
             ) as (ctx, last_index),
             ctx.open_stream() as stream,
         ):

             # register output data
             self._registry[
-                (brokername, sym, ns_path)
+                (fqsn, ns_path)
             ] = (
                 stream,
                 dst_shm,
@@ -439,6 +438,14 @@ class FspAdmin:
             started.set()

             # wait for graceful shutdown signal
+            async with stream.subscribe() as stream:
+                async for msg in stream:
+                    if msg == 'update':
+                        log.info(f'Re-syncing graphics for fsp: {ns_path}')
+                        self.linked.graphics_cycle()
+                    else:
+                        log.info(f'recved unexpected fsp engine msg: {msg}')
+
             await complete.wait()

     async def start_engine_task(
@@ -452,11 +459,11 @@ class FspAdmin:

     ) -> (ShmArray, trio.Event):

-        fqsn = self.linked.symbol.front_feed()
+        fqsn = self.linked.symbol.front_fqsn()

         # allocate an output shm array
         key, dst_shm, opened = maybe_mk_fsp_shm(
-            '.'.join(fqsn),
+            fqsn,
             target=target,
             readonly=True,
         )
@@ -477,6 +484,7 @@ class FspAdmin:
             portal,
             complete,
             started,
+            fqsn,
             dst_shm,
             conf,
             target,
@@ -668,7 +676,7 @@ async def open_vlm_displays(

     last_val_sticky.update_from_data(-1, value)

-    vlm_curve = chart.update_curve_from_array(
+    vlm_curve = chart.update_graphics_from_array(
         'volume',
         shm.array,
     )
@@ -758,6 +766,7 @@ async def open_vlm_displays(
     # displayed and the curves are effectively the same minus
     # liquidity events (well at least on low OHLC periods - 1s).
     vlm_curve.hide()
+    chart.removeItem(vlm_curve)

     # use slightly less light (then bracket) gray
     # for volume from "main exchange" and a more "bluey"
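The `stream.subscribe()` block added above fans a single fsp-engine message stream out so the admin task can react to 'update' msgs while other consumers keep reading. A self-contained trio sketch of that fan-out idea, with a plain memory channel standing in for tractor's broadcast machinery (this is an illustration of the concept, not tractor's API):

import trio

async def main() -> None:
    send, recv = trio.open_memory_channel(0)

    async def consumer(name: str, rx) -> None:
        async for msg in rx:
            if msg == 'update':
                print(f'{name}: re-syncing graphics')

    async with trio.open_nursery() as n:
        n.start_soon(consumer, 'chart-a', recv)
        await send.send('update')
        await send.aclose()  # closes the stream, ending the consumer loop

trio.run(main)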
@@ -20,6 +20,7 @@ Chart view box primitives
 """
 from __future__ import annotations
 from contextlib import asynccontextmanager
+# import itertools
 import time
 from typing import Optional, Callable

@@ -36,6 +37,8 @@ from ..log import get_logger
 from ._style import _min_points_to_show
 from ._editors import SelectRect
 from . import _event
+from .._profile import pg_profile_enabled, ms_slower_then
+# from ._ohlc import BarItems


 log = get_logger(__name__)
@@ -318,6 +321,7 @@ async def handle_viewmode_mouse(
     ):
         # when in order mode, submit execution
         # msg.event.accept()
+        # breakpoint()
         view.order_mode.submit_order()


@@ -362,7 +366,6 @@ class ChartView(ViewBox):
             # defaultPadding=0.,
             **kwargs
         )

         # for "known y-range style"
         self._static_yrange = static_yrange
         self._maxmin = None
@@ -384,6 +387,29 @@ class ChartView(ViewBox):
         self.order_mode: bool = False

         self.setFocusPolicy(QtCore.Qt.StrongFocus)
+        self._ic = None
+
+    def start_ic(
+        self,
+    ) -> None:
+        if self._ic is None:
+            self.chart.pause_all_feeds()
+            self._ic = trio.Event()
+
+    def signal_ic(
+        self,
+        *args,
+        # ev = None,
+    ) -> None:
+        if args:
+            print(f'range change dun: {args}')
+        else:
+            print('proxy called')
+
+        if self._ic:
+            self._ic.set()
+            self._ic = None
+            self.chart.resume_all_feeds()

     @asynccontextmanager
     async def open_async_input_handler(
@@ -435,7 +461,8 @@ class ChartView(ViewBox):
         axis=None,
         relayed_from: ChartView = None,
     ):
-        '''Override "center-point" location for scrolling.
+        '''
+        Override "center-point" location for scrolling.

         This is an override of the ``ViewBox`` method simply changing
         the center of the zoom to be the y-axis.
@@ -536,6 +563,11 @@ class ChartView(ViewBox):
         self._resetTarget()
         self.scaleBy(s, focal)
         self.sigRangeChangedManually.emit(mask)
+
+        # self._ic.set()
+        # self._ic = None
+        # self.chart.resume_all_feeds()
+
         ev.accept()

     def mouseDragEvent(
@@ -618,6 +650,11 @@ class ChartView(ViewBox):
             # XXX: WHY
             ev.accept()

+            self.start_ic()
+            # if self._ic is None:
+            #     self.chart.pause_all_feeds()
+            #     self._ic = trio.Event()
+
             if axis == 1:
                 self.chart._static_yrange = 'axis'

@@ -635,6 +672,13 @@ class ChartView(ViewBox):

             self.sigRangeChangedManually.emit(self.state['mouseEnabled'])

+            if ev.isFinish():
+                print('DRAG FINISH')
+                self.signal_ic()
+                # self._ic.set()
+                # self._ic = None
+                # self.chart.resume_all_feeds()
+
         # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
         elif button & QtCore.Qt.RightButton:

@@ -775,12 +819,62 @@ class ChartView(ViewBox):

         '''
         vb.sigXRangeChanged.connect(vb._set_yrange)
+
+        # TODO: a smarter way to avoid calling this needlessly?
+        # 2 things i can think of:
+        # - register downsample-able graphics specially and only
+        #   iterate those.
+        # - only register this when certain downsampleable graphics are
+        #   "added to scene".
+        vb.sigXRangeChanged.connect(vb.maybe_downsample_graphics)
+
         # mouse wheel doesn't emit XRangeChanged
         vb.sigRangeChangedManually.connect(vb._set_yrange)
-        vb.sigResized.connect(vb._set_yrange)  # splitter(s) resizing
+
+        # splitter(s) resizing
+        vb.sigResized.connect(vb._set_yrange)

     def disable_auto_yrange(
         self,
     ) -> None:

         self._chart._static_yrange = 'axis'

+    def xs_in_px(self) -> float:
+        '''
+        Return the "number of x units" within a single
+        pixel currently being displayed for relevant
+        graphics items which are our children.
+
+        '''
+        for graphic in self._chart._graphics.values():
+            xvec = graphic.pixelVectors()[0]
+            if xvec:
+                xpx = xvec.x()
+                if xpx:
+                    return xpx
+                else:
+                    continue
+        return 1.0
+
+    def maybe_downsample_graphics(self):
+
+        # TODO: a faster single-loop-iterator way of doing this XD
+        chart = self._chart
+        # graphics = list(self._chart._graphics.values())
+
+        profiler = pg.debug.Profiler(
+            msg=f'FastAppendCurve.update_from_array(): `{chart.name}`',
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )
+        for name, graphics in chart._graphics.items():
+            # pass in no array which will read and render from the last
+            # passed array (normally provided by the display loop.)
+            chart.update_graphics_from_array(name)
+            profiler(f'updating {name}')
+
+        # for graphic in graphics:
+        #     ds_meth = getattr(graphic, 'maybe_downsample', None)
+        #     if ds_meth:
+        #         ds_meth()
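The new `xs_in_px()` asks any child graphic for its pixel vectors, i.e. how long one screen pixel is in data coordinates along x, falling back to 1.0 when nothing is renderable yet. A standalone sketch of that lookup (`graphics` is assumed to be pyqtgraph GraphicsItems exposing `pixelVectors()`):

def xs_in_px(graphics: list) -> float:
    for graphic in graphics:
        # pixelVectors() returns an (x-vector, y-vector) pair of Points,
        # either of which may be None before the first render
        xvec = graphic.pixelVectors()[0]
        if xvec and xvec.x():
            return xvec.x()
    return 1.0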
@@ -20,7 +20,7 @@ Lines for orders, alerts, L2.
 """
 from functools import partial
 from math import floor
-from typing import Tuple, Optional, List, Callable
+from typing import Optional, Callable

 import pyqtgraph as pg
 from pyqtgraph import Point, functions as fn
@@ -29,10 +29,8 @@ from PyQt5.QtCore import QPointF

 from ._annotate import qgo_draw_markers, LevelMarker
 from ._anchors import (
-    marker_right_points,
     vbr_left,
     right_axis,
-    # pp_tight_and_right,  # wanna keep it straight in the long run
     gpath_pin,
 )
 from ..calc import humanize
@@ -104,8 +102,8 @@ class LevelLine(pg.InfiniteLine):

         # list of labels anchored at one of the 2 line endpoints
         # inside the viewbox
-        self._labels: List[Label] = []
-        self._markers: List[(int, Label)] = []
+        self._labels: list[Label] = []
+        self._markers: list[(int, Label)] = []

         # whenever this line is moved trigger label updates
         self.sigPositionChanged.connect(self.on_pos_change)
@@ -124,7 +122,7 @@ class LevelLine(pg.InfiniteLine):
         self._y_incr_mult = 1 / chart.linked.symbol.tick_size
         self._right_end_sc: float = 0

-    def txt_offsets(self) -> Tuple[int, int]:
+    def txt_offsets(self) -> tuple[int, int]:
         return 0, 0

     @property
@@ -315,17 +313,6 @@ class LevelLine(pg.InfiniteLine):
         # TODO: enter labels edit mode
         print(f'double click {ev}')

-    def right_point(
-        self,
-    ) -> float:
-
-        chart = self._chart
-        l1_len = chart._max_l1_line_len
-        ryaxis = chart.getAxis('right')
-        up_to_l1_sc = ryaxis.pos().x() - l1_len
-
-        return up_to_l1_sc
-
     def paint(
         self,

@@ -345,7 +332,7 @@ class LevelLine(pg.InfiniteLine):
         vb_left, vb_right = self._endPoints
         vb = self.getViewBox()

-        line_end, marker_right, r_axis_x = marker_right_points(self._chart)
+        line_end, marker_right, r_axis_x = self._chart.marker_right_points()

         if self.show_markers and self.markers:

@@ -411,7 +398,7 @@ class LevelLine(pg.InfiniteLine):
     def scene_endpoint(self) -> QPointF:

         if not self._right_end_sc:
-            line_end, _, _ = marker_right_points(self._chart)
+            line_end, _, _ = self._chart.marker_right_points()
             self._right_end_sc = line_end - 10

         return QPointF(self._right_end_sc, self.scene_y())
@@ -422,23 +409,23 @@ class LevelLine(pg.InfiniteLine):

     ) -> QtWidgets.QGraphicsPathItem:

+        self._marker = path
+        self._marker.setPen(self.currentPen)
+        self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
         # add path to scene
         self.getViewBox().scene().addItem(path)

-        self._marker = path
-
-        rsc = self.right_point()
-
-        self._marker.setPen(self.currentPen)
-        self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
+        # place to just-left of L1 labels
+        rsc = self._chart.pre_l1_xs()[0]

         path.setPos(QPointF(rsc, self.scene_y()))

         return path

     def hoverEvent(self, ev):
-        """Mouse hover callback.
+        '''
+        Mouse hover callback.

-        """
+        '''
         cur = self._chart.linked.cursor

         # hovered
@@ -614,7 +601,8 @@ def order_line(
     **line_kwargs,

 ) -> LevelLine:
-    '''Convenience routine to add a line graphic representing an order
+    '''
+    Convenience routine to add a line graphic representing an order
     execution submitted to the EMS via the chart's "order mode".

     '''
@@ -689,7 +677,6 @@ def order_line(

         return f'{account}: '

-
     label.fields = {
         'size': size,
         'size_digits': 0,
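The removed `right_point()` computed the scene-x just left of the L1 labels as "right axis x minus the max L1 label length"; the diff moves that math behind `chart.pre_l1_xs()`. A sketch of the same arithmetic (function and parameter names here are assumptions for illustration):

def pre_l1_x(right_axis_x: float, max_l1_label_len: float) -> float:
    # scene-x coordinate just left of the L1 book labels
    return right_axis_x - max_l1_label_len

assert pre_l1_x(800.0, 120.0) == 680.0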
@@ -17,7 +17,11 @@
 Super fast OHLC sampling graphics types.

 """
-from typing import List, Optional, Tuple
+from __future__ import annotations
+from typing import (
+    Optional,
+    TYPE_CHECKING,
+)

 import numpy as np
 import pyqtgraph as pg
@@ -27,30 +31,29 @@ from PyQt5.QtCore import QLineF, QPointF
 # from numba import types as ntypes
 # from ..data._source import numba_ohlc_dtype

-from .._profile import pg_profile_enabled
+from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
+from ..log import get_logger
+from ._curve import FastAppendCurve
+from ._compression import ohlc_flatten

+if TYPE_CHECKING:
+    from ._chart import LinkedSplits


-def _mk_lines_array(
-    data: List,
-    size: int,
-    elements_step: int = 6,
-) -> np.ndarray:
-    """Create an ndarray to hold lines graphics info.
-
-    """
-    return np.zeros_like(
-        data,
-        shape=(int(size), elements_step),
-        dtype=object,
-    )
+log = get_logger(__name__)


-def lines_from_ohlc(
+def bar_from_ohlc_row(
     row: np.ndarray,
     w: float
-) -> Tuple[QLineF]:
+
+) -> tuple[QLineF]:
+    '''
+    Generate the minimal ``QLineF`` lines to construct a single
+    OHLC "bar" for use in the "last datum" of a series.
+
+    '''
     open, high, low, close, index = row[
         ['open', 'high', 'low', 'close', 'index']]

@@ -84,7 +87,7 @@ def lines_from_ohlc(
 @njit(
     # TODO: for now need to construct this manually for readonly arrays, see
     # https://github.com/numba/numba/issues/4511
-    # ntypes.Tuple((float64[:], float64[:], float64[:]))(
+    # ntypes.tuple((float64[:], float64[:], float64[:]))(
     #     numba_ohlc_dtype[::1],  # contiguous
     #     int64,
     #     optional(float64),
@@ -95,10 +98,12 @@ def path_arrays_from_ohlc(
     data: np.ndarray,
     start: int64,
     bar_gap: float64 = 0.43,
-) -> np.ndarray:
-    """Generate an array of lines objects from input ohlc data.

-    """
+) -> np.ndarray:
+    '''
+    Generate an array of lines objects from input ohlc data.
+
+    '''
     size = int(data.shape[0] * 6)

     x = np.zeros(
@@ -152,26 +157,50 @@ def path_arrays_from_ohlc(


 def gen_qpath(
-    data,
-    start,  # XXX: do we need this?
-    w,
+    data: np.ndarray,
+    start: int,  # XXX: do we need this?
+    w: float,
+    path: Optional[QtGui.QPainterPath] = None,

 ) -> QtGui.QPainterPath:

-    profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
+    path_was_none = path is None

-    x, y, c = path_arrays_from_ohlc(data, start, bar_gap=w)
+    profiler = pg.debug.Profiler(
+        msg='gen_qpath ohlc',
+        disabled=not pg_profile_enabled(),
+        gt=ms_slower_then,
+    )
+
+    x, y, c = path_arrays_from_ohlc(
+        data,
+        start,
+        bar_gap=w,
+    )
     profiler("generate stream with numba")

     # TODO: numba the internals of this!
-    path = pg.functions.arrayToQPath(x, y, connect=c)
+    path = pg.functions.arrayToQPath(
+        x,
+        y,
+        connect=c,
+        path=path,
+    )
+
+    # avoid mem allocs if possible
+    if path_was_none:
+        path.reserve(path.capacity())
+
     profiler("generate path with arrayToQPath")

     return path


 class BarItems(pg.GraphicsObject):
-    """Price range bars graphics rendered from a OHLC sequence.
-    """
+    '''
+    "Price range" bars graphics rendered from a OHLC sampled sequence.
+
+    '''
     sigPlotChanged = QtCore.pyqtSignal(object)

     # 0.5 is no overlap between arms, 1.0 is full overlap
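The reworked `gen_qpath()` avoids re-allocations by regenerating into a caller-supplied path and, when building a fresh one, pinning its current capacity. A sketch of that trick, assuming Qt >= 5.13 where `QPainterPath.reserve()`/`.capacity()` exist (`make_or_reuse` is an illustrative name):

from typing import Optional
from PyQt5 import QtGui

def make_or_reuse(
    path: Optional[QtGui.QPainterPath] = None,
) -> QtGui.QPainterPath:
    path_was_none = path is None
    out = QtGui.QPainterPath() if path is None else path
    # ... populate `out` with line segments here ...
    if path_was_none:
        # lock in current capacity so later in-place regeneration
        # doesn't re-allocate:
        # https://doc.qt.io/qt-5/qpainterpath.html#reserve
        out.reserve(out.capacity())
    return out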
@@ -179,17 +208,26 @@ class BarItems(pg.GraphicsObject):

     def __init__(
         self,
-        # scene: 'QGraphicsScene', # noqa
+        linked: LinkedSplits,
         plotitem: 'pg.PlotItem',  # noqa
         pen_color: str = 'bracket',
         last_bar_color: str = 'bracket',

+        name: Optional[str] = None,
+
     ) -> None:
         super().__init__()
+        self.linked = linked
         # XXX: for the mega-lulz increasing width here increases draw
         # latency... so probably don't do it until we figure that out.
+        self._color = pen_color
         self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
         self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
+        self._name = name
+
+        self._ds_line_xy: Optional[
+            tuple[np.ndarray, np.ndarray]
+        ] = None

         # NOTE: this prevents redraws on mouse interaction which is
         # a huge boon for avg interaction latency.
@@ -200,50 +238,84 @@ class BarItems(pg.GraphicsObject):
         # that mode?
         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

-        # not sure if this is actually improving anything but figured it
-        # was worth a shot:
-        # self.path.reserve(int(100e3 * 6))
-
-        self.path = QtGui.QPainterPath()
-
         self._pi = plotitem
+        self.path = QtGui.QPainterPath()
+        self.fast_path = QtGui.QPainterPath()

-        self._xrange: Tuple[int, int]
-        self._yrange: Tuple[float, float]
+        self._xrange: tuple[int, int]
+        self._yrange: tuple[float, float]
+        self._vrange = None

         # TODO: don't render the full backing array each time
         # self._path_data = None
-        self._last_bar_lines: Optional[Tuple[QLineF, ...]] = None
+        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

         # track the current length of drawable lines within the larger array
         self.start_index: int = 0
         self.stop_index: int = 0

+        # downsampler-line state
+        self._in_ds: bool = False
+        self._ds_line: Optional[FastAppendCurve] = None
+        self._dsi: tuple[int, int] = 0, 0
+        self._xs_in_px: float = 0
+
     def draw_from_data(
         self,
-        data: np.ndarray,
+        ohlc: np.ndarray,
         start: int = 0,

     ) -> QtGui.QPainterPath:
-        """Draw OHLC datum graphics from a ``np.ndarray``.
+        '''
+        Draw OHLC datum graphics from a ``np.ndarray``.

         This routine is usually only called to draw the initial history.
-        """
-        hist, last = data[:-1], data[-1]

+        '''
+        hist, last = ohlc[:-1], ohlc[-1]
         self.path = gen_qpath(hist, start, self.w)

         # save graphics for later reference and keep track
         # of current internal "last index"
-        # self.start_index = len(data)
-        index = data['index']
+        # self.start_index = len(ohlc)
+        index = ohlc['index']
         self._xrange = (index[0], index[-1])
         self._yrange = (
-            np.nanmax(data['high']),
-            np.nanmin(data['low']),
+            np.nanmax(ohlc['high']),
+            np.nanmin(ohlc['low']),
         )

         # up to last to avoid double draw of last bar
-        self._last_bar_lines = lines_from_ohlc(last, self.w)
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)
+
+        x, y = self._ds_line_xy = ohlc_flatten(ohlc)
+
+        # self.update_ds_line(
+        #     x,
+        #     y,
+        # )
+
+        # TODO: figuring out the most optimal size for the ideal
+        # curve-path by,
+        # - calcing the display's max px width `.screen()`
+        # - drawing a curve and figuring out its capacity:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
+        # - reserving that cap for each curve-mapped-to-shm with
+
+        # - leveraging clearing when needed to redraw the entire
+        #   curve that does not release mem allocs:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#clear
+        curve = FastAppendCurve(
+            y=y,
+            x=x,
+            name='OHLC',
+            color=self._color,
+        )
+        curve.hide()
+        self._pi.addItem(curve)
+        self._ds_line = curve
+
+        self._ds_xrange = (index[0], index[-1])

         # trigger render
         # https://doc.qt.io/qt-5/qgraphicsitem.html#update
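`ohlc_flatten()` above turns the bars array into (x, y) vectors feeding the downsample line. A numpy sketch under the assumption that each bar contributes its open/high/low/close as four evenly spaced x sub-steps (the real routine's exact layout may differ):

import numpy as np

def flatten_ohlc(ohlc: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    index = ohlc['index']
    # 4 sub-steps per bar: o, h, l, c
    x = (index[:, None] + np.array([0.0, 0.25, 0.5, 0.75])).ravel()
    y = np.stack(
        [ohlc[k] for k in ('open', 'high', 'low', 'close')],
        axis=1,
    ).ravel()
    return x, y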
@@ -251,12 +323,70 @@ class BarItems(pg.GraphicsObject):

         return self.path

+    # def update_ds_line(
+    #     self,
+    #     x,
+    #     y,
+
+    # ) -> FastAppendCurve:
+
+    #     # determine current potential downsampling value (based on pixel
+    #     # scaling) and return any existing curve for it.
+    #     curve = self._ds_line
+
+    #     if not curve:
+    #         # TODO: figuring out the most optimal size for the ideal
+    #         # curve-path by,
+    #         # - calcing the display's max px width `.screen()`
+    #         # - drawing a curve and figuring out its capacity:
+    #         #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
+    #         # - reserving that cap for each curve-mapped-to-shm with
+
+    #         # - leveraging clearing when needed to redraw the entire
+    #         #   curve that does not release mem allocs:
+    #         #   https://doc.qt.io/qt-5/qpainterpath.html#clear
+    #         curve = FastAppendCurve(
+    #             y=y,
+    #             x=x,
+    #             name='OHLC',
+    #             color=self._color,
+    #         )
+    #         curve.hide()
+    #         self._pi.addItem(curve)
+    #         self._ds_line = curve
+
+    #         return curve
+
+    #     # TODO: we should be diffing the amount of new data which
+    #     # needs to be downsampled. Ideally we actually are just
+    #     # doing all the ds-ing in sibling actors so that the data
+    #     # can just be read and rendered to graphics on events of our
+    #     # choice.
+    #     # diff = do_diff(ohlc, new_bit)
+
+    #     curve.update_from_array(
+    #         y=y,
+    #         x=x,
+    #         x_iv=x,
+    #         y_iv=y,
+    #         view_range=True,  # hack
+    #     )
+    #     return curve

     def update_from_array(
         self,
-        array: np.ndarray,
-        just_history=False,
+
+        # full array input history
+        ohlc: np.ndarray,
+
+        # pre-sliced array data that's "in view"
+        ohlc_iv: np.ndarray,
+
+        view_range: Optional[tuple[int, int]] = None,

     ) -> None:
-        """Update the last datum's bar graphic from input data array.
+        '''
+        Update the last datum's bar graphic from input data array.

         This routine should be interface compatible with
         ``pg.PlotCurveItem.setData()``. Normally this method in
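The reworked signature takes both the full history and a pre-sliced "in view" window. A hedged usage sketch of the new interface (the `bars` handle and the slicing arithmetic are illustrative, mirroring the commented lbar/rbar logic in the body below):

    # hypothetical caller: slice the visible window out of the full
    # history before handing both to the graphic.
    l, r = view_range                   # x-range shown by the viewbox
    start = ohlc['index'][0]
    ohlc_iv = ohlc[max(l, start) - start:r - start]

    bars.update_from_array(
        ohlc,               # full array input history
        ohlc_iv,            # pre-sliced "in view" data
        view_range=(l, r),
    )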
@@ -266,63 +396,289 @@ class BarItems(pg.GraphicsObject):
         does) so this "should" be simpler and faster.

         This routine should be made (transitively) as fast as possible.
-        """
+
+        '''
+        profiler = pg.debug.Profiler(
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )
+
+        # vr = self.viewRect()
+        # l, r = int(vr.left()), int(vr.right())
+        # # l, r = self.view_range()
+        # # array = self._arrays[self.name]
+        # indexes = ohlc['index']
+        # start_index = indexes[0]
+        # end_index = indexes[-1]
+
+        # lbar = max(l, start_index) - start_index
+        # rbar = min(r, end_index) - start_index
+        # in_view = ohlc[lbar:rbar]
+        # self._vrange = lbar, rbar

         # index = self.start_index
         istart, istop = self._xrange
+        ds_istart, ds_istop = self._ds_xrange

-        index = array['index']
+        index = ohlc['index']
         first_index, last_index = index[0], index[-1]

-        # length = len(array)
+        # length = len(ohlc)
         prepend_length = istart - first_index
         append_length = last_index - istop

+        # ds_prepend_length = ds_istart - first_index
+        # ds_append_length = last_index - ds_istop
+
         flip_cache = False

-        # TODO: allow mapping only a range of lines thus
-        # only drawing as many bars as exactly specified.
+        x_gt = 6
+        if self._ds_line:
+            uppx = self._ds_line.x_uppx()
+        else:
+            uppx = 0

-        if prepend_length:
+        should_line = self._in_ds
+        if (
+            self._in_ds
+            and uppx < x_gt
+        ):
+            should_line = False

-            # new history was added and we need to render a new path
-            new_bars = array[:prepend_length]
-            prepend_path = gen_qpath(new_bars, 0, self.w)
+        elif (
+            not self._in_ds
+            and uppx >= x_gt
+        ):
+            should_line = True

-            # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
-            # y value not matching the first value from
-            # array[prepend_length + 1] ???
+        # should_ds, should_redraw = self.should_ds_or_redraw()
+        # print(
+        #     f'OHLC in line: {self._in_ds}'
+        #     f'OHLC should line: {should_line}\n'
+        #     # f'OHLC should_redraw: {should_redraw}\n'
+        # )

-            # update path
-            old_path = self.path
-            self.path = prepend_path
-            self.path.addPath(old_path)
+        if (
+            should_line
+        ):
+            # update the line graphic
+            # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+            # x, y = self._ds_line_xy = ohlc_flatten(ohlc)
+            x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+            profiler('flattening bars to line')

-            # trigger redraw despite caching
-            self.prepareGeometryChange()
+            curve = self._ds_line
+            # curve = self.update_ds_line(x, y)

-        if append_length:
-            # generate new lines objects for updatable "current bar"
-            self._last_bar_lines = lines_from_ohlc(array[-1], self.w)
+            # TODO: we should be diffing the amount of new data which
+            # needs to be downsampled. Ideally we actually are just
+            # doing all the ds-ing in sibling actors so that the data
+            # can just be read and rendered to graphics on events of our
+            # choice.
+            # diff = do_diff(ohlc, new_bit)
+            curve.update_from_array(
+                y=y_iv,
+                x=x_iv,
+                x_iv=x_iv,
+                y_iv=y_iv,
+                view_range=view_range,  # hack
+            )

-            # generate new graphics to match provided array
-            # path appending logic:
-            # we need to get the previous "current bar(s)" for the time step
-            # and convert it to a sub-path to append to the historical set
-            # new_bars = array[istop - 1:istop + append_length - 1]
-            new_bars = array[-append_length - 1:-1]
-            append_path = gen_qpath(new_bars, 0, self.w)
-            self.path.moveTo(float(istop - self.w), float(new_bars[0]['open']))
-            self.path.addPath(append_path)
+            # we already are showing a line and should be
+            # self._in_ds

-            # trigger redraw despite caching
-            self.prepareGeometryChange()
-            self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
-            flip_cache = True
+            # check if the ds line should be resampled/drawn
+            # should_ds_line, should_redraw_line = self._ds_line.should_ds_or_redraw()
+            # print(f'OHLC DS should ds: {should_ds_line}, should_redraw: {should_redraw_line}')
+
+            # if (
+            #     # line should be redrawn/sampled
+            #     # should_ds_line or
+
+            #     # we are flipping to line from bars mode
+            #     not self._in_ds
+            # ):
+            #     uppx = self._ds_line.x_uppx()
+            #     self._xs_in_px = uppx
+
+            if not self._in_ds:
+                # hide bars and show line
+                self.hide()
+                # XXX: is this actually any faster?
+                # self._pi.removeItem(self)
+
+                # TODO: a `.ui()` log level?
+                log.info(
+                    f'downsampling to line graphic {self._name}'
+                )
+
+                # self._pi.addItem(curve)
+                curve.show()
+                curve.update()
+                self._in_ds = True
+
+            # stop here since we don't need to update bars path any more
+            # as we delegate to the downsample line with updates.
+            return
+
+        elif (
+            not should_line
+            and self._in_ds
+        ):
+            # flip back to bars graphics and hide the downsample line.
+            log.info(f'showing bars graphic {self._name}')
+
+            curve = self._ds_line
+            curve.hide()
+            # self._pi.removeItem(curve)
+
+            # XXX: is this actually any faster?
+            # self._pi.addItem(self)
+            self.show()
+            self._in_ds = False
+
+        # if not self._in_ds and should_ds
+        #     self.hide()
+        #     # XXX: is this actually any faster?
+        #     # self._pi.removeItem(self)
+
+        #     # this should have been done in the block above
+        #     # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+        #     # curve = self.update_ds_line(x, y)
+
+        #     # TODO: a `.ui()` log level?
+        #     log.info(
+        #         f'downsampling to line graphic {self._name}'
+        #     )
+
+        #     # self._pi.addItem(curve)
+        #     curve.show()
+        #     curve.update()
+        #     self._in_ds = True
+        #     return
+
+        # self._in_ds = False
+        # print('YO NOT DS OHLC')
+
+        # generate in_view path
+        self.path = gen_qpath(
+            ohlc_iv,
+            0,
+            self.w,
+            # path=self.path,
+        )
+
+        # TODO: to make the downsampling faster
+        # - allow mapping only a range of lines thus only drawing as
+        #   many bars as exactly specified.
+        # - move ohlc "flattening" to a shmarr
+        # - maybe move all this embedded logic to a higher
+        #   level type?
+
+        # ohlc = in_view
+
+        # if prepend_length:
+        #     # new history was added and we need to render a new path
+        #     prepend_bars = ohlc[:prepend_length]
+
+        #     if ds_prepend_length:
+        #         ds_prepend_bars = ohlc[:ds_prepend_length]
+        #         pre_x, pre_y = ohlc_flatten(ds_prepend_bars)
+        #         fx = np.concatenate((pre_x, fx))
+        #         fy = np.concatenate((pre_y, fy))
+        #         profiler('ds line prepend diff complete')
+
+        # if append_length:
+        #     # generate new graphics to match provided array
+        #     # path appending logic:
+        #     # we need to get the previous "current bar(s)" for the time step
+        #     # and convert it to a sub-path to append to the historical set
+        #     # new_bars = ohlc[istop - 1:istop + append_length - 1]
+        #     append_bars = ohlc[-append_length - 1:-1]
+        #     # print(f'ohlc bars to append size: {append_bars.size}\n')
+
+        #     if ds_append_length:
+        #         ds_append_bars = ohlc[-ds_append_length - 1:-1]
+        #         post_x, post_y = ohlc_flatten(ds_append_bars)
+        #         print(
+        #             f'ds curve to append sizes: {(post_x.size, post_y.size)}'
+        #         )
+        #         fx = np.concatenate((fx, post_x))
+        #         fy = np.concatenate((fy, post_y))
+
+        #         profiler('ds line append diff complete')
+
+        profiler('array diffs complete')
+
+        # does this work?
+        last = ohlc[-1]
+        # fy[-1] = last['close']
+
+        # # incremental update and cache line datums
+        # self._ds_line_xy = fx, fy
+
+        # maybe downsample to line
+        # ds = self.maybe_downsample()
+        # if ds:
+        #     # if we downsample to a line don't bother with
+        #     # any more path generation / updates
+        #     self._ds_xrange = first_index, last_index
+        #     profiler('downsampled to line')
+        #     return
+
+        # print(in_view.size)
+
+        # if self.path:
+        #     self.path = path
+        #     self.path.reserve(path.capacity())
+        #     self.path.swap(path)
+
+        # path updates
+        # if prepend_length:
+        #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
+        #     # y value not matching the first value from
+        #     # ohlc[prepend_length + 1] ???
+        #     prepend_path = gen_qpath(prepend_bars, 0, self.w)
+        #     old_path = self.path
+        #     self.path = prepend_path
+        #     self.path.addPath(old_path)
+        #     profiler('path PREPEND')
+
+        # if append_length:
+        #     append_path = gen_qpath(append_bars, 0, self.w)
+
+        #     self.path.moveTo(
+        #         float(istop - self.w),
+        #         float(append_bars[0]['open'])
+        #     )
+        #     self.path.addPath(append_path)
+
+        #     profiler('path APPEND')
+        #     fp = self.fast_path
+        #     if fp is None:
+        #         self.fast_path = append_path
+
+        #     else:
+        #         fp.moveTo(
+        #             float(istop - self.w), float(new_bars[0]['open'])
+        #         )
+        #         fp.addPath(append_path)
+
+        #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
+        #     flip_cache = True
+
         self._xrange = first_index, last_index
+
+        # trigger redraw despite caching
+        self.prepareGeometryChange()
+
+        # generate new lines objects for updatable "current bar"
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

         # last bar update
-        i, o, h, l, last, v = array[-1][
+        i, o, h, l, last, v = last[
             ['index', 'open', 'high', 'low', 'close', 'volume']
         ]
         # assert i == self.start_index - 1
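The `x_gt`/`uppx` checks above implement a simple hysteresis: once more than `x_gt` x-units (bars) are crammed into a single screen pixel, the bars graphic is swapped for the downsampled line, and it is only swapped back when the density drops below the same threshold. A distilled, standalone sketch of that decision (illustrative names; the real threshold here is 6):

    def should_use_line(
        in_ds: bool,       # currently showing the downsampled line?
        uppx: float,       # x-units (bars) per screen pixel
        x_gt: float = 6,   # density threshold for flipping modes
    ) -> bool:
        if in_ds and uppx < x_gt:
            return False   # zoomed back in: show individual bars
        if not in_ds and uppx >= x_gt:
            return True    # too dense: flip to the line graphic
        return in_ds       # otherwise keep the current mode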
@@ -351,7 +707,10 @@ class BarItems(pg.GraphicsObject):
         # now out of date / from some previous sample. It's weird
         # though because i've seen it do this to bars i - 3 back?

+        profiler('last bar set')
+
         self.update()
+        profiler('.update()')

         if flip_cache:
             self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
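The `flip_cache` dance restored here works around a QGraphicsItem caching artifact: the cache mode is dropped to `NoCache` to force a true repaint, then restored to the device-coordinate cache after `update()`. Roughly, as a hedged sketch:

    from PyQt5 import QtWidgets

    def force_uncached_repaint(item: QtWidgets.QGraphicsItem) -> None:
        # disable caching so the next paint call hits paint()
        # directly, then repaint and restore the cache.
        item.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
        item.update()
        item.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)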
@@ -373,16 +732,31 @@ class BarItems(pg.GraphicsObject):
         # apparently this is a lot faster, says the docs:
         # https://doc.qt.io/qt-5/qpainterpath.html#controlPointRect
         hb = self.path.controlPointRect()
-        hb_tl, hb_br = hb.topLeft(), hb.bottomRight()
+        hb_tl, hb_br = (
+            hb.topLeft(),
+            hb.bottomRight(),
+        )
+
+        # fp = self.fast_path
+        # if fp:
+        #     fhb = fp.controlPointRect()
+        #     print((hb_tl, hb_br))
+        #     print(fhb)
+        #     hb_tl, hb_br = (
+        #         fhb.topLeft() + hb.topLeft(),
+        #         fhb.bottomRight() + hb.bottomRight(),
+        #     )

         # need to include last bar height or BR will be off
         mx_y = hb_br.y()
         mn_y = hb_tl.y()

-        body_line = self._last_bar_lines[0]
-        if body_line:
-            mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
-            mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
+        last_lines = self._last_bar_lines
+        if last_lines:
+            body_line = self._last_bar_lines[0]
+            if body_line:
+                mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
+                mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))

         return QtCore.QRectF(
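As the comment notes, `controlPointRect()` is at least as large as `boundingRect()` on a `QPainterPath` but far cheaper to compute; the added guard just makes sure the still-mutating last bar is folded into the rect. A condensed sketch of the pattern (illustrative helper, not the class method itself):

    from PyQt5 import QtCore

    def path_rect_with_last_bar(path, last_bar_lines) -> QtCore.QRectF:
        hb = path.controlPointRect()
        mn_y, mx_y = hb.topLeft().y(), hb.bottomRight().y()

        if last_bar_lines:
            body = last_bar_lines[0]  # the bar's vertical body line
            if body:
                mx_y = max(mx_y, body.y1(), body.y2())
                mn_y = min(mn_y, body.y1(), body.y2())

        return QtCore.QRectF(
            hb.topLeft().x(), mn_y,
            hb.bottomRight().x() - hb.topLeft().x(), mx_y - mn_y,
        )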
@@ -400,14 +774,85 @@ class BarItems(pg.GraphicsObject):
         )

+    # def should_ds_or_redraw(
+    #     self,
+    #     x_gt: float = 2,
+
+    # ) -> tuple[bool, bool]:
+
+    #     curve = self._ds_line
+    #     if not curve:
+    #         return False, False
+
+    #     # this is the ``float`` value of the "number of x units" (in
+    #     # view coords) that a pixel spans.
+    #     uppx = self._ds_line.x_uppx()
+    #     print(f'uppx: {uppx}')
+
+    #     # linked = self.linked
+    #     should_redraw: bool = False
+    #     should_ds: bool = False
+
+    #     if (
+    #         not self._in_ds
+    #         and uppx >= x_gt
+    #     ):
+
+    #         should_ds = True
+    #         should_redraw = True
+
+    #     elif (
+    #         self._in_ds
+    #         and uppx < x_gt
+    #     ):
+    #         should_ds = False
+    #         should_redraw = True
+
+    #     if self._in_ds:
+    #         should_ds = True
+
+    #     # no curve change
+    #     return should_ds, should_redraw
+
+    # def maybe_downsample(
+    #     self,
+    #     x_gt: float = 2,
+
+    # ) -> bool:
+    #     '''
+    #     Call this when you want to stop drawing individual
+    #     bars and instead use a ``FastAppendCurve`` interpolation
+    #     line (normally when the width of a bar (aka 1.0 in the x)
+    #     is less than a pixel width on the device).
+
+    #     '''
+    #     ds_xy = self._ds_line_xy
+    #     if ds_xy:
+    #         ds_xy.maybe_downsample()
+
+    #     if (
+    #         self._ds_line_xy is not None
+    #         and self._in_ds
+    #     ):
+    #         curve = self.update_ds_line(
+    #             *self._ds_line_xy,
+    #         )
+
     def paint(
         self,
         p: QtGui.QPainter,
         opt: QtWidgets.QStyleOptionGraphicsItem,
         w: QtWidgets.QWidget

     ) -> None:

-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
+        if self._in_ds:
+            return
+
+        profiler = pg.debug.Profiler(
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )

         # p.setCompositionMode(0)

@@ -423,4 +868,8 @@ class BarItems(pg.GraphicsObject):
         p.setPen(self.bars_pen)
         p.drawPath(self.path)
-        profiler('draw history path')
+        profiler(f'draw history path: {self.path.capacity()}')
+
+        # if self.fast_path:
+        #     p.drawPath(self.fast_path)
+        #     profiler('draw fast path')
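The new profiler label logs `self.path.capacity()`, the element-allocation size Qt tracks on a `QPainterPath` (available in Qt >= 5.13); the earlier TODOs sketch reserving that capacity up front so incremental appends never reallocate. A hedged example of the reserve/capacity pair:

    from PyQt5 import QtGui

    path = QtGui.QPainterPath()
    # pre-allocate room for roughly 6 path elements per drawn bar
    # so that appends don't trigger reallocation (Qt >= 5.13).
    path.reserve(6 * 10_000)
    print(path.capacity())  # at least the reserved element count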
@@ -14,9 +14,10 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 Qt UI styling.
-"""
+
+'''
 from typing import Optional, Dict
 import math

@@ -202,8 +203,6 @@ _xaxis_at = 'bottom'
 # charting config
 CHART_MARGINS = (0, 0, 2, 2)
 _min_points_to_show = 6
-_bars_to_left_in_follow_mode = int(61*6)
-_bars_from_right_in_follow_mode = round(0.16 * _bars_to_left_in_follow_mode)
 _tina_mode = False
@@ -268,13 +268,14 @@ class OrderMode:

         '''
         staged = self._staged_order
-        symbol = staged.symbol
+        symbol: Symbol = staged.symbol
         oid = str(uuid.uuid4())

         # format order data for ems
+        fqsn = symbol.front_fqsn()
         order = staged.copy(
             update={
-                'symbol': symbol.key,
+                'symbol': fqsn,
                 'oid': oid,
             }
         )

@@ -519,8 +520,7 @@ async def open_order_mode(

     feed: Feed,
     chart: 'ChartPlotWidget',  # noqa
-    symbol: Symbol,
-    brokername: str,
+    fqsn: str,
     started: trio.Event,

 ) -> None:

@@ -546,8 +546,7 @@ async def open_order_mode(

     # spawn EMS actor-service
     async with (
-        open_ems(brokername, symbol) as (
+        open_ems(fqsn) as (
             book,
             trades_stream,
             position_msgs,

@@ -556,8 +555,7 @@ async def open_order_mode(
         trio.open_nursery() as tn,

     ):
-        log.info(f'Opening order mode for {brokername}.{symbol.key}')
+        log.info(f'Opening order mode for {fqsn}')

         view = chart.view

         # annotations editors

@@ -566,7 +564,7 @@ async def open_order_mode(

         # symbol id
         symbol = chart.linked.symbol
-        symkey = symbol.key
+        symkey = symbol.front_fqsn()

         # map of per-provider account keys to position tracker instances
         trackers: dict[str, PositionTracker] = {}

@@ -610,7 +608,7 @@ async def open_order_mode(
             log.info(f'Loading pp for {symkey}:\n{pformat(msg)}')
             startup_pp.update_from_msg(msg)

-        # allocator
+        # allocator config
         alloc = mk_allocator(
             symbol=symbol,
             account=account_name,

@@ -818,8 +816,18 @@ async def process_trades_and_update_ui(
             'position',
         ):
             sym = mode.chart.linked.symbol
-            if msg['symbol'].lower() in sym.key:
+            pp_msg_symbol = msg['symbol'].lower()
+            fqsn = sym.front_fqsn()
+            broker, key = sym.front_feed()
+            # print(
+            #     f'pp msg symbol: {pp_msg_symbol}\n',
+            #     f'fqsn: {fqsn}\n',
+            #     f'front key: {key}\n',
+            # )
+
+            if (
+                pp_msg_symbol == fqsn.replace(f'.{broker}', '')
+            ):
                 tracker = mode.trackers[msg['account']]
                 tracker.live_pp.update_from_msg(msg)
                 # update order pane widgets
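The widened position-message match compares the EMS symbol against the "fully qualified symbol name" with its broker suffix stripped. Schematically, with hypothetical values standing in for what `front_fqsn()`/`front_feed()` would return:

    # hedged illustration of the comparison above
    fqsn = 'mnq.globex.ib'        # sym.front_fqsn()
    broker = 'ib'                 # first entry of sym.front_feed()
    pp_msg_symbol = 'mnq.globex'  # msg['symbol'].lower()

    matches = pp_msg_symbol == fqsn.replace(f'.{broker}', '')
    assert matches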
@@ -7,3 +7,8 @@
 # pin this to a dev branch that we have more control over especially
 # as more graphics stuff gets hashed out.
 -e git+git://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
+
+
+# we own and maintain the async client for `marketstore` in our org
+# `anyio_marketstore`:
+-e git+git://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -30,11 +30,13 @@ orig_win_id = t.find_focused().window
 # for tws
 win_names: list[str] = [
     'Interactive Brokers',  # tws running in i3
-    'IB Gateway.',  # gw running in i3
+    'IB Gateway',  # gw running in i3
+    # 'IB',  # gw running in i3 (newer version?)
 ]

 for name in win_names:
-    results = t.find_named(name)
+    results = t.find_titled(name)
+    print(f'results for {name}: {results}')
     if results:
         con = results[0]
         print(f'Resetting data feed for {name}')

@@ -47,22 +49,32 @@ for name in win_names:
         # https://github.com/rr-/pyxdotool
         # https://github.com/ShaneHutter/pyxdotool
         # https://github.com/cphyc/pyxdotool
-        subprocess.call([
-            'xdotool',
-            'windowactivate', '--sync', win_id,
-
-            # move mouse to bottom left of window (where there should
-            # be nothing to click).
-            'mousemove_relative', '--sync', str(w-4), str(h-4),
-
-            # NOTE: we may need to stick a `--retry 3` in here..
-            'click', '--window', win_id, '--repeat', '3', '1',
-
-            # hackzorzes
-            'key', 'ctrl+alt+f',
-            ],
-            timeout=1,
-        )
+        # TODO: only run the reconnect (2nd) kc on a detected
+        # disconnect?
+        for key_combo, timeout in [
+            # only required if we need a connection reset.
+            # ('ctrl+alt+r', 12),
+            # data feed reset.
+            ('ctrl+alt+f', 6)
+        ]:
+            subprocess.call([
+                'xdotool',
+                'windowactivate', '--sync', win_id,
+
+                # move mouse to bottom left of window (where there should
+                # be nothing to click).
+                'mousemove_relative', '--sync', str(w-4), str(h-4),
+
+                # NOTE: we may need to stick a `--retry 3` in here..
+                'click', '--window', win_id,
+                '--repeat', '3', '1',
+
+                # hackzorzes
+                'key', key_combo,
+                ],
+                timeout=timeout,
+            )

 # re-activate and focus original window
 subprocess.call([
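The reset sequence now iterates `(key_combo, timeout)` pairs so a reconnect combo can be enabled later: each pass focuses the TWS/gateway window, parks the mouse in a dead corner, clicks to grab input focus, then sends the combo. A trimmed, standalone version of the loop (window-id discovery elided; `win_id`, `w`, `h` are assumed inputs):

    import subprocess

    def send_reset_combos(win_id: str, w: int, h: int) -> None:
        for key_combo, timeout in [
            # ('ctrl+alt+r', 12),  # connection reset, when needed
            ('ctrl+alt+f', 6),     # data feed reset
        ]:
            subprocess.call(
                [
                    'xdotool',
                    'windowactivate', '--sync', win_id,
                    # park the pointer where there's nothing to click
                    'mousemove_relative', '--sync', str(w - 4), str(h - 4),
                    'click', '--window', win_id, '--repeat', '3', '1',
                    'key', key_combo,
                ],
                timeout=timeout,
            )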