Compare commits: gitea_feat...multichart
206 commits:

ff58421565, c2bc01e1ac, 9e5170033b, 2e0e222f27, 0d45495a18, 43c08018ad, 518d3a9c55, 185090f08f,
36fb8abe9d, d62aa071ae, 19136f66a0, 2665c6525e, 983a764f54, 6f60f60c67, a7b3b1722e, 0711b469b4,
96d485b6ed, ff7b58e8c7, 0f0a97724c, b76370263d, d0b39e8a2b, 2aa5137283, 404a5e1263, 325fe3cf14,
28aaaf9866, d3d1993b5e, 5da2f10ff0, 7f49792a29, 221036eee5, b9f3546d2f, 0cdb065222, 84bd4e99ef,
f0e6c5827f, 5b68efdf31, 99fbce3231, f5b15aba11, b1de6dfd0e, e280e487c8, cf979e9ca2, 5a08ccc6a3,
e0381e49a9, 2aeddaa805, 1b888d273f, f8a0c60889, d11b5da2b3, 28c0f80e6d, 426ae9e2ca, 5139a27327,
f2125187f4, 1f11f7e4bf, b24d5b61cc, 7d92a8ed6c, 47ffe60047, e44b485bcb, 7d404ed7ef, b45c027db8,
973902db43, 0ec1c8e85d, 4866bdc460, 65434e2e67, b762cf0456, 3ec4c851cc, 5ed4e5c945, 84c48f17e2,
ee8e27f256, e06a6c94eb, fdda0c6f77, c9f7083b79, a91664e154, 3b773c667b, aa4a48cb98, cec5ef1d26,
1f6ec98790, 702a13a278, 1aa7b937b9, 81b6cb17de, 36f1edead0, e1e52e9130, 3b15e76285, c15a2babc1,
171e5da9f9, 92a609275b, 07ee244ca9, 2122c7cbd9, 888ae26d82, 61d1dd3306, 73f1e362bc, 7208b3a2d2,
ba8284e73b, ad1c9d02fa, bb2d451a37, 36c93e8dfe, d68fd8662c, 58a82e7a3f, 5f5843613e, 42810056c3,
c46f0c4a0a, a2b4276cdf, bf0810ef4f, 404a34a049, 6b35484bce, 3e754342de, a06ff2fd75, 5b6240d167,
846421bb28, 160c177ece, 40e6b5935d, 5ba472176e, 5c853395bb, b7619e5ea4, ead2e1ed1f, de3fd9edbe,
e9201c2bdf, d7b9c4044e, 0deee65318, 4c92575cb2, 9a65cbbf5c, a50a09ff5c, d4f8bb6545, 066027fe1c,
efc16a7fb4, c609e1b663, c53cc5f384, c509eff273, 12f61f0d75, 359a0e1ae3, 0612fff3c1, 62c7d42f1e,
3866fcac9a, d95d6d37f1, 00a0caa963, a6f5076db0, 7463f4c558, 44cfa5fe70, f31f4d3256, e9ca618fcb,
3a1a188d26, 08ba6127a6, e95152272f, eb0216feaf, 1a3e2c33d9, af3cd9faa0, c09f5cbbff, be9f1a1aef,
0b28795b52, a36b82f781, d85a8c09fa, bb8d26ec93, d820ade189, 29b9091711, 080c93477a, 8005253497,
56a92f23cb, 6696050d01, ba62454932, 00d8298871, 40de76180f, e2b279c251, 52788c1573, 719f7505ef,
8021e0da7e, 68ccf668b9, e1af087626, 029dee994f, 1ab9acf075, 02cef3ed6d, 12307e5757, e1e04d3215,
8dfa080fd1, 993a42e88f, 51e6ca21fa, 3da84387c3, 363820ddc0, 101c2fd865, d739cf15b8, b0e31990d3,
f850959a39, f2179dc6f8, 4a9896a29d, 9c69636388, d7edcc30c4, d717096d82, 6e74f52049, 92257995a6,
fbcd410ebe, 560782ebc0, fcbdfb4e7a, e7ee0b343f, 43086029a2, e4eb568b97, d06f7ef679, 3633acb3c5,
2368ddadf2, 87ffb23cb8, 99311b4f46, 6252469ecc, a1dd0fb997, 772d1f0f4e, 440ff047e8, b72658e243,
ee57f5c09f, 1e586e7c85, e4a5dc55de, 44c8c30327, 190c792515, d3a40678ff
|
@@ -257,7 +257,7 @@ async def open_piker_runtime(

    # and spawn the service tree distributed per that.
    start_method: str = 'trio',

    tractor_kwargs: dict = {},
    **tractor_kwargs,

) -> tuple[
    tractor.Actor,

@@ -152,9 +152,14 @@ class Profiler(object):
            # don't do anything
            return cls._disabledProfiler

        # create an actual profiling object
        cls._depth += 1
        obj = super(Profiler, cls).__new__(cls)
        obj._msgs = []

        # create an actual profiling object
        if cls._depth < 1:
            cls._msgs = []

        obj._name = msg or func_qualname
        obj._delayed = delayed
        obj._markCount = 0

@@ -174,8 +179,12 @@ class Profiler(object):

        self._markCount += 1
        newTime = perf_counter()
        tot_ms = (newTime - self._firstTime) * 1000
        ms = (newTime - self._lastTime) * 1000
        self._newMsg(" %s: %0.4f ms", msg, ms)
        self._newMsg(
            f" {msg}: {ms:0.4f}, tot:{tot_ms:0.4f}"
        )

        self._lastTime = newTime

    def mark(self, msg=None):

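The revised `mark()` above now reports both the per-mark delta and the running total in milliseconds. A minimal usage sketch, assuming the `msg`/`disabled`/`ms_threshold` constructor arguments shown later in this compare and an absolute `piker._profile` import path inferred from the relative imports below; the sleeps are stand-ins for real work:

import time
from piker._profile import Profiler  # path assumed from ``from .._profile import ...`` below

def render_frame() -> None:
    profiler = Profiler(
        msg='render_frame()',
        disabled=False,   # force-enable for this sketch
        ms_threshold=0,   # record every mark regardless of duration
    )
    time.sleep(0.002)            # stand-in for a data-prep stage
    profiler('data prepped')     # records "  data prepped: <delta>, tot:<total>"
    time.sleep(0.003)            # stand-in for a drawing stage
    profiler('curves drawn')

render_frame()
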
(File diff suppressed because it is too large.)
@@ -15,17 +15,30 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Graphics related downsampling routines for compressing to pixel
limits on the display device.
Graphics downsampling using the infamous M4 algorithm.

This is one of ``piker``'s secret weapons allowing us to boss all other
charting platforms B)

(AND DON'T YOU DARE TAKE THIS CODE WITHOUT CREDIT OR WE'LL SUE UR F#&@* ASS).

NOTES: this method is a so called "visualization driven data
aggregation" approach. It gives error-free line chart
downsampling, see
further scientific paper resources:
- http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
- http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf

Details on implementation of this algo are based in,
https://github.com/pikers/piker/issues/109

'''
import math
from typing import Optional

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
    jit,
    njit,
    # float64, optional, int64,
)


@@ -35,109 +48,6 @@ from ..log import get_logger
log = get_logger(__name__)


def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
    '''
    Convert a OHLC struct-array containing 'high'/'low' columns
    to a "joined" max/min 1-d array.

    '''
    index = ohlc['index']
    hls = ohlc[[
        'low',
        'high',
    ]]

    mxmn = np.empty(2*hls.size, dtype=np.float64)
    x = np.empty(2*hls.size, dtype=np.float64)
    trace_hl(hls, mxmn, x, index[0])
    x = x + index[0]

    return mxmn, x


@jit(
    # TODO: the type annots..
    # float64[:](float64[:],),
    nopython=True,
)
def trace_hl(
    hl: 'np.ndarray',
    out: np.ndarray,
    x: np.ndarray,
    start: int,

    # the "offset" values in the x-domain which
    # place the 2 output points around each ``int``
    # master index.
    margin: float = 0.43,

) -> None:
    '''
    "Trace" the outline of the high-low values of an ohlc sequence
    as a line such that the maximum deviation (aka disperaion) between
    bars if preserved.

    This routine is expected to modify input arrays in-place.

    '''
    last_l = hl['low'][0]
    last_h = hl['high'][0]

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i] = last_h

        last_l = l
        last_h = h

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin

    return out


def ohlc_flatten(
    ohlc: np.ndarray,
    use_mxmn: bool = True,

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLCV struct-array into a flat ready-for-line-plotting
    1-d array that is 4 times the size with x-domain values distributed
    evenly (by 0.5 steps) over each index.

    '''
    index = ohlc['index']

    if use_mxmn:
        # traces a line optimally over highs to lows
        # using numba. NOTE: pretty sure this is faster
        # and looks about the same as the below output.
        flat, x = hl2mxmn(ohlc)

    else:
        flat = rfn.structured_to_unstructured(
            ohlc[['open', 'high', 'low', 'close']]
        ).flatten()

        x = np.linspace(
            start=index[0] - 0.5,
            stop=index[-1] + 0.5,
            num=len(flat),
        )
    return x, flat


def ds_m4(
    x: np.ndarray,
    y: np.ndarray,

@@ -160,16 +70,6 @@ def ds_m4(
    This is more or less an OHLC style sampling of a line-style series.

    '''
    # NOTE: this method is a so called "visualization driven data
    # aggregation" approach. It gives error-free line chart
    # downsampling, see
    # further scientific paper resources:
    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf

    # Details on implementation of this algo are based in,
    # https://github.com/pikers/piker/issues/109

    # XXX: from infinite on downsampling viewable graphics:
    # "one thing i remembered about the binning - if you are
    # picking a range within your timeseries the start and end bin

@@ -191,6 +91,14 @@ def ds_m4(
    x_end = x[-1]  # x end value/highest in domain
    xrange = (x_end - x_start)

    if xrange < 0:
        log.error(f'-VE M4 X-RANGE: {x_start} -> {x_end}')
        # XXX: broken x-range calc-case, likely the x-end points
        # are wrong and have some default value set (such as
        # x_end -> <some epoch float> while x_start -> 0.5).
        # breakpoint()
        return None

    # XXX: always round up on the input pixels
    # lnx = len(x)
    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)

@@ -256,8 +164,7 @@ def ds_m4(
    return nb, x_out, y_out, ymn, ymx


@jit(
    nopython=True,
@njit(
    nogil=True,
)
def _m4(

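The module docstring above credits the M4 "visualization driven data aggregation" papers: per horizontal pixel bin only the first, minimum, maximum and last y-values are kept, which is enough to reproduce the rendered line exactly at that pixel width. A pure-numpy sketch of that reduction (illustrative only; the real `ds_m4()`/`_m4()` are numba-compiled and also return the bin count and y-range):

import numpy as np

def m4_downsample(x: np.ndarray, y: np.ndarray, uppx: float) -> np.ndarray:
    '''
    Reduce (x, y) to 4 points (first, min, max, last) per
    ``uppx``-wide x-bin; a minimal, pure-numpy illustration.

    '''
    # assign each sample to a pixel-column bin
    bins = np.floor((x - x[0]) / uppx).astype(int)
    out = []
    for b in np.unique(bins):
        ys = y[bins == b]
        out.append((ys[0], ys.min(), ys.max(), ys[-1]))
    return np.array(out)

# ~1000 samples squeezed into ~100 pixel columns
xs = np.arange(1000, dtype=float)
ys = np.sin(xs / 20)
print(m4_downsample(xs, ys, uppx=10).shape)  # -> (100, 4)
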
@ -0,0 +1,452 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Super fast ``QPainterPath`` generation related operator routines.
|
||||
|
||||
"""
|
||||
from math import (
|
||||
ceil,
|
||||
floor,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
from numba import (
|
||||
# types,
|
||||
njit,
|
||||
float64,
|
||||
int64,
|
||||
# optional,
|
||||
)
|
||||
|
||||
# TODO: for ``numba`` typing..
|
||||
# from ._source import numba_ohlc_dtype
|
||||
from ._m4 import ds_m4
|
||||
from .._profile import (
|
||||
Profiler,
|
||||
pg_profile_enabled,
|
||||
ms_slower_then,
|
||||
)
|
||||
|
||||
|
||||
def xy_downsample(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
|
||||
x_spacer: float = 0.5,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
float,
|
||||
float,
|
||||
]:
|
||||
'''
|
||||
Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
|
||||
``uppx`` (units-per-pixel) and add space between discreet datums.
|
||||
|
||||
'''
|
||||
# downsample whenever more then 1 pixels per datum can be shown.
|
||||
# always refresh data bounds until we get diffing
|
||||
# working properly, see above..
|
||||
m4_out = ds_m4(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
)
|
||||
|
||||
if m4_out is not None:
|
||||
bins, x, y, ymn, ymx = m4_out
|
||||
# flatten output to 1d arrays suitable for path-graphics generation.
|
||||
x = np.broadcast_to(x[:, None], y.shape)
|
||||
x = (x + np.array(
|
||||
[-x_spacer, 0, 0, x_spacer]
|
||||
)).flatten()
|
||||
y = y.flatten()
|
||||
|
||||
return x, y, ymn, ymx
|
||||
|
||||
# XXX: we accept a None output for the case where the input range
|
||||
# to ``ds_m4()`` is bad (-ve) and we want to catch and debug
|
||||
# that (seemingly super rare) circumstance..
|
||||
return None
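
The `x_spacer` handling above broadcasts the one-x-per-bin vector against the `(nb, 4)` M4 output and shifts the four points of each bin by `[-x_spacer, 0, 0, x_spacer]` so they don't stack on a single x coordinate. A tiny numpy demo of just that flattening step (array values invented):

import numpy as np

x_spacer = 0.5
x = np.array([10., 11., 12.])            # one x per M4 bin
y = np.array([                           # (first, min, max, last) per bin
    [1.0, 0.5, 1.5, 1.2],
    [1.2, 1.0, 2.0, 1.8],
    [1.8, 1.6, 2.2, 2.0],
])

x2d = np.broadcast_to(x[:, None], y.shape)
x_flat = (x2d + np.array([-x_spacer, 0, 0, x_spacer])).flatten()
y_flat = y.flatten()

print(x_flat)  # [ 9.5 10. 10. 10.5 10.5 11. ... ]
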
|
||||
|
||||
|
||||
@njit(
|
||||
# NOTE: need to construct this manually for readonly
|
||||
# arrays, see https://github.com/numba/numba/issues/4511
|
||||
# (
|
||||
# types.Array(
|
||||
# numba_ohlc_dtype,
|
||||
# 1,
|
||||
# 'C',
|
||||
# readonly=True,
|
||||
# ),
|
||||
# int64,
|
||||
# types.unicode_type,
|
||||
# optional(float64),
|
||||
# ),
|
||||
nogil=True
|
||||
)
|
||||
def path_arrays_from_ohlc(
|
||||
data: np.ndarray,
|
||||
start: int64,
|
||||
bar_w: float64,
|
||||
bar_gap: float64 = 0.16,
|
||||
use_time_index: bool = True,
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
'''
|
||||
Generate an array of lines objects from input ohlc data.
|
||||
|
||||
'''
|
||||
size = int(data.shape[0] * 6)
|
||||
|
||||
# XXX: see this for why the dtype might have to be defined outside
|
||||
# the routine.
|
||||
# https://github.com/numba/numba/issues/4098#issuecomment-493914533
|
||||
x = np.zeros(
|
||||
shape=size,
|
||||
dtype=float64,
|
||||
)
|
||||
y, c = x.copy(), x.copy()
|
||||
|
||||
half_w: float = bar_w/2
|
||||
|
||||
# TODO: report bug for assert @
|
||||
# /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
|
||||
for i, q in enumerate(data[start:], start):
|
||||
|
||||
open = q['open']
|
||||
high = q['high']
|
||||
low = q['low']
|
||||
close = q['close']
|
||||
|
||||
if use_time_index:
|
||||
index = float64(q['time'])
|
||||
else:
|
||||
index = float64(q['index'])
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index = float64(q[index_field])
|
||||
# AND this (probably)
|
||||
# open, high, low, close, index = q[
|
||||
# ['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
istart = i * 6
|
||||
istop = istart + 6
|
||||
|
||||
# x,y detail the 6 points which connect all vertexes of a ohlc bar
|
||||
mid: float = index + half_w
|
||||
x[istart:istop] = (
|
||||
index + bar_gap,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
index + bar_w - bar_gap,
|
||||
)
|
||||
y[istart:istop] = (
|
||||
open,
|
||||
open,
|
||||
low,
|
||||
high,
|
||||
close,
|
||||
close,
|
||||
)
|
||||
|
||||
# specifies that the first edge is never connected to the
|
||||
# prior bars last edge thus providing a small "gap"/"space"
|
||||
# between bars determined by ``bar_gap``.
|
||||
c[istart:istop] = (1, 1, 1, 1, 1, 0)
|
||||
|
||||
return x, y, c
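
For reference, the six vertices generated above for a single bar, worked by hand (input values invented): with open=10, high=12, low=9, close=11, x-index 100, `bar_w=1.0` and the default `bar_gap=0.16`, `half_w` is 0.5 and `mid` is 100.5, giving:

x = (100.16, 100.5, 100.5, 100.5, 100.5, 100.84)   # left tick, wick x4, right tick
y = (10.0, 10.0, 9.0, 12.0, 11.0, 11.0)            # open, open, low, high, close, close
c = (1, 1, 1, 1, 1, 0)                             # break the path before the next bar
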
|
||||
|
||||
|
||||
def hl2mxmn(
|
||||
ohlc: np.ndarray,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Convert a OHLC struct-array containing 'high'/'low' columns
|
||||
to a "joined" max/min 1-d array.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
hls = ohlc[[
|
||||
'low',
|
||||
'high',
|
||||
]]
|
||||
|
||||
mxmn = np.empty(2*hls.size, dtype=np.float64)
|
||||
x = np.empty(2*hls.size, dtype=np.float64)
|
||||
trace_hl(hls, mxmn, x, index[0])
|
||||
x = x + index[0]
|
||||
|
||||
return mxmn, x
|
||||
|
||||
|
||||
@njit(
|
||||
# TODO: the type annots..
|
||||
# float64[:](float64[:],),
|
||||
)
|
||||
def trace_hl(
|
||||
hl: 'np.ndarray',
|
||||
out: np.ndarray,
|
||||
x: np.ndarray,
|
||||
start: int,
|
||||
|
||||
# the "offset" values in the x-domain which
|
||||
# place the 2 output points around each ``int``
|
||||
# master index.
|
||||
margin: float = 0.43,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
"Trace" the outline of the high-low values of an ohlc sequence
|
||||
as a line such that the maximum deviation (aka disperaion) between
|
||||
bars if preserved.
|
||||
|
||||
This routine is expected to modify input arrays in-place.
|
||||
|
||||
'''
|
||||
last_l = hl['low'][0]
|
||||
last_h = hl['high'][0]
|
||||
|
||||
for i in range(hl.size):
|
||||
row = hl[i]
|
||||
l, h = row['low'], row['high']
|
||||
|
||||
up_diff = h - last_l
|
||||
down_diff = last_h - l
|
||||
|
||||
if up_diff > down_diff:
|
||||
out[2*i + 1] = h
|
||||
out[2*i] = last_l
|
||||
else:
|
||||
out[2*i + 1] = l
|
||||
out[2*i] = last_h
|
||||
|
||||
last_l = l
|
||||
last_h = h
|
||||
|
||||
x[2*i] = int(i) - margin
|
||||
x[2*i + 1] = int(i) + margin
|
||||
|
||||
return out
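
A hand-worked pass of `trace_hl()` over three (low, high) rows, to make the in-place output concrete (values invented; x offsets use the default `margin=0.43`):

import numpy as np

hl = np.array(
    [(9., 11.), (10., 13.), (8., 10.)],
    dtype=[('low', np.float64), ('high', np.float64)],
)
out = np.empty(2 * hl.size, dtype=np.float64)
x = np.empty(2 * hl.size, dtype=np.float64)
trace_hl(hl, out, x, 0)

# bar 0: up_diff == down_diff -> else branch: out[0:2] = (11, 9)
# bar 1: up_diff (13-9) > down_diff (11-10) -> out[2:4] = (9, 13)
# bar 2: the down move dominates -> out[4:6] = (13, 8)
print(out)  # [11.  9.  9. 13. 13.  8.]
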
|
||||
|
||||
|
||||
def ohlc_flatten(
|
||||
ohlc: np.ndarray,
|
||||
use_mxmn: bool = True,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> tuple[np.ndarray, np.ndarray]:
|
||||
'''
|
||||
Convert an OHLCV struct-array into a flat ready-for-line-plotting
|
||||
1-d array that is 4 times the size with x-domain values distributed
|
||||
evenly (by 0.5 steps) over each index.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
|
||||
if use_mxmn:
|
||||
# traces a line optimally over highs to lows
|
||||
# using numba. NOTE: pretty sure this is faster
|
||||
# and looks about the same as the below output.
|
||||
flat, x = hl2mxmn(ohlc)
|
||||
|
||||
else:
|
||||
flat = rfn.structured_to_unstructured(
|
||||
ohlc[['open', 'high', 'low', 'close']]
|
||||
).flatten()
|
||||
|
||||
x = np.linspace(
|
||||
start=index[0] - 0.5,
|
||||
stop=index[-1] + 0.5,
|
||||
num=len(flat),
|
||||
)
|
||||
return x, flat
|
||||
|
||||
|
||||
def slice_from_time(
|
||||
arr: np.ndarray,
|
||||
start_t: float,
|
||||
stop_t: float,
|
||||
step: int | None = None,
|
||||
|
||||
) -> slice:
|
||||
'''
|
||||
Calculate array indices mapped from a time range and return them in
|
||||
a slice.
|
||||
|
||||
Given an input array with an epoch `'time'` series entry, calculate
|
||||
the indices which span the time range and return in a slice. Presume
|
||||
each `'time'` step increment is uniform and when the time stamp
|
||||
series contains gaps (the uniform presumption is untrue) use
|
||||
``np.searchsorted()`` binary search to look up the appropriate
|
||||
index.
|
||||
|
||||
'''
|
||||
profiler = Profiler(
|
||||
msg='slice_from_time()',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
)
|
||||
|
||||
times = arr['time']
|
||||
t_first = floor(times[0])
|
||||
t_last = ceil(times[-1])
|
||||
|
||||
# the greatest index we can return which slices to the
|
||||
# end of the input array.
|
||||
read_i_max = arr.shape[0]
|
||||
|
||||
# TODO: require this is always passed in?
|
||||
if step is None:
|
||||
step = round(t_last - times[-2])
|
||||
if step == 0:
|
||||
step = 1
|
||||
|
||||
# compute (presumed) uniform-time-step index offsets
|
||||
i_start_t = floor(start_t)
|
||||
read_i_start = floor(((i_start_t - t_first) // step)) - 1
|
||||
|
||||
i_stop_t = ceil(stop_t)
|
||||
|
||||
# XXX: edge case -> always set stop index to last in array whenever
|
||||
# the input stop time is detected to be greater then the equiv time
|
||||
# stamp at that last entry.
|
||||
if i_stop_t >= t_last:
|
||||
read_i_stop = read_i_max
|
||||
else:
|
||||
read_i_stop = ceil((i_stop_t - t_first) // step) + 1
|
||||
|
||||
# always clip outputs to array support
|
||||
# for read start:
|
||||
# - never allow a start < the 0 index
|
||||
# - never allow an end index > the read array len
|
||||
read_i_start = min(
|
||||
max(0, read_i_start),
|
||||
read_i_max - 1,
|
||||
)
|
||||
read_i_stop = max(
|
||||
0,
|
||||
min(read_i_stop, read_i_max),
|
||||
)
|
||||
|
||||
# check for larger-then-latest calculated index for given start
|
||||
# time, in which case we do a binary search for the correct index.
|
||||
# NOTE: this is usually the result of a time series with time gaps
|
||||
# where it is expected that each index step maps to a uniform step
|
||||
# in the time stamp series.
|
||||
t_iv_start = times[read_i_start]
|
||||
if (
|
||||
t_iv_start > i_start_t
|
||||
):
|
||||
# do a binary search for the best index mapping to ``start_t``
|
||||
# given we measured an overshoot using the uniform-time-step
|
||||
# calculation from above.
|
||||
|
||||
# TODO: once we start caching these per source-array,
|
||||
# we can just overwrite ``read_i_start`` directly.
|
||||
new_read_i_start = np.searchsorted(
|
||||
times,
|
||||
i_start_t,
|
||||
side='left',
|
||||
)
|
||||
|
||||
# TODO: minimize binary search work as much as possible:
|
||||
# - cache these remap values which compensate for gaps in the
|
||||
# uniform time step basis where we calc a later start
|
||||
# index for the given input ``start_t``.
|
||||
# - can we shorten the input search sequence by heuristic?
|
||||
# up_to_arith_start = index[:read_i_start]
|
||||
|
||||
if (
|
||||
new_read_i_start <= read_i_start
|
||||
):
|
||||
# t_diff = t_iv_start - start_t
|
||||
# print(
|
||||
# f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
|
||||
# f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
|
||||
# f'diff: {t_diff}\n'
|
||||
# f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
|
||||
# )
|
||||
read_i_start = new_read_i_start - 1
|
||||
|
||||
t_iv_stop = times[read_i_stop - 1]
|
||||
if (
|
||||
t_iv_stop > i_stop_t
|
||||
):
|
||||
# t_diff = stop_t - t_iv_stop
|
||||
# print(
|
||||
# f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
|
||||
# f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
|
||||
# f'diff: {t_diff}\n'
|
||||
# # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
|
||||
# )
|
||||
new_read_i_stop = np.searchsorted(
|
||||
times[read_i_start:],
|
||||
# times,
|
||||
i_stop_t,
|
||||
side='left',
|
||||
)
|
||||
|
||||
if (
|
||||
new_read_i_stop <= read_i_stop
|
||||
):
|
||||
read_i_stop = read_i_start + new_read_i_stop + 1
|
||||
|
||||
# sanity checks for range size
|
||||
# samples = (i_stop_t - i_start_t) // step
|
||||
# index_diff = read_i_stop - read_i_start + 1
|
||||
# if index_diff > (samples + 3):
|
||||
# breakpoint()
|
||||
|
||||
# read-relative indexes: gives a slice where `shm.array[read_slc]`
|
||||
# will be the data spanning the input time range `start_t` ->
|
||||
# `stop_t`
|
||||
read_slc = slice(
|
||||
int(read_i_start),
|
||||
int(read_i_stop),
|
||||
)
|
||||
|
||||
profiler(
|
||||
'slicing complete'
|
||||
# f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
|
||||
# f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
|
||||
)
|
||||
|
||||
# NOTE: if caller needs absolute buffer indices they can
|
||||
# slice the buffer abs index like so:
|
||||
# index = arr['index']
|
||||
# abs_indx = index[read_slc]
|
||||
# abs_slc = slice(
|
||||
# int(abs_indx[0]),
|
||||
# int(abs_indx[-1]),
|
||||
# )
|
||||
|
||||
return read_slc
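
As the docstring above explains, `slice_from_time()` first guesses indices arithmetically from a presumed-uniform time step and only falls back to `np.searchsorted()` when that guess overshoots (i.e. the series has gaps). A standalone sketch of just the binary-search fallback (array values invented):

import numpy as np

# epoch-stamped series with a gap between 1030 and 1100
times = np.array([1000, 1010, 1020, 1030, 1100, 1110, 1120], dtype=float)

start_t, stop_t = 1025.0, 1105.0

# left-insertion points give the first indices whose time >= target
i_start = int(np.searchsorted(times, start_t, side='left'))  # -> 3
i_stop = int(np.searchsorted(times, stop_t, side='left'))    # -> 5

read_slc = slice(i_start, i_stop + 1)
print(times[read_slc])  # [1030. 1100. 1110.]
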
@@ -253,20 +253,30 @@ class Sampler:
            # f'consumers: {subs}'
        )
        borked: set[tractor.MsgStream] = set()
        for stream in subs:
        sent: set[tractor.MsgStream] = set()
        while True:
            try:
                await stream.send({
                    'index': time_stamp or last_ts,
                    'period': period_s,
                })
            except (
                trio.BrokenResourceError,
                trio.ClosedResourceError
            ):
                log.error(
                    f'{stream._ctx.chan.uid} dropped connection'
                )
                borked.add(stream)
                for stream in (subs - sent):
                    try:
                        await stream.send({
                            'index': time_stamp or last_ts,
                            'period': period_s,
                        })
                        sent.add(stream)

                    except (
                        trio.BrokenResourceError,
                        trio.ClosedResourceError
                    ):
                        log.error(
                            f'{stream._ctx.chan.uid} dropped connection'
                        )
                        borked.add(stream)
                else:
                    break
            except RuntimeError:
                log.warning(f'Client subs {subs} changed while broadcasting')
                continue

        for stream in borked:
            try:

@@ -848,6 +858,16 @@ async def uniform_rate_send(
        # rate timing exactly lul
        try:
            await stream.send({sym: first_quote})
        except tractor.RemoteActorError as rme:
            if rme.type is not tractor._exceptions.StreamOverrun:
                raise
            ctx = stream._ctx
            chan = ctx.chan
            log.warning(
                'Throttled quote-stream overrun!\n'
                f'{sym}:{ctx.cid}@{chan.uid}'
            )

        except (
            # NOTE: any of these can be raised by ``tractor``'s IPC
            # transport-layer and we want to be highly resilient

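The `uniform_rate_send` hunk above swallows a `tractor.RemoteActorError` only when its boxed type is a `StreamOverrun`, logging and moving on instead of crashing the throttle task. A minimal sketch of that guard in isolation, reusing only the attributes shown in the diff (`rme.type`, `stream._ctx`, `ctx.cid`, `ctx.chan.uid`); the function name is hypothetical:

import logging
import tractor

log = logging.getLogger(__name__)

async def send_first_quote(stream, sym: str, first_quote: dict) -> None:
    '''
    Send the initial quote, tolerating (only) stream-overrun errors
    reported by the remote consumer.

    '''
    try:
        await stream.send({sym: first_quote})
    except tractor.RemoteActorError as rme:
        if rme.type is not tractor._exceptions.StreamOverrun:
            raise
        ctx = stream._ctx
        log.warning(
            f'Throttled quote-stream overrun! {sym}:{ctx.cid}@{ctx.chan.uid}'
        )
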
@@ -207,7 +207,7 @@ def get_feed_bus(

) -> _FeedsBus:
    '''
    Retreive broker-daemon-local data feeds bus from process global
    Retrieve broker-daemon-local data feeds bus from process global
    scope. Serialize task access to lock.

    '''

@@ -250,6 +250,7 @@ async def start_backfill(
    shm: ShmArray,
    timeframe: float,
    sampler_stream: tractor.MsgStream,
    feed_is_live: trio.Event,

    last_tsdb_dt: Optional[datetime] = None,
    storage: Optional[Storage] = None,

@@ -281,7 +282,14 @@ async def start_backfill(
        - pendulum.from_timestamp(times[-2])
    ).seconds

    if step_size_s == 60:
    # if the market is open (aka we have a live feed) but the
    # history sample step index seems off we report the surrounding
    # data and drop into a bp. this case shouldn't really ever
    # happen if we're doing history retrieval correctly.
    if (
        step_size_s == 60
        and feed_is_live.is_set()
    ):
        inow = round(time.time())
        diff = inow - times[-1]
        if abs(diff) > 60:

@@ -499,6 +507,7 @@ async def basic_backfill(
    bfqsn: str,
    shms: dict[int, ShmArray],
    sampler_stream: tractor.MsgStream,
    feed_is_live: trio.Event,

) -> None:


@@ -518,6 +527,7 @@ async def basic_backfill(
                shm,
                timeframe,
                sampler_stream,
                feed_is_live,
            )
        )
    except DataUnavailable:

@@ -534,6 +544,7 @@ async def tsdb_backfill(
    bfqsn: str,
    shms: dict[int, ShmArray],
    sampler_stream: tractor.MsgStream,
    feed_is_live: trio.Event,

    task_status: TaskStatus[
        tuple[ShmArray, ShmArray]

@@ -568,6 +579,8 @@ async def tsdb_backfill(
            shm,
            timeframe,
            sampler_stream,
            feed_is_live,

            last_tsdb_dt=last_tsdb_dt,
            tsdb_is_up=True,
            storage=storage,

@@ -870,6 +883,7 @@ async def manage_history(
                60: hist_shm,
            },
            sample_stream,
            feed_is_live,
        )

        # yield back after client connect with filled shm

@@ -904,6 +918,7 @@ async def manage_history(
                60: hist_shm,
            },
            sample_stream,
            feed_is_live,
        )
        task_status.started((
            hist_zero_index,

@@ -1065,7 +1080,10 @@ async def allocate_persistent_feed(
    # seed the buffer with a history datum - this is most handy
    # for many backends which don't sample @ 1s OHLC but do have
    # slower data such as 1m OHLC.
    if not len(rt_shm.array):
    if (
        not len(rt_shm.array)
        and hist_shm.array.size
    ):
        rt_shm.push(hist_shm.array[-3:-1])
        ohlckeys = ['open', 'high', 'low', 'close']
        rt_shm.array[ohlckeys][-2:] = hist_shm.array['close'][-1]

@@ -1076,6 +1094,9 @@ async def allocate_persistent_feed(
        rt_shm.array['time'][0] = ts
        rt_shm.array['time'][1] = ts + 1

    elif hist_shm.array.size == 0:
        await tractor.breakpoint()

    # wait the spawning parent task to register its subscriber
    # send-stream entry before we start the sample loop.
    await sub_registered.wait()

@@ -1568,6 +1589,9 @@ async def open_feed(
        (brokermod, bfqsns),
    ) in zip(ctxs, providers.items()):

        # NOTE: do it asap to avoid overruns during multi-feed setup?
        ctx._backpressure = backpressure

        for fqsn, flume_msg in flumes_msg_dict.items():
            flume = Flume.from_msg(flume_msg)
            assert flume.symbol.fqsn == fqsn

|
@ -22,17 +22,11 @@ real-time data processing data-structures.
|
|||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from functools import partial
|
||||
from typing import (
|
||||
AsyncIterator,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import tractor
|
||||
from tractor.trionics import (
|
||||
maybe_open_context,
|
||||
)
|
||||
import pendulum
|
||||
import numpy as np
|
||||
|
||||
|
@ -45,12 +39,13 @@ from ._sharedmem import (
|
|||
ShmArray,
|
||||
_Token,
|
||||
)
|
||||
from ._sampling import (
|
||||
open_sample_stream,
|
||||
)
|
||||
# from .._profile import (
|
||||
# Profiler,
|
||||
# pg_profile_enabled,
|
||||
# )
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pyqtgraph import PlotItem
|
||||
# from pyqtgraph import PlotItem
|
||||
from .feed import Feed
|
||||
|
||||
|
||||
|
@ -147,26 +142,6 @@ class Flume(Struct):
|
|||
async def receive(self) -> dict:
|
||||
return await self.stream.receive()
|
||||
|
||||
@acm
|
||||
async def index_stream(
|
||||
self,
|
||||
delay_s: float = 1,
|
||||
|
||||
) -> AsyncIterator[int]:
|
||||
|
||||
if not self.feed:
|
||||
raise RuntimeError('This flume is not part of any ``Feed``?')
|
||||
|
||||
# TODO: maybe a public (property) API for this in ``tractor``?
|
||||
portal = self.stream._ctx._portal
|
||||
assert portal
|
||||
|
||||
# XXX: this should be singleton on a host,
|
||||
# a lone broker-daemon per provider should be
|
||||
# created for all practical purposes
|
||||
async with open_sample_stream(float(delay_s)) as stream:
|
||||
yield stream
|
||||
|
||||
def get_ds_info(
|
||||
self,
|
||||
) -> tuple[float, float, float]:
|
||||
|
@ -218,104 +193,18 @@ class Flume(Struct):
|
|||
def get_index(
|
||||
self,
|
||||
time_s: float,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> int:
|
||||
) -> int | float:
|
||||
'''
|
||||
Return array shm-buffer index for for epoch time.
|
||||
|
||||
'''
|
||||
array = self.rt_shm.array
|
||||
times = array['time']
|
||||
mask = (times >= time_s)
|
||||
|
||||
if any(mask):
|
||||
return array['index'][mask][0]
|
||||
|
||||
# just the latest index
|
||||
array['index'][-1]
|
||||
|
||||
def slice_from_time(
|
||||
self,
|
||||
array: np.ndarray,
|
||||
start_t: float,
|
||||
stop_t: float,
|
||||
timeframe_s: int = 1,
|
||||
return_data: bool = False,
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Slice an input struct array providing only datums
|
||||
"in view" of this chart.
|
||||
|
||||
'''
|
||||
arr = {
|
||||
1: self.rt_shm.array,
|
||||
60: self.hist_shm.arry,
|
||||
}[timeframe_s]
|
||||
|
||||
times = arr['time']
|
||||
index = array['index']
|
||||
|
||||
# use advanced indexing to map the
|
||||
# time range to the index range.
|
||||
mask = (
|
||||
(times >= start_t)
|
||||
&
|
||||
(times < stop_t)
|
||||
first = np.searchsorted(
|
||||
times,
|
||||
time_s,
|
||||
side='left',
|
||||
)
|
||||
|
||||
# TODO: if we can ensure each time field has a uniform
|
||||
# step we can instead do some arithmetic to determine
|
||||
# the equivalent index like we used to?
|
||||
# return array[
|
||||
# lbar - ifirst:
|
||||
# (rbar - ifirst) + 1
|
||||
# ]
|
||||
|
||||
i_by_t = index[mask]
|
||||
i_0 = i_by_t[0]
|
||||
|
||||
abs_slc = slice(
|
||||
i_0,
|
||||
i_by_t[-1],
|
||||
)
|
||||
# slice data by offset from the first index
|
||||
# available in the passed datum set.
|
||||
read_slc = slice(
|
||||
0,
|
||||
i_by_t[-1] - i_0,
|
||||
)
|
||||
if not return_data:
|
||||
return (
|
||||
abs_slc,
|
||||
read_slc,
|
||||
)
|
||||
|
||||
# also return the readable data from the timerange
|
||||
return (
|
||||
abs_slc,
|
||||
read_slc,
|
||||
arr[mask],
|
||||
)
|
||||
|
||||
def view_data(
|
||||
self,
|
||||
plot: PlotItem,
|
||||
timeframe_s: int = 1,
|
||||
|
||||
) -> np.ndarray:
|
||||
|
||||
# get far-side x-indices plot view
|
||||
vr = plot.viewRect()
|
||||
|
||||
(
|
||||
abs_slc,
|
||||
buf_slc,
|
||||
iv_arr,
|
||||
) = self.slice_from_time(
|
||||
start_t=vr.left(),
|
||||
stop_t=vr.right(),
|
||||
timeframe_s=timeframe_s,
|
||||
return_data=True,
|
||||
)
|
||||
return iv_arr
|
||||
imx = times.shape[0] - 1
|
||||
return min(first, imx)
|
||||
|
|
|
@@ -188,6 +188,8 @@ async def fsp_compute(

    history_by_field['time'] = src_time[-len(history_by_field):]

    history_output['time'] = src.array['time']

    # TODO: XXX:
    # THERE'S A BIG BUG HERE WITH THE `index` field since we're
    # prepending a copy of the first value a few times to make


@@ -54,7 +54,7 @@ def open_trade_ledger(
    broker: str,
    account: str,

) -> str:
) -> dict:
    '''
    Indempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.


@@ -178,8 +178,7 @@ def _main(
    tractor_kwargs,
) -> None:
    '''
    Sync entry point to start a chart: a ``tractor`` + Qt runtime
    entry point
    Sync entry point to start a chart: a ``tractor`` + Qt runtime.

    '''
    run_qtractor(

|
|
@ -49,7 +49,7 @@ class Axis(pg.AxisItem):
|
|||
def __init__(
|
||||
self,
|
||||
plotitem: pgo.PlotItem,
|
||||
typical_max_str: str = '100 000.000',
|
||||
typical_max_str: str = '100 000.000 ',
|
||||
text_color: str = 'bracket',
|
||||
lru_cache_tick_strings: bool = True,
|
||||
**kwargs
|
||||
|
@ -95,9 +95,10 @@ class Axis(pg.AxisItem):
|
|||
self.setPen(_axis_pen)
|
||||
|
||||
# this is the text color
|
||||
# self.setTextPen(pg.mkPen(hcolor(text_color)))
|
||||
self.text_color = text_color
|
||||
|
||||
# generate a bounding rect based on sizing to a "typical"
|
||||
# maximum length-ed string defined as init default.
|
||||
self.typical_br = _font._qfm.boundingRect(typical_max_str)
|
||||
|
||||
# size the pertinent axis dimension to a "typical value"
|
||||
|
@ -154,8 +155,8 @@ class Axis(pg.AxisItem):
|
|||
pi: pgo.PlotItem,
|
||||
name: None | str = None,
|
||||
digits: None | int = 2,
|
||||
# axis_name: str = 'right',
|
||||
bg_color='bracket',
|
||||
bg_color='default',
|
||||
fg_color='black',
|
||||
|
||||
) -> YAxisLabel:
|
||||
|
||||
|
@ -165,22 +166,20 @@ class Axis(pg.AxisItem):
|
|||
digits = digits or 2
|
||||
|
||||
# TODO: ``._ysticks`` should really be an attr on each
|
||||
# ``PlotItem`` no instead of the (containing because of
|
||||
# overlays) widget?
|
||||
# ``PlotItem`` now instead of the containing widget (because of
|
||||
# overlays) ?
|
||||
|
||||
# add y-axis "last" value label
|
||||
sticky = self._stickies[name] = YAxisLabel(
|
||||
pi=pi,
|
||||
parent=self,
|
||||
# TODO: pass this from symbol data
|
||||
digits=digits,
|
||||
opacity=1,
|
||||
digits=digits, # TODO: pass this from symbol data
|
||||
opacity=0.9, # slight see-through
|
||||
bg_color=bg_color,
|
||||
fg_color=fg_color,
|
||||
)
|
||||
|
||||
pi.sigRangeChanged.connect(sticky.update_on_resize)
|
||||
# pi.addItem(sticky)
|
||||
# pi.addItem(last)
|
||||
return sticky
|
||||
|
||||
|
||||
|
@ -244,7 +243,6 @@ class PriceAxis(Axis):
|
|||
self._min_tick = size
|
||||
|
||||
def size_to_values(self) -> None:
|
||||
# self.typical_br = _font._qfm.boundingRect(typical_max_str)
|
||||
self.setWidth(self.typical_br.width())
|
||||
|
||||
# XXX: drop for now since it just eats up h space
|
||||
|
@ -302,27 +300,47 @@ class DynamicDateAxis(Axis):
|
|||
# XX: ARGGGGG AG:LKSKDJF:LKJSDFD
|
||||
chart = self.pi.chart_widget
|
||||
|
||||
flow = chart._flows[chart.name]
|
||||
shm = flow.shm
|
||||
bars = shm.array
|
||||
first = shm._first.value
|
||||
viz = chart._vizs[chart.name]
|
||||
shm = viz.shm
|
||||
array = shm.array
|
||||
times = array['time']
|
||||
i_0, i_l = times[0], times[-1]
|
||||
|
||||
bars_len = len(bars)
|
||||
times = bars['time']
|
||||
# edge cases
|
||||
if (
|
||||
not indexes
|
||||
or
|
||||
(indexes[0] < i_0
|
||||
and indexes[-1] < i_l)
|
||||
or
|
||||
(indexes[0] > i_0
|
||||
and indexes[-1] > i_l)
|
||||
):
|
||||
return []
|
||||
|
||||
epochs = times[list(
|
||||
map(
|
||||
int,
|
||||
filter(
|
||||
lambda i: i > 0 and i < bars_len,
|
||||
(i-first for i in indexes)
|
||||
if viz.index_field == 'index':
|
||||
arr_len = times.shape[0]
|
||||
first = shm._first.value
|
||||
epochs = times[
|
||||
list(
|
||||
map(
|
||||
int,
|
||||
filter(
|
||||
lambda i: i > 0 and i < arr_len,
|
||||
(i - first for i in indexes)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)]
|
||||
]
|
||||
else:
|
||||
epochs = list(map(int, indexes))
|
||||
|
||||
# TODO: **don't** have this hard coded shift to EST
|
||||
# delay = times[-1] - times[-2]
|
||||
dts = np.array(epochs, dtype='datetime64[s]')
|
||||
dts = np.array(
|
||||
epochs,
|
||||
dtype='datetime64[s]',
|
||||
)
|
||||
|
||||
# see units listing:
|
||||
# https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
|
||||
|
@ -340,24 +358,39 @@ class DynamicDateAxis(Axis):
|
|||
spacing: float,
|
||||
|
||||
) -> list[str]:
|
||||
|
||||
return self._indexes_to_timestrs(values)
|
||||
|
||||
# NOTE: handy for debugging the lru cache
|
||||
# info = self.tickStrings.cache_info()
|
||||
# print(info)
|
||||
return self._indexes_to_timestrs(values)
|
||||
|
||||
|
||||
class AxisLabel(pg.GraphicsObject):
|
||||
|
||||
_x_margin = 0
|
||||
_y_margin = 0
|
||||
# relative offsets *OF* the bounding rect relative
|
||||
# to parent graphics object.
|
||||
# eg. <parent>| => <_x_br_offset> => | <text> |
|
||||
_x_br_offset: float = 0
|
||||
_y_br_offset: float = 0
|
||||
|
||||
# relative offsets of text *within* bounding rect
|
||||
# eg. | <_x_margin> => <text> |
|
||||
_x_margin: float = 0
|
||||
_y_margin: float = 0
|
||||
|
||||
# multiplier of the text content's height in order
|
||||
# to force a larger (y-dimension) bounding rect.
|
||||
_y_txt_h_scaling: float = 1
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
parent: pg.GraphicsItem,
|
||||
digits: int = 2,
|
||||
|
||||
bg_color: str = 'bracket',
|
||||
bg_color: str = 'default',
|
||||
fg_color: str = 'black',
|
||||
opacity: int = 1, # XXX: seriously don't set this to 0
|
||||
opacity: int = .8, # XXX: seriously don't set this to 0
|
||||
font_size: str = 'default',
|
||||
|
||||
use_arrow: bool = True,
|
||||
|
@ -368,6 +401,7 @@ class AxisLabel(pg.GraphicsObject):
|
|||
self.setParentItem(parent)
|
||||
|
||||
self.setFlag(self.ItemIgnoresTransformations)
|
||||
self.setZValue(100)
|
||||
|
||||
# XXX: pretty sure this is faster
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
@ -399,14 +433,14 @@ class AxisLabel(pg.GraphicsObject):
|
|||
p: QtGui.QPainter,
|
||||
opt: QtWidgets.QStyleOptionGraphicsItem,
|
||||
w: QtWidgets.QWidget
|
||||
|
||||
) -> None:
|
||||
"""Draw a filled rectangle based on the size of ``.label_str`` text.
|
||||
'''
|
||||
Draw a filled rectangle based on the size of ``.label_str`` text.
|
||||
|
||||
Subtypes can customize further by overloading ``.draw()``.
|
||||
|
||||
"""
|
||||
# p.setCompositionMode(QtWidgets.QPainter.CompositionMode_SourceOver)
|
||||
|
||||
'''
|
||||
if self.label_str:
|
||||
|
||||
# if not self.rect:
|
||||
|
@ -417,7 +451,11 @@ class AxisLabel(pg.GraphicsObject):
|
|||
|
||||
p.setFont(self._dpifont.font)
|
||||
p.setPen(self.fg_color)
|
||||
p.drawText(self.rect, self.text_flags, self.label_str)
|
||||
p.drawText(
|
||||
self.rect,
|
||||
self.text_flags,
|
||||
self.label_str,
|
||||
)
|
||||
|
||||
def draw(
|
||||
self,
|
||||
|
@ -425,6 +463,8 @@ class AxisLabel(pg.GraphicsObject):
|
|||
rect: QtCore.QRectF
|
||||
) -> None:
|
||||
|
||||
p.setOpacity(self.opacity)
|
||||
|
||||
if self._use_arrow:
|
||||
if not self.path:
|
||||
self._draw_arrow_path()
|
||||
|
@ -432,15 +472,13 @@ class AxisLabel(pg.GraphicsObject):
|
|||
p.drawPath(self.path)
|
||||
p.fillPath(self.path, pg.mkBrush(self.bg_color))
|
||||
|
||||
# this adds a nice black outline around the label for some odd
|
||||
# reason; ok by us
|
||||
p.setOpacity(self.opacity)
|
||||
|
||||
# this cause the L1 labels to glitch out if used in the subtype
|
||||
# and it will leave a small black strip with the arrow path if
|
||||
# done before the above
|
||||
p.fillRect(self.rect, self.bg_color)
|
||||
|
||||
p.fillRect(
|
||||
self.rect,
|
||||
self.bg_color,
|
||||
)
|
||||
|
||||
def boundingRect(self): # noqa
|
||||
'''
|
||||
|
@ -484,15 +522,18 @@ class AxisLabel(pg.GraphicsObject):
|
|||
txt_h, txt_w = txt_br.height(), txt_br.width()
|
||||
# print(f'wsw: {self._dpifont.boundingRect(" ")}')
|
||||
|
||||
# allow subtypes to specify a static width and height
|
||||
# allow subtypes to override width and height
|
||||
h, w = self.size_hint()
|
||||
# print(f'axis size: {self._parent.size()}')
|
||||
# print(f'axis geo: {self._parent.geometry()}')
|
||||
|
||||
self.rect = QtCore.QRectF(
|
||||
0, 0,
|
||||
|
||||
# relative bounds offsets
|
||||
self._x_br_offset,
|
||||
self._y_br_offset,
|
||||
|
||||
(w or txt_w) + self._x_margin / 2,
|
||||
(h or txt_h) + self._y_margin / 2,
|
||||
|
||||
(h or txt_h) * self._y_txt_h_scaling + (self._y_margin / 2),
|
||||
)
|
||||
# print(self.rect)
|
||||
# hb = self.path.controlPointRect()
|
||||
|
|
|
@ -50,7 +50,6 @@ from ._cursor import (
|
|||
ContentsLabel,
|
||||
)
|
||||
from ..data._sharedmem import ShmArray
|
||||
from ._l1 import L1Labels
|
||||
from ._ohlc import BarItems
|
||||
from ._curve import (
|
||||
Curve,
|
||||
|
@ -60,7 +59,7 @@ from ._style import (
|
|||
hcolor,
|
||||
CHART_MARGINS,
|
||||
_xaxis_at,
|
||||
_min_points_to_show,
|
||||
# _min_points_to_show,
|
||||
)
|
||||
from ..data.feed import (
|
||||
Feed,
|
||||
|
@ -70,12 +69,10 @@ from ..data._source import Symbol
|
|||
from ..log import get_logger
|
||||
from ._interaction import ChartView
|
||||
from ._forms import FieldsForm
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._overlay import PlotItemOverlay
|
||||
from ._flows import Flow
|
||||
from ._dataviz import Viz
|
||||
from ._search import SearchWidget
|
||||
from . import _pg_overrides as pgo
|
||||
from .._profile import Profiler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._display import DisplayState
|
||||
|
@ -127,7 +124,10 @@ class GodWidget(QWidget):
|
|||
# self.init_strategy_ui()
|
||||
# self.vbox.addLayout(self.hbox)
|
||||
|
||||
self._chart_cache: dict[str, LinkedSplits] = {}
|
||||
self._chart_cache: dict[
|
||||
str,
|
||||
tuple[LinkedSplits, LinkedSplits],
|
||||
] = {}
|
||||
|
||||
self.hist_linked: Optional[LinkedSplits] = None
|
||||
self.rt_linked: Optional[LinkedSplits] = None
|
||||
|
@ -147,23 +147,6 @@ class GodWidget(QWidget):
|
|||
def linkedsplits(self) -> LinkedSplits:
|
||||
return self.rt_linked
|
||||
|
||||
# def init_timeframes_ui(self):
|
||||
# self.tf_layout = QHBoxLayout()
|
||||
# self.tf_layout.setSpacing(0)
|
||||
# self.tf_layout.setContentsMargins(0, 12, 0, 0)
|
||||
# time_frames = ('1M', '5M', '15M', '30M', '1H', '1D', '1W', 'MN')
|
||||
# btn_prefix = 'TF'
|
||||
|
||||
# for tf in time_frames:
|
||||
# btn_name = ''.join([btn_prefix, tf])
|
||||
# btn = QtWidgets.QPushButton(tf)
|
||||
# # TODO:
|
||||
# btn.setEnabled(False)
|
||||
# setattr(self, btn_name, btn)
|
||||
# self.tf_layout.addWidget(btn)
|
||||
|
||||
# self.toolbar_layout.addLayout(self.tf_layout)
|
||||
|
||||
# XXX: strat loader/saver that we don't need yet.
|
||||
# def init_strategy_ui(self):
|
||||
# self.strategy_box = StrategyBoxWidget(self)
|
||||
|
@ -545,6 +528,8 @@ class LinkedSplits(QWidget):
|
|||
|
||||
style: str = 'ohlc_bar',
|
||||
|
||||
**add_plot_kwargs,
|
||||
|
||||
) -> ChartPlotWidget:
|
||||
'''
|
||||
Start up and show main (price) chart and all linked subcharts.
|
||||
|
@ -569,6 +554,7 @@ class LinkedSplits(QWidget):
|
|||
style=style,
|
||||
_is_main=True,
|
||||
sidepane=sidepane,
|
||||
**add_plot_kwargs,
|
||||
)
|
||||
# add crosshair graphic
|
||||
self.chart.addItem(self.cursor)
|
||||
|
@ -593,6 +579,7 @@ class LinkedSplits(QWidget):
|
|||
_is_main: bool = False,
|
||||
|
||||
sidepane: Optional[QWidget] = None,
|
||||
draw_kwargs: dict = {},
|
||||
|
||||
**cpw_kwargs,
|
||||
|
||||
|
@ -647,10 +634,12 @@ class LinkedSplits(QWidget):
|
|||
axis.pi = cpw.plotItem
|
||||
|
||||
cpw.hideAxis('left')
|
||||
# cpw.removeAxis('left')
|
||||
cpw.hideAxis('bottom')
|
||||
|
||||
if (
|
||||
_xaxis_at == 'bottom' and (
|
||||
_xaxis_at == 'bottom'
|
||||
and (
|
||||
self.xaxis_chart
|
||||
or (
|
||||
not self.subplots
|
||||
|
@ -658,6 +647,8 @@ class LinkedSplits(QWidget):
|
|||
)
|
||||
)
|
||||
):
|
||||
# hide the previous x-axis chart's bottom axis since we're
|
||||
# presumably being appended to the bottom subplot.
|
||||
if self.xaxis_chart:
|
||||
self.xaxis_chart.hideAxis('bottom')
|
||||
|
||||
|
@ -702,7 +693,12 @@ class LinkedSplits(QWidget):
|
|||
# link chart x-axis to main chart
|
||||
# this is 1/2 of where the `Link` in ``LinkedSplit``
|
||||
# comes from ;)
|
||||
cpw.setXLink(self.chart)
|
||||
cpw.cv.setXLink(self.chart)
|
||||
|
||||
# NOTE: above is the same as the following,
|
||||
# link this subchart's axes to the main top level chart.
|
||||
# if self.chart:
|
||||
# cpw.cv.linkView(0, self.chart.cv)
|
||||
|
||||
add_label = False
|
||||
anchor_at = ('top', 'left')
|
||||
|
@ -710,12 +706,12 @@ class LinkedSplits(QWidget):
|
|||
# draw curve graphics
|
||||
if style == 'ohlc_bar':
|
||||
|
||||
# graphics, data_key = cpw.draw_ohlc(
|
||||
flow = cpw.draw_ohlc(
|
||||
viz = cpw.draw_ohlc(
|
||||
name,
|
||||
shm,
|
||||
flume=flume,
|
||||
array_key=array_key
|
||||
array_key=array_key,
|
||||
**draw_kwargs,
|
||||
)
|
||||
self.cursor.contents_labels.add_label(
|
||||
cpw,
|
||||
|
@ -727,18 +723,19 @@ class LinkedSplits(QWidget):
|
|||
elif style == 'line':
|
||||
add_label = True
|
||||
# graphics, data_key = cpw.draw_curve(
|
||||
flow = cpw.draw_curve(
|
||||
viz = cpw.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
array_key=array_key,
|
||||
color='default_light',
|
||||
**draw_kwargs,
|
||||
)
|
||||
|
||||
elif style == 'step':
|
||||
add_label = True
|
||||
# graphics, data_key = cpw.draw_curve(
|
||||
flow = cpw.draw_curve(
|
||||
viz = cpw.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
|
@ -746,13 +743,23 @@ class LinkedSplits(QWidget):
|
|||
step_mode=True,
|
||||
color='davies',
|
||||
fill_color='davies',
|
||||
**draw_kwargs,
|
||||
)
|
||||
|
||||
else:
|
||||
raise ValueError(f"Chart style {style} is currently unsupported")
|
||||
|
||||
graphics = flow.graphics
|
||||
data_key = flow.name
|
||||
# NOTE: back-link the new sub-chart to trigger y-autoranging in
|
||||
# the (ohlc parent) main chart for this linked set.
|
||||
# if self.chart:
|
||||
# main_viz = self.chart.get_viz(self.chart.name)
|
||||
# self.chart.view.enable_auto_yrange(
|
||||
# src_vb=cpw.view,
|
||||
# viz=main_viz,
|
||||
# )
|
||||
|
||||
graphics = viz.graphics
|
||||
data_key = viz.name
|
||||
|
||||
if _is_main:
|
||||
assert style == 'ohlc_bar', 'main chart must be OHLC'
|
||||
|
@ -810,9 +817,13 @@ class LinkedSplits(QWidget):
|
|||
self.chart.sidepane.setMinimumWidth(sp_w)
|
||||
|
||||
|
||||
# TODO: we should really drop using this type and instead just
|
||||
# write our own wrapper around `PlotItem`..
|
||||
class ChartPlotWidget(pg.PlotWidget):
|
||||
'''
|
||||
``GraphicsView`` subtype containing a single ``PlotItem``.
|
||||
``GraphicsView`` subtype containing a ``.plotItem: PlotItem`` as well
|
||||
as a `.pi_overlay: PlotItemOverlay`` which helps manage and overlay flow
|
||||
graphics view multiple compose view boxes.
|
||||
|
||||
- The added methods allow for plotting OHLC sequences from
|
||||
``np.ndarray``s with appropriate field names.
|
||||
|
@ -827,8 +838,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
sig_mouse_leave = QtCore.pyqtSignal(object)
|
||||
sig_mouse_enter = QtCore.pyqtSignal(object)
|
||||
|
||||
_l1_labels: L1Labels = None
|
||||
|
||||
mode_name: str = 'view'
|
||||
|
||||
# TODO: can take a ``background`` color setting - maybe there's
|
||||
|
@ -871,17 +880,17 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
self.sidepane: Optional[FieldsForm] = None
|
||||
|
||||
# source of our custom interactions
|
||||
self.cv = cv = self.mk_vb(name)
|
||||
self.cv = self.mk_vb(name)
|
||||
|
||||
pi = pgo.PlotItem(
|
||||
viewBox=cv,
|
||||
viewBox=self.cv,
|
||||
name=name,
|
||||
**kwargs,
|
||||
)
|
||||
pi.chart_widget = self
|
||||
super().__init__(
|
||||
background=hcolor(view_color),
|
||||
viewBox=cv,
|
||||
viewBox=self.cv,
|
||||
# parent=None,
|
||||
# plotItem=None,
|
||||
# antialias=True,
|
||||
|
@ -892,7 +901,9 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
# give viewbox as reference to chart
|
||||
# allowing for kb controls and interactions on **this** widget
|
||||
# (see our custom view mode in `._interactions.py`)
|
||||
cv.chart = self
|
||||
self.cv.chart = self
|
||||
|
||||
self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
|
||||
|
||||
# ensure internal pi matches
|
||||
assert self.cv is self.plotItem.vb
|
||||
|
@ -908,7 +919,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
# self.setViewportMargins(0, 0, 0, 0)
|
||||
|
||||
# registry of overlay curve names
|
||||
self._flows: dict[str, Flow] = {}
|
||||
self._vizs: dict[str, Viz] = {}
|
||||
|
||||
self.feed: Feed | None = None
|
||||
|
||||
|
@ -921,10 +932,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
# show background grid
|
||||
self.showGrid(x=False, y=True, alpha=0.3)
|
||||
|
||||
self.cv.enable_auto_yrange()
|
||||
|
||||
self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
|
||||
|
||||
# indempotent startup flag for auto-yrange subsys
|
||||
# to detect the "first time" y-domain graphics begin
|
||||
# to be shown in the (main) graphics view.
|
||||
|
@ -951,41 +958,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
def focus(self) -> None:
|
||||
self.view.setFocus()
|
||||
|
||||
def _set_xlimits(
|
||||
self,
|
||||
xfirst: int,
|
||||
xlast: int
|
||||
) -> None:
|
||||
"""Set view limits (what's shown in the main chart "pane")
|
||||
based on max/min x/y coords.
|
||||
"""
|
||||
self.setLimits(
|
||||
xMin=xfirst,
|
||||
xMax=xlast,
|
||||
minXRange=_min_points_to_show,
|
||||
)
|
||||
|
||||
def view_range(self) -> tuple[int, int]:
|
||||
vr = self.viewRect()
|
||||
return int(vr.left()), int(vr.right())
|
||||
|
||||
def bars_range(self) -> tuple[int, int, int, int]:
|
||||
'''
|
||||
Return a range tuple for the bars present in view.
|
||||
|
||||
'''
|
||||
main_flow = self._flows[self.name]
|
||||
ifirst, l, lbar, rbar, r, ilast = main_flow.datums_range()
|
||||
return l, lbar, rbar, r
|
||||
|
||||
def curve_width_pxs(
|
||||
self,
|
||||
) -> float:
|
||||
_, lbar, rbar, _ = self.bars_range()
|
||||
return self.view.mapViewToDevice(
|
||||
QLineF(lbar, 0, rbar, 0)
|
||||
).length()
|
||||
|
||||
def pre_l1_xs(self) -> tuple[float, float]:
|
||||
'''
|
||||
Return the view x-coord for the value just before
|
||||
|
@ -994,11 +966,16 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
|
||||
'''
|
||||
line_end, marker_right, yaxis_x = self.marker_right_points()
|
||||
view = self.view
|
||||
line = view.mapToView(
|
||||
line = self.view.mapToView(
|
||||
QLineF(line_end, 0, yaxis_x, 0)
|
||||
)
|
||||
return line.x1(), line.length()
|
||||
linex, linelen = line.x1(), line.length()
|
||||
# print(
|
||||
# f'line: {line}\n'
|
||||
# f'linex: {linex}\n'
|
||||
# f'linelen: {linelen}\n'
|
||||
# )
|
||||
return linex, linelen
|
||||
|
||||
def marker_right_points(
|
||||
self,
|
||||
|
@ -1016,15 +993,22 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
'''
|
||||
# TODO: compute some sensible maximum value here
|
||||
# and use a humanized scheme to limit to that length.
|
||||
l1_len = self._max_l1_line_len
|
||||
from ._l1 import L1Label
|
||||
l1_len = abs(L1Label._x_br_offset)
|
||||
ryaxis = self.getAxis('right')
|
||||
|
||||
r_axis_x = ryaxis.pos().x()
|
||||
up_to_l1_sc = r_axis_x - l1_len - 10
|
||||
|
||||
up_to_l1_sc = r_axis_x - l1_len
|
||||
marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
|
||||
line_end = marker_right - (6/16 * marker_size)
|
||||
# line_end = marker_right - (6/16 * marker_size)
|
||||
line_end = marker_right - marker_size
|
||||
|
||||
# print(
|
||||
# f'r_axis_x: {r_axis_x}\n'
|
||||
# f'up_to_l1_sc: {up_to_l1_sc}\n'
|
||||
# f'marker_right: {marker_right}\n'
|
||||
# f'line_end: {line_end}\n'
|
||||
# )
|
||||
return line_end, marker_right, r_axis_x
|
||||
|
||||
def default_view(
|
||||
|
@ -1038,95 +1022,45 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
Set the view box to the "default" startup view of the scene.
|
||||
|
||||
'''
|
||||
flow = self._flows.get(self.name)
|
||||
if not flow:
|
||||
log.warning(f'`Flow` for {self.name} not loaded yet?')
|
||||
viz = self.get_viz(self.name)
|
||||
|
||||
if not viz:
|
||||
log.warning(f'`Viz` for {self.name} not loaded yet?')
|
||||
return
|
||||
|
||||
arr = flow.shm.array
|
||||
index = arr['index']
|
||||
# times = arr['time']
|
||||
|
||||
# these will be epoch time floats
|
||||
xfirst, xlast = index[0], index[-1]
|
||||
l, lbar, rbar, r = self.bars_range()
|
||||
|
||||
view = self.view
|
||||
|
||||
if (
|
||||
rbar < 0
|
||||
or l < xfirst
|
||||
or l < 0
|
||||
or (rbar - lbar) < 6
|
||||
):
|
||||
# TODO: set fixed bars count on screen that approx includes as
|
||||
# many bars as possible before a downsample line is shown.
|
||||
begin = xlast - bars_from_y
|
||||
view.setXRange(
|
||||
min=begin,
|
||||
max=xlast,
|
||||
padding=0,
|
||||
)
|
||||
# re-get range
|
||||
l, lbar, rbar, r = self.bars_range()
|
||||
|
||||
# we get the L1 spread label "length" in view coords
|
||||
# terms now that we've scaled either by user control
|
||||
# or to the default set of bars as per the immediate block
|
||||
# above.
|
||||
if not y_offset:
|
||||
marker_pos, l1_len = self.pre_l1_xs()
|
||||
end = xlast + l1_len + 1
|
||||
else:
|
||||
end = xlast + y_offset + 1
|
||||
|
||||
begin = end - (r - l)
|
||||
|
||||
# for debugging
|
||||
# print(
|
||||
# # f'bars range: {brange}\n'
|
||||
# f'xlast: {xlast}\n'
|
||||
# f'marker pos: {marker_pos}\n'
|
||||
# f'l1 len: {l1_len}\n'
|
||||
# f'begin: {begin}\n'
|
||||
# f'end: {end}\n'
|
||||
# )
|
||||
|
||||
# remove any custom user yrange setttings
|
||||
if self._static_yrange == 'axis':
|
||||
self._static_yrange = None
|
||||
|
||||
view.setXRange(
|
||||
min=begin,
|
||||
max=end,
|
||||
padding=0,
|
||||
viz.default_view(
|
||||
bars_from_y,
|
||||
y_offset,
|
||||
do_ds,
|
||||
)
|
||||
|
||||
if do_ds:
|
||||
self.view.maybe_downsample_graphics()
|
||||
view._set_yrange()
|
||||
|
||||
try:
|
||||
self.linked.graphics_cycle()
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def increment_view(
|
||||
self,
|
||||
steps: int = 1,
|
||||
datums: int = 1,
|
||||
vb: Optional[ChartView] = None,
|
||||
|
||||
) -> None:
|
||||
"""
|
||||
Increment the data view one step to the right thus "following"
|
||||
the current time slot/step/bar.
|
||||
'''
|
||||
Increment the data view ``datums``` steps toward y-axis thus
|
||||
"following" the current time slot/step/bar.
|
||||
|
||||
"""
|
||||
l, r = self.view_range()
|
||||
'''
|
||||
view = vb or self.view
|
||||
viz = self.main_viz
|
||||
l, r = viz.view_range()
|
||||
x_shift = viz.index_step() * datums
|
||||
|
||||
if datums >= 300:
|
||||
print("FUCKING FIX THE GLOBAL STEP BULLSHIT")
|
||||
# breakpoint()
|
||||
return
|
||||
|
||||
view.setXRange(
|
||||
min=l + steps,
|
||||
max=r + steps,
|
||||
min=l + x_shift,
|
||||
max=r + x_shift,
|
||||
|
||||
# TODO: holy shit, wtf dude... why tf would this not be 0 by
|
||||
# default... speechless.
|
||||
|
@ -1173,6 +1107,12 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
pi.chart_widget = self
|
||||
pi.hideButtons()
|
||||
|
||||
# hide all axes not named by ``axis_side``
|
||||
for axname in (
|
||||
({'bottom'} | allowed_sides) - {axis_side}
|
||||
):
|
||||
pi.hideAxis(axname)
|
||||
|
||||
# compose this new plot's graphics with the current chart's
|
||||
# existing one but with separate axes as neede and specified.
|
||||
self.pi_overlay.add_plotitem(
|
||||
|
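The axis-hiding loop above is plain set arithmetic: every candidate side plus 'bottom', minus the one side actually requested, gets passed to ``hideAxis()``. A tiny sketch of the set expression (the ``allowed_sides`` value here is an assumption):

# sketch: which axes get hidden for a requested ``axis_side``
allowed_sides = {'left', 'right'}   # assumed candidate y-axis sides
axis_side = 'left'

to_hide = ({'bottom'} | allowed_sides) - {axis_side}
assert to_hide == {'bottom', 'right'}
# each remaining name would then be passed to ``pi.hideAxis(name)``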
@ -1186,14 +1126,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
link_axes=(0,),
|
||||
)
|
||||
|
||||
# connect auto-yrange callbacks *from* this new
|
||||
# view **to** this parent and likewise *from* the
|
||||
# main/parent chart back *to* the created overlay.
|
||||
cv.enable_auto_yrange(src_vb=self.view)
|
||||
# makes it so that interaction on the new overlay will reflect
|
||||
# back on the main chart (which overlay was added to).
|
||||
self.view.enable_auto_yrange(src_vb=cv)
|
||||
|
||||
# add axis title
|
||||
# TODO: do we want this API to still work?
|
||||
# raxis = pi.getAxis('right')
|
||||
|
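The pair of ``enable_auto_yrange()`` calls above wires the y-autorange callbacks in both directions so interaction on either view re-fits the other. A rough sketch of that symmetric registration with a stand-in view class (the real ``ChartView`` API is not shown here beyond the method name):

# sketch: symmetric auto-yrange linking between a parent view and an overlay
class View:
    def __init__(self, name: str) -> None:
        self.name = name
        self._yrange_sources: list['View'] = []

    def enable_auto_yrange(self, src_vb: 'View') -> None:
        # remember which other view should trigger our y-autorange
        self._yrange_sources.append(src_vb)

parent = View('ohlc')
overlay = View('dolla_vlm')

# same pattern as the diff: overlay listens to parent and vice versa
overlay.enable_auto_yrange(src_vb=parent)
parent.enable_auto_yrange(src_vb=overlay)

assert parent in overlay._yrange_sources
assert overlay in parent._yrange_sources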
@ -1220,7 +1152,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
|
||||
**graphics_kwargs,
|
||||
|
||||
) -> Flow:
|
||||
) -> Viz:
|
||||
'''
|
||||
Draw a "curve" (line plot graphics) for the provided data in
|
||||
the input shm array ``shm``.
|
||||
|
@ -1233,8 +1165,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
|
||||
if is_ohlc:
|
||||
graphics = BarItems(
|
||||
linked=self.linked,
|
||||
plotitem=pi,
|
||||
color=color,
|
||||
name=name,
|
||||
**graphics_kwargs,
|
||||
|
@ -1254,17 +1184,27 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
**graphics_kwargs,
|
||||
)
|
||||
|
||||
flow = self._flows[data_key] = Flow(
|
||||
viz = self._vizs[data_key] = Viz(
|
||||
data_key,
|
||||
pi,
|
||||
shm,
|
||||
flume,
|
||||
|
||||
is_ohlc=is_ohlc,
|
||||
# register curve graphics with this flow
|
||||
# register curve graphics with this viz
|
||||
graphics=graphics,
|
||||
)
|
||||
assert isinstance(flow.shm, ShmArray)
|
||||
|
||||
# connect auto-yrange callbacks *from* this new
|
||||
# view **to** this parent and likewise *from* the
|
||||
# main/parent chart back *to* the created overlay.
|
||||
pi.vb.enable_auto_yrange(
|
||||
src_vb=self.view,
|
||||
viz=viz,
|
||||
)
|
||||
|
||||
pi.viz = viz
|
||||
assert isinstance(viz.shm, ShmArray)
|
||||
|
||||
# TODO: this probably needs its own method?
|
||||
if overlay:
|
||||
|
@ -1276,17 +1216,21 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
pi = overlay
|
||||
|
||||
if add_sticky:
|
||||
axis = pi.getAxis(add_sticky)
|
||||
if pi.name not in axis._stickies:
|
||||
|
||||
if pi is not self.plotItem:
|
||||
overlay = self.pi_overlay
|
||||
assert pi in overlay.overlays
|
||||
overlay_axis = overlay.get_axis(
|
||||
pi,
|
||||
add_sticky,
|
||||
)
|
||||
assert overlay_axis is axis
|
||||
if pi is not self.plotItem:
|
||||
# overlay = self.pi_overlay
|
||||
# assert pi in overlay.overlays
|
||||
overlay = self.pi_overlay
|
||||
assert pi in overlay.overlays
|
||||
axis = overlay.get_axis(
|
||||
pi,
|
||||
add_sticky,
|
||||
)
|
||||
|
||||
else:
|
||||
axis = pi.getAxis(add_sticky)
|
||||
|
||||
if pi.name not in axis._stickies:
|
||||
|
||||
# TODO: UGH! just make this not here! we should
|
||||
# be making the sticky from code which has access
|
||||
|
@ -1306,7 +1250,8 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
# (we need something that avoids clutter on x-axis).
|
||||
axis.add_sticky(
|
||||
pi=pi,
|
||||
bg_color=color,
|
||||
fg_color='black',
|
||||
# bg_color=color,
|
||||
digits=digits,
|
||||
)
|
||||
|
||||
|
@ -1321,7 +1266,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
# understand.
|
||||
pi.addItem(graphics)
|
||||
|
||||
return flow
|
||||
return viz
|
||||
|
||||
def draw_ohlc(
|
||||
self,
|
||||
|
@ -1332,7 +1277,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
array_key: Optional[str] = None,
|
||||
**draw_curve_kwargs,
|
||||
|
||||
) -> Flow:
|
||||
) -> Viz:
|
||||
'''
|
||||
Draw OHLC datums to chart.
|
||||
|
||||
|
@ -1346,53 +1291,6 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
**draw_curve_kwargs,
|
||||
)
|
||||
|
||||
def update_graphics_from_flow(
|
||||
self,
|
||||
graphics_name: str,
|
||||
array_key: Optional[str] = None,
|
||||
|
||||
**kwargs,
|
||||
|
||||
) -> pg.GraphicsObject:
|
||||
'''
|
||||
Update the named internal graphics from ``array``.
|
||||
|
||||
'''
|
||||
flow = self._flows[array_key or graphics_name]
|
||||
return flow.update_graphics(
|
||||
array_key=array_key,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# def _label_h(self, yhigh: float, ylow: float) -> float:
|
||||
# # compute contents label "height" in view terms
|
||||
# # to avoid having data "contents" overlap with them
|
||||
# if self._labels:
|
||||
# label = self._labels[self.name][0]
|
||||
|
||||
# rect = label.itemRect()
|
||||
# tl, br = rect.topLeft(), rect.bottomRight()
|
||||
# vb = self.plotItem.vb
|
||||
|
||||
# try:
|
||||
# # on startup labels might not yet be rendered
|
||||
# top, bottom = (vb.mapToView(tl).y(), vb.mapToView(br).y())
|
||||
|
||||
# # XXX: magic hack, how do we compute exactly?
|
||||
# label_h = (top - bottom) * 0.42
|
||||
|
||||
# except np.linalg.LinAlgError:
|
||||
# label_h = 0
|
||||
# else:
|
||||
# label_h = 0
|
||||
|
||||
# # print(f'label height {self.name}: {label_h}')
|
||||
|
||||
# if label_h > yhigh - ylow:
|
||||
# label_h = 0
|
||||
|
||||
# print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")
|
||||
|
||||
# TODO: pretty sure we can just call the cursor
|
||||
# directly not? i don't see why we need special "signal proxies"
|
||||
# for this lul..
|
||||
|
@ -1419,49 +1317,39 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
If ``bars_range`` is provided use that range.
|
||||
|
||||
'''
|
||||
profiler = Profiler(
|
||||
msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
delayed=True,
|
||||
)
|
||||
|
||||
# TODO: here we should instead look up the ``Flow.shm.array``
|
||||
# TODO: here we should instead look up the ``Viz.shm.array``
|
||||
# and read directly from shm to avoid copying to memory first
|
||||
# and then reading it again here.
|
||||
flow_key = name or self.name
|
||||
flow = self._flows.get(flow_key)
|
||||
viz_key = name or self.name
|
||||
viz = self._vizs.get(viz_key)
|
||||
if viz is None:
|
||||
log.error(f"viz {viz_key} doesn't exist in chart {self.name} !?")
|
||||
return 0, 0
|
||||
|
||||
res = viz.maxmin()
|
||||
|
||||
if (
|
||||
flow is None
|
||||
res is None
|
||||
):
|
||||
log.error(f"flow {flow_key} doesn't exist in chart {self.name} !?")
|
||||
key = res = 0, 0
|
||||
|
||||
mxmn = 0, 0
|
||||
if not self._on_screen:
|
||||
self.default_view(do_ds=False)
|
||||
self._on_screen = True
|
||||
else:
|
||||
(
|
||||
first,
|
||||
l,
|
||||
lbar,
|
||||
rbar,
|
||||
r,
|
||||
last,
|
||||
) = bars_range or flow.datums_range()
|
||||
profiler(f'{self.name} got bars range')
|
||||
x_range, read_slc, mxmn = res
|
||||
|
||||
key = round(lbar), round(rbar)
|
||||
res = flow.maxmin(*key)
|
||||
return mxmn
|
||||
|
||||
if (
|
||||
res is None
|
||||
):
|
||||
log.warning(
|
||||
f"{flow_key} no mxmn for bars_range => {key} !?"
|
||||
)
|
||||
res = 0, 0
|
||||
if not self._on_screen:
|
||||
self.default_view(do_ds=False)
|
||||
self._on_screen = True
|
||||
def get_viz(
|
||||
self,
|
||||
key: str,
|
||||
) -> Viz:
|
||||
'''
|
||||
Try to get an underlying ``Viz`` by key.
|
||||
|
||||
profiler(f'yrange mxmn: {key} -> {res}')
|
||||
# print(f'{flow_key} yrange mxmn: {key} -> {res}')
|
||||
return res
|
||||
'''
|
||||
return self._vizs.get(key)
|
||||
|
||||
@property
|
||||
def main_viz(self) -> Viz:
|
||||
return self.get_viz(self.name)
|
||||
|
|
|
@ -71,7 +71,7 @@ class LineDot(pg.CurvePoint):
|
|||
|
||||
plot: ChartPlotWidget, # type: ignore # noqa
|
||||
pos=None,
|
||||
color: str = 'default_light',
|
||||
color: str = 'bracket',
|
||||
|
||||
) -> None:
|
||||
# scale from dpi aware font size
|
||||
|
@ -198,12 +198,11 @@ class ContentsLabel(pg.LabelItem):
|
|||
self,
|
||||
|
||||
name: str,
|
||||
index: int,
|
||||
ix: int,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> None:
|
||||
# this being "html" is the dumbest shit :eyeroll:
|
||||
first = array[0]['index']
|
||||
|
||||
self.setText(
|
||||
"<b>i</b>:{index}<br/>"
|
||||
|
@ -216,7 +215,7 @@ class ContentsLabel(pg.LabelItem):
|
|||
"<b>C</b>:{}<br/>"
|
||||
"<b>V</b>:{}<br/>"
|
||||
"<b>wap</b>:{}".format(
|
||||
*array[index - first][
|
||||
*array[ix][
|
||||
[
|
||||
'time',
|
||||
'open',
|
||||
|
@ -228,7 +227,7 @@ class ContentsLabel(pg.LabelItem):
|
|||
]
|
||||
],
|
||||
name=name,
|
||||
index=index,
|
||||
index=ix,
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -236,15 +235,12 @@ class ContentsLabel(pg.LabelItem):
|
|||
self,
|
||||
|
||||
name: str,
|
||||
index: int,
|
||||
ix: int,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> None:
|
||||
|
||||
first = array[0]['index']
|
||||
if index < array[-1]['index'] and index > first:
|
||||
data = array[index - first][name]
|
||||
self.setText(f"{name}: {data:.2f}")
|
||||
data = array[ix][name]
|
||||
self.setText(f"{name}: {data:.2f}")
|
||||
|
||||
|
||||
class ContentsLabels:
|
||||
|
@ -269,17 +265,20 @@ class ContentsLabels:
|
|||
|
||||
def update_labels(
|
||||
self,
|
||||
index: int,
|
||||
x_in: int,
|
||||
|
||||
) -> None:
|
||||
for chart, name, label, update in self._labels:
|
||||
|
||||
flow = chart._flows[name]
|
||||
array = flow.shm.array
|
||||
viz = chart.get_viz(name)
|
||||
array = viz.shm.array
|
||||
index = array[viz.index_field]
|
||||
start = index[0]
|
||||
stop = index[-1]
|
||||
|
||||
if not (
|
||||
index >= 0
|
||||
and index < array[-1]['index']
|
||||
x_in >= start
|
||||
and x_in <= stop
|
||||
):
|
||||
# out of range
|
||||
print('WTF out of range?')
|
||||
|
@ -288,7 +287,10 @@ class ContentsLabels:
|
|||
# call provided update func with data point
|
||||
try:
|
||||
label.show()
|
||||
update(index, array)
|
||||
ix = np.searchsorted(index, x_in)
|
||||
if ix > len(array):
|
||||
breakpoint()
|
||||
update(ix, array)
|
||||
|
||||
except IndexError:
|
||||
log.exception(f"Failed to update label: {name}")
|
||||
|
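With a time-based x-domain the cursor position handed to ``update_labels()`` is an epoch value, not an array row, so the lookup uses ``np.searchsorted`` on the index column as the new code does. A small standalone sketch of that mapping (hypothetical bar times):

import numpy as np

# sketch: map a cursor x value (epoch seconds) to the nearest array row
times = np.array([100., 160., 220., 280., 340.])  # hypothetical 60s bars
x_in = 250.

if times[0] <= x_in <= times[-1]:
    ix = np.searchsorted(times, x_in)
    # ix indexes the first time >= x_in; here the 280.0 bar
    assert ix == 3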
@ -349,7 +351,7 @@ class Cursor(pg.GraphicsObject):
|
|||
# XXX: not sure why these are instance variables?
|
||||
# It's not like we can change them on the fly..?
|
||||
self.pen = pg.mkPen(
|
||||
color=hcolor('default'),
|
||||
color=hcolor('bracket'),
|
||||
style=QtCore.Qt.DashLine,
|
||||
)
|
||||
self.lines_pen = pg.mkPen(
|
||||
|
@ -365,7 +367,7 @@ class Cursor(pg.GraphicsObject):
|
|||
self._lw = self.pixelWidth() * self.lines_pen.width()
|
||||
|
||||
# xhair label's color name
|
||||
self.label_color: str = 'default'
|
||||
self.label_color: str = 'bracket'
|
||||
|
||||
self._y_label_update: bool = True
|
||||
|
||||
|
@ -482,25 +484,32 @@ class Cursor(pg.GraphicsObject):
|
|||
|
||||
def add_curve_cursor(
|
||||
self,
|
||||
plot: ChartPlotWidget, # noqa
|
||||
chart: ChartPlotWidget, # noqa
|
||||
curve: 'PlotCurveItem', # noqa
|
||||
|
||||
) -> LineDot:
|
||||
# if this plot contains curves add line dot "cursors" to denote
|
||||
# if this chart contains curves add line dot "cursors" to denote
|
||||
# the current sample under the mouse
|
||||
main_flow = plot._flows[plot.name]
|
||||
main_viz = chart.get_viz(chart.name)
|
||||
|
||||
# read out last index
|
||||
i = main_flow.shm.array[-1]['index']
|
||||
i = main_viz.shm.array[-1]['index']
|
||||
cursor = LineDot(
|
||||
curve,
|
||||
index=i,
|
||||
plot=plot
|
||||
plot=chart
|
||||
)
|
||||
plot.addItem(cursor)
|
||||
self.graphics[plot].setdefault('cursors', []).append(cursor)
|
||||
chart.addItem(cursor)
|
||||
self.graphics[chart].setdefault('cursors', []).append(cursor)
|
||||
return cursor
|
||||
|
||||
def mouseAction(self, action, plot): # noqa
|
||||
def mouseAction(
|
||||
self,
|
||||
action: str,
|
||||
plot: ChartPlotWidget,
|
||||
|
||||
) -> None: # noqa
|
||||
|
||||
log.debug(f"{(action, plot.name)}")
|
||||
if action == 'Enter':
|
||||
self.active_plot = plot
|
||||
|
|
|
@ -36,10 +36,6 @@ from PyQt5.QtGui import (
|
|||
)
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._style import hcolor
|
||||
# from ._compression import (
|
||||
# # ohlc_to_m4_line,
|
||||
# ds_m4,
|
||||
# )
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
|
||||
|
@ -55,7 +51,117 @@ _line_styles: dict[str, int] = {
|
|||
}
|
||||
|
||||
|
||||
class Curve(pg.GraphicsObject):
|
||||
class FlowGraphic(pg.GraphicsObject):
|
||||
'''
|
||||
Base class with minimal interface for `QPainterPath` implemented,
|
||||
real-time updated "data flow" graphics.
|
||||
|
||||
See subtypes below.
|
||||
|
||||
'''
|
||||
# sub-type customization methods
|
||||
declare_paintables: Callable | None = None
|
||||
sub_paint: Callable | None = None
|
||||
|
||||
# XXX-NOTE-XXX: graphics caching B)
|
||||
# see explanation for different caching modes:
|
||||
# https://stackoverflow.com/a/39410081
|
||||
cache_mode: int = QGraphicsItem.DeviceCoordinateCache
|
||||
# XXX: WARNING item caching seems to only be useful
|
||||
# if we don't re-generate the entire QPainterPath every time
|
||||
# don't ever use this - it's a colossal nightmare of artefacts
|
||||
# and is disastrous for performance.
|
||||
# QGraphicsItem.ItemCoordinateCache
|
||||
# TODO: still open questions about coord-caching that we should
|
||||
# probably talk to a core dev about:
|
||||
# - if this makes transform interactions slower (such as zooming)
|
||||
# and if so maybe if/when we implement a "history" mode for the
|
||||
# view we disable this in that mode?
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
name: str | None = None,
|
||||
|
||||
# line styling
|
||||
color: str = 'bracket',
|
||||
last_step_color: str | None = None,
|
||||
fill_color: Optional[str] = None,
|
||||
style: str = 'solid',
|
||||
|
||||
**kwargs
|
||||
|
||||
) -> None:
|
||||
|
||||
self._name = name
|
||||
|
||||
# primary graphics item used for history
|
||||
self.path: QPainterPath = QPainterPath()
|
||||
|
||||
# additional path that can be optionally used for appends which
|
||||
# tries to avoid triggering an update/redraw of the presumably
|
||||
# larger historical ``.path`` above. the flag to enable
|
||||
# this behaviour is found in `Renderer.render()`.
|
||||
self.fast_path: QPainterPath | None = None
|
||||
|
||||
# TODO: evaluating the path capacity stuff and see
|
||||
# if it really makes much diff pre-allocating it.
|
||||
# self._last_cap: int = 0
|
||||
# cap = path.capacity()
|
||||
# if cap != self._last_cap:
|
||||
# print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
|
||||
# self._last_cap = cap
|
||||
|
||||
# all history of curve is drawn in single px thickness
|
||||
self._color: str = color
|
||||
pen = pg.mkPen(hcolor(color), width=1)
|
||||
pen.setStyle(_line_styles[style])
|
||||
|
||||
if 'dash' in style:
|
||||
pen.setDashPattern([8, 3])
|
||||
|
||||
self._pen = pen
|
||||
self._brush = pg.functions.mkBrush(
|
||||
hcolor(fill_color or color)
|
||||
)
|
||||
|
||||
# last segment is drawn in 2px thickness for emphasis
|
||||
if last_step_color:
|
||||
self.last_step_pen = pg.mkPen(
|
||||
hcolor(last_step_color),
|
||||
width=2,
|
||||
)
|
||||
else:
|
||||
self.last_step_pen = pg.mkPen(
|
||||
self._pen,
|
||||
width=2,
|
||||
)
|
||||
|
||||
self._last_line: QLineF = QLineF()
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# apply cache mode
|
||||
self.setCacheMode(self.cache_mode)
|
||||
|
||||
def x_uppx(self) -> int:
|
||||
|
||||
px_vecs = self.pixelVectors()[0]
|
||||
if px_vecs:
|
||||
return px_vecs.x()
|
||||
else:
|
||||
return 0
|
||||
|
||||
def x_last(self) -> float | None:
|
||||
'''
|
||||
Return the last most x value of the last line segment or if not
|
||||
drawn yet, ``None``.
|
||||
|
||||
'''
|
||||
return self._last_line.x1() if self._last_line else None
|
||||
|
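``x_uppx()`` reports the graphic's x-units-per-pixel, the quantity all of the downsampling decisions below key off: once a single pixel column spans several datums it is time to flip to a flattened/downsampled representation. A rough numeric sketch (the pixel count is an assumption, not the real Qt pixel-vector math):

# sketch: units-per-pixel as the downsample trigger
view_width_px = 1000          # device pixels across the plot (assumption)
datums_in_view = 6000         # bars currently inside the view range

uppx = datums_in_view / view_width_px   # 6 datums share each pixel column
x_gt = 6                                # threshold used by render_baritems()

should_line = uppx >= x_gt    # flip OHLC bars -> flattened line
assert should_line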
||||
|
||||
class Curve(FlowGraphic):
|
||||
'''
|
||||
A faster, simpler, append friendly version of
|
||||
``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
|
||||
|
@ -72,7 +178,7 @@ class Curve(pg.GraphicsObject):
|
|||
lower level graphics data can be rendered in different threads and
|
||||
then read and drawn in this main thread without having to worry
|
||||
about dealing with Qt's concurrency primitives. See
|
||||
``piker.ui._flows.Renderer`` for details and logic related to lower
|
||||
``piker.ui._render.Renderer`` for details and logic related to lower
|
||||
level path generation and incremental update. The main differences in
|
||||
the path generation code include:
|
||||
|
||||
|
@ -84,125 +190,38 @@ class Curve(pg.GraphicsObject):
|
|||
updates don't trigger a full path redraw.
|
||||
|
||||
'''
|
||||
|
||||
# sub-type customization methods
|
||||
declare_paintables: Optional[Callable] = None
|
||||
sub_paint: Optional[Callable] = None
|
||||
# TODO: can we remove this?
|
||||
# sub_br: Optional[Callable] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
|
||||
step_mode: bool = False,
|
||||
color: str = 'default_lightest',
|
||||
fill_color: Optional[str] = None,
|
||||
style: str = 'solid',
|
||||
name: Optional[str] = None,
|
||||
use_fpath: bool = True,
|
||||
# color: str = 'default_lightest',
|
||||
# fill_color: Optional[str] = None,
|
||||
# style: str = 'solid',
|
||||
|
||||
**kwargs
|
||||
|
||||
) -> None:
|
||||
|
||||
self._name = name
|
||||
|
||||
# brutaaalll, see comments within..
|
||||
self.yData = None
|
||||
self.xData = None
|
||||
|
||||
# self._last_cap: int = 0
|
||||
self.path: Optional[QPainterPath] = None
|
||||
|
||||
# additional path used for appends which tries to avoid
|
||||
# triggering an update/redraw of the presumably larger
|
||||
# historical ``.path`` above.
|
||||
self.use_fpath = use_fpath
|
||||
self.fast_path: Optional[QPainterPath] = None
|
||||
|
||||
# TODO: we can probably just dispense with the parent since
|
||||
# we're basically only using the pen setting now...
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# all history of curve is drawn in single px thickness
|
||||
pen = pg.mkPen(hcolor(color))
|
||||
pen.setStyle(_line_styles[style])
|
||||
|
||||
if 'dash' in style:
|
||||
pen.setDashPattern([8, 3])
|
||||
|
||||
self._pen = pen
|
||||
|
||||
# last segment is drawn in 2px thickness for emphasis
|
||||
# self.last_step_pen = pg.mkPen(hcolor(color), width=2)
|
||||
self.last_step_pen = pg.mkPen(pen, width=2)
|
||||
|
||||
self._last_line = QLineF()
|
||||
|
||||
# flat-top style histogram-like discrete curve
|
||||
# self._step_mode: bool = step_mode
|
||||
self._last_line: QLineF = QLineF()
|
||||
|
||||
# self._fill = True
|
||||
self._brush = pg.functions.mkBrush(hcolor(fill_color or color))
|
||||
|
||||
# NOTE: this setting seems to mostly prevent redraws on mouse
|
||||
# interaction which is a huge boon for avg interaction latency.
|
||||
|
||||
# TODO: one question still remaining is if this makes transform
|
||||
# interactions slower (such as zooming) and if so maybe if/when
|
||||
# we implement a "history" mode for the view we disable this in
|
||||
# that mode?
|
||||
# don't enable caching by default for the case where the
|
||||
# only thing drawn is the "last" line segment which can
|
||||
# have a weird artifact where it won't be fully drawn to its
|
||||
# endpoint (something we saw on trade rate curves)
|
||||
self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
# XXX: see explanation for different caching modes:
|
||||
# https://stackoverflow.com/a/39410081
|
||||
# seems to only be useful if we don't re-generate the entire
|
||||
# QPainterPath every time
|
||||
# curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
# don't ever use this - it's a colossal nightmare of artefacts
|
||||
# and is disastrous for performance.
|
||||
# curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
|
||||
|
||||
# allow sub-type customization
|
||||
declare = self.declare_paintables
|
||||
if declare:
|
||||
declare()
|
||||
|
||||
# TODO: probably stick this in a new parent
|
||||
# type which will contain our own version of
|
||||
# what ``PlotCurveItem`` had in terms of base
|
||||
# functionality? A `FlowGraphic` maybe?
|
||||
def x_uppx(self) -> int:
|
||||
|
||||
px_vecs = self.pixelVectors()[0]
|
||||
if px_vecs:
|
||||
xs_in_px = px_vecs.x()
|
||||
return round(xs_in_px)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def px_width(self) -> float:
|
||||
|
||||
vb = self.getViewBox()
|
||||
if not vb:
|
||||
return 0
|
||||
|
||||
vr = self.viewRect()
|
||||
l, r = int(vr.left()), int(vr.right())
|
||||
|
||||
start, stop = self._xrange
|
||||
lbar = max(l, start)
|
||||
rbar = min(r, stop)
|
||||
|
||||
return vb.mapViewToDevice(
|
||||
QLineF(lbar, 0, rbar, 0)
|
||||
).length()
|
||||
|
||||
# XXX: lol brutal, the internals of `CurvePoint` (inherited by
|
||||
# our `LineDot`) required ``.getData()`` to work..
|
||||
def getData(self):
|
||||
|
@ -327,14 +346,10 @@ class Curve(pg.GraphicsObject):
|
|||
|
||||
p.setPen(self.last_step_pen)
|
||||
p.drawLine(self._last_line)
|
||||
profiler('.drawLine()')
|
||||
p.setPen(self._pen)
|
||||
profiler('last datum `.drawLine()`')
|
||||
|
||||
p.setPen(self._pen)
|
||||
path = self.path
|
||||
# cap = path.capacity()
|
||||
# if cap != self._last_cap:
|
||||
# print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
|
||||
# self._last_cap = cap
|
||||
|
||||
if path:
|
||||
p.drawPath(path)
|
||||
|
@ -357,22 +372,30 @@ class Curve(pg.GraphicsObject):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
# default line draw last call
|
||||
# with self.reset_cache():
|
||||
x = render_data['index']
|
||||
y = render_data[array_key]
|
||||
x = src_data[index_field]
|
||||
y = src_data[array_key]
|
||||
|
||||
x_last = x[-1]
|
||||
x_2last = x[-2]
|
||||
|
||||
# draw the "current" step graphic segment so it
|
||||
# lines up with the "middle" of the current
|
||||
# (OHLC) sample.
|
||||
self._last_line = QLineF(
|
||||
x[-2], y[-2],
|
||||
x[-1], y[-1],
|
||||
|
||||
# NOTE: currently we draw in x-domain
|
||||
# from last datum to current such that
|
||||
# the end of line touches the "beginning"
|
||||
# of the current datum step span.
|
||||
x_2last, y[-2],
|
||||
x_last, y[-1],
|
||||
)
|
||||
|
||||
return x, y
|
||||
|
@ -384,17 +407,20 @@ class Curve(pg.GraphicsObject):
|
|||
# (via it's max / min) even when highly zoomed out.
|
||||
class FlattenedOHLC(Curve):
|
||||
|
||||
# avoids strange dragging/smearing artifacts when panning..
|
||||
cache_mode: int = QGraphicsItem.NoCache
|
||||
|
||||
def draw_last_datum(
|
||||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
lasts = src_data[-2:]
|
||||
x = lasts['index']
|
||||
x = lasts[index_field]
|
||||
y = lasts['close']
|
||||
|
||||
# draw the "current" step graphic segment so it
|
||||
|
@ -418,9 +444,9 @@ class StepCurve(Curve):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
w: float = 0.5,
|
||||
|
||||
|
@ -429,14 +455,13 @@ class StepCurve(Curve):
|
|||
# TODO: remove this and instead place all step curve
|
||||
# updating into pre-path data render callbacks.
|
||||
# full input data
|
||||
x = src_data['index']
|
||||
x = src_data[index_field]
|
||||
y = src_data[array_key]
|
||||
|
||||
x_last = x[-1]
|
||||
x_2last = x[-2]
|
||||
y_last = y[-1]
|
||||
step_size = x_last - x_2last
|
||||
half_step = step_size / 2
|
||||
|
||||
# lol, commenting this makes step curves
|
||||
# all "black" for me :eyeroll:..
|
||||
|
@ -445,7 +470,7 @@ class StepCurve(Curve):
|
|||
x_last, 0,
|
||||
)
|
||||
self._last_step_rect = QRectF(
|
||||
x_last - half_step, 0,
|
||||
x_last, 0,
|
||||
step_size, y_last,
|
||||
)
|
||||
return x, y
|
||||
|
@ -458,9 +483,3 @@ class StepCurve(Curve):
|
|||
# p.drawLines(*tuple(filter(bool, self._last_step_lines)))
|
||||
# p.drawRect(self._last_step_rect)
|
||||
p.fillRect(self._last_step_rect, self._brush)
|
||||
|
||||
# def sub_br(
|
||||
# self,
|
||||
# parent_br: QRectF | None = None,
|
||||
# ) -> QRectF:
|
||||
# return self._last_step_rect
|
||||
|
|
File diff suppressed because it is too large
1217  piker/ui/_display.py
File diff suppressed because it is too large
|
@ -377,7 +377,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
nbars = ixmx - ixmn + 1
|
||||
|
||||
chart = self._chart
|
||||
data = chart._flows[chart.name].shm.array[ixmn:ixmx]
|
||||
data = chart.get_viz(chart.name).shm.array[ixmn:ixmx]
|
||||
|
||||
if len(data):
|
||||
std = data['close'].std()
|
||||
|
|
|
@ -1,974 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
High level streaming graphics primitives.
|
||||
|
||||
This is an intermediate layer which associates real-time low latency
|
||||
graphics primitives with underlying FSP related data structures for fast
|
||||
incremental update.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
import msgspec
|
||||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
from PyQt5.QtGui import QPainterPath
|
||||
from PyQt5.QtCore import QLineF
|
||||
|
||||
from ..data._sharedmem import (
|
||||
ShmArray,
|
||||
)
|
||||
from ..data.feed import Flume
|
||||
from .._profile import (
|
||||
pg_profile_enabled,
|
||||
# ms_slower_then,
|
||||
)
|
||||
from ._pathops import (
|
||||
IncrementalFormatter,
|
||||
OHLCBarsFmtr, # Plain OHLC renderer
|
||||
OHLCBarsAsCurveFmtr, # OHLC converted to line
|
||||
StepCurveFmtr, # "step" curve (like for vlm)
|
||||
xy_downsample,
|
||||
)
|
||||
from ._ohlc import (
|
||||
BarItems,
|
||||
# bar_from_ohlc_row,
|
||||
)
|
||||
from ._curve import (
|
||||
Curve,
|
||||
StepCurve,
|
||||
FlattenedOHLC,
|
||||
)
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
def render_baritems(
|
||||
flow: Flow,
|
||||
graphics: BarItems,
|
||||
read: tuple[
|
||||
int, int, np.ndarray,
|
||||
int, int, np.ndarray,
|
||||
],
|
||||
profiler: Profiler,
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Graphics management logic for a ``BarItems`` object.
|
||||
|
||||
Mostly just logic to determine when and how to downsample an OHLC
|
||||
lines curve into a flattened line graphic and when to display one
|
||||
graphic or the other.
|
||||
|
||||
TODO: this should likely be moved into some kind of better abstraction
|
||||
layer, if not a `Renderer` then something just above it?
|
||||
|
||||
'''
|
||||
bars = graphics
|
||||
|
||||
# if no source data renderer exists create one.
|
||||
self = flow
|
||||
show_bars: bool = False
|
||||
|
||||
r = self._src_r
|
||||
if not r:
|
||||
show_bars = True
|
||||
|
||||
# OHLC bars path renderer
|
||||
r = self._src_r = Renderer(
|
||||
flow=self,
|
||||
fmtr=OHLCBarsFmtr(
|
||||
shm=flow.shm,
|
||||
flow=flow,
|
||||
_last_read=read,
|
||||
),
|
||||
)
|
||||
|
||||
ds_curve_r = Renderer(
|
||||
flow=self,
|
||||
fmtr=OHLCBarsAsCurveFmtr(
|
||||
shm=flow.shm,
|
||||
flow=flow,
|
||||
_last_read=read,
|
||||
),
|
||||
)
|
||||
|
||||
curve = FlattenedOHLC(
|
||||
name=f'{flow.name}_ds_ohlc',
|
||||
color=bars._color,
|
||||
)
|
||||
flow.ds_graphics = curve
|
||||
curve.hide()
|
||||
self.plot.addItem(curve)
|
||||
|
||||
# baseline "line" downsampled OHLC curve that should
|
||||
# kick on only when we reach a certain uppx threshold.
|
||||
self._render_table = (ds_curve_r, curve)
|
||||
|
||||
ds_r, curve = self._render_table
|
||||
|
||||
# do checks for whether or not we require downsampling:
|
||||
# - if we're **not** downsampling then we simply want to
|
||||
# render the bars graphics curve and update..
|
||||
# - if instead we are in a downsampling state then we need to
|
||||
x_gt = 6
|
||||
uppx = curve.x_uppx()
|
||||
in_line = should_line = curve.isVisible()
|
||||
if (
|
||||
in_line
|
||||
and uppx < x_gt
|
||||
):
|
||||
# print('FLIPPING TO BARS')
|
||||
should_line = False
|
||||
flow._in_ds = False
|
||||
|
||||
elif (
|
||||
not in_line
|
||||
and uppx >= x_gt
|
||||
):
|
||||
# print('FLIPPING TO LINE')
|
||||
should_line = True
|
||||
flow._in_ds = True
|
||||
|
||||
profiler(f'ds logic complete line={should_line}')
|
||||
|
||||
# do graphics updates
|
||||
if should_line:
|
||||
r = ds_r
|
||||
graphics = curve
|
||||
profiler('updated ds curve')
|
||||
|
||||
else:
|
||||
graphics = bars
|
||||
|
||||
if show_bars:
|
||||
bars.show()
|
||||
|
||||
changed_to_line = False
|
||||
if (
|
||||
not in_line
|
||||
and should_line
|
||||
):
|
||||
# change to line graphic
|
||||
log.info(
|
||||
f'downsampling to line graphic {self.name}'
|
||||
)
|
||||
bars.hide()
|
||||
curve.show()
|
||||
curve.update()
|
||||
changed_to_line = True
|
||||
|
||||
elif in_line and not should_line:
|
||||
# change to bars graphic
|
||||
log.info(f'showing bars graphic {self.name}')
|
||||
curve.hide()
|
||||
bars.show()
|
||||
bars.update()
|
||||
|
||||
return (
|
||||
graphics,
|
||||
r,
|
||||
{'read_from_key': False},
|
||||
should_line,
|
||||
changed_to_line,
|
||||
)
|
||||
|
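The bars-vs-line flip in ``render_baritems()`` is deliberately hysteretic: the mode only changes when the currently shown graphic disagrees with the ``x_gt`` uppx threshold, so zoom jitter around the threshold does not thrash between renderers. A condensed sketch of the same state machine:

# sketch: bars <-> flattened-line flip with the same checks as above
def next_mode(in_line: bool, uppx: float, x_gt: int = 6) -> bool:
    '''Return whether the line graphic should be shown after this update.'''
    if in_line and uppx < x_gt:
        return False        # zoomed back in -> show bars again
    if not in_line and uppx >= x_gt:
        return True         # zoomed out -> downsample to a line
    return in_line          # otherwise keep whatever is showing

assert next_mode(in_line=False, uppx=8) is True
assert next_mode(in_line=True, uppx=8) is True   # stays a line
assert next_mode(in_line=True, uppx=2) is False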
||||
|
||||
class Flow(msgspec.Struct): # , frozen=True):
|
||||
'''
|
||||
(Financial Signal-)Flow compound type which wraps a real-time
|
||||
shm array stream with displayed graphics (curves, charts)
|
||||
for high level access and control as well as efficient incremental
|
||||
update.
|
||||
|
||||
The intention is for this type to eventually be capable of shm-passing
|
||||
of incrementally updated graphics stream data between actors.
|
||||
|
||||
'''
|
||||
name: str
|
||||
plot: pg.PlotItem
|
||||
_shm: ShmArray
|
||||
flume: Flume
|
||||
graphics: Curve | BarItems
|
||||
|
||||
# for tracking y-mn/mx for y-axis auto-ranging
|
||||
yrange: tuple[float, float] = None
|
||||
|
||||
# in some cases a flow may want to change its
|
||||
# graphical "type" or, "form" when downsampling, to
|
||||
# start this is only ever an interpolation line.
|
||||
ds_graphics: Optional[Curve] = None
|
||||
|
||||
is_ohlc: bool = False
|
||||
render: bool = True # toggle for display loop
|
||||
|
||||
# downsampling state
|
||||
_last_uppx: float = 0
|
||||
_in_ds: bool = False
|
||||
|
||||
# map from uppx -> (downsampled data, incremental graphics)
|
||||
_src_r: Optional[Renderer] = None
|
||||
_render_table: dict[
|
||||
Optional[int],
|
||||
tuple[Renderer, pg.GraphicsItem],
|
||||
] = (None, None)
|
||||
|
||||
# TODO: hackery to be able to set a shm later
|
||||
# but whilst also allowing this type to be hashable,
|
||||
# likely will require serializable token that is used to attach
|
||||
# to the underlying shm ref after startup?
|
||||
# _shm: Optional[ShmArray] = None # currently, may be filled in "later"
|
||||
|
||||
# last read from shm (usually due to an update call)
|
||||
_last_read: Optional[np.ndarray] = None
|
||||
|
||||
# cache of y-range values per x-range input.
|
||||
_mxmns: dict[tuple[int, int], tuple[float, float]] = {}
|
||||
|
||||
@property
|
||||
def shm(self) -> ShmArray:
|
||||
return self._shm
|
||||
|
||||
# TODO: remove this and only allow setting through
|
||||
# private ``._shm`` attr?
|
||||
# @shm.setter
|
||||
# def shm(self, shm: ShmArray) -> ShmArray:
|
||||
# self._shm = shm
|
||||
|
||||
def maxmin(
|
||||
self,
|
||||
lbar: int,
|
||||
rbar: int,
|
||||
|
||||
) -> Optional[tuple[float, float]]:
|
||||
'''
|
||||
Compute the cached max and min y-range values for a given
|
||||
x-range determined by ``lbar`` and ``rbar`` or ``None``
|
||||
if no range can be determined (yet).
|
||||
|
||||
'''
|
||||
rkey = (lbar, rbar)
|
||||
cached_result = self._mxmns.get(rkey)
|
||||
if cached_result:
|
||||
return cached_result
|
||||
|
||||
shm = self.shm
|
||||
if shm is None:
|
||||
return None
|
||||
|
||||
arr = shm.array
|
||||
|
||||
# build relative indexes into shm array
|
||||
# TODO: should we just add/use a method
|
||||
# on the shm to do this?
|
||||
ifirst = arr[0]['index']
|
||||
slice_view = arr[
|
||||
lbar - ifirst:
|
||||
(rbar - ifirst) + 1
|
||||
]
|
||||
|
||||
if not slice_view.size:
|
||||
return None
|
||||
|
||||
elif self.yrange:
|
||||
mxmn = self.yrange
|
||||
# print(f'{self.name} M4 maxmin: {mxmn}')
|
||||
|
||||
else:
|
||||
if self.is_ohlc:
|
||||
ylow = np.min(slice_view['low'])
|
||||
yhigh = np.max(slice_view['high'])
|
||||
|
||||
else:
|
||||
view = slice_view[self.name]
|
||||
ylow = np.min(view)
|
||||
yhigh = np.max(view)
|
||||
|
||||
mxmn = ylow, yhigh
|
||||
# print(f'{self.name} MANUAL maxmin: {mxmin}')
|
||||
|
||||
# cache result for input range
|
||||
assert mxmn
|
||||
self._mxmns[rkey] = mxmn
|
||||
|
||||
return mxmn
|
||||
|
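``maxmin()`` memoizes the computed y-range per exact ``(lbar, rbar)`` key so repeated autorange passes over an unchanged view avoid re-scanning the slice. A compact standalone sketch of the same pattern over a structured array (field names mirror the OHLC schema used above):

import numpy as np

# sketch: per-x-range y-min/max with a result cache
_mxmns: dict[tuple[int, int], tuple[float, float]] = {}

def maxmin(arr: np.ndarray, lbar: int, rbar: int) -> tuple[float, float] | None:
    key = (lbar, rbar)
    cached = _mxmns.get(key)
    if cached:
        return cached

    ifirst = arr[0]['index']
    view = arr[lbar - ifirst:(rbar - ifirst) + 1]
    if not view.size:
        return None

    mxmn = float(np.min(view['low'])), float(np.max(view['high']))
    _mxmns[key] = mxmn
    return mxmn

ohlc = np.array(
    [(0, 9., 11.), (1, 8., 12.), (2, 10., 13.)],
    dtype=[('index', int), ('low', float), ('high', float)],
)
assert maxmin(ohlc, 1, 2) == (8., 13.)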
||||
def view_range(self) -> tuple[int, int]:
|
||||
'''
|
||||
Return the indexes in view for the associated
|
||||
plot displaying this flow's data.
|
||||
|
||||
'''
|
||||
vr = self.plot.viewRect()
|
||||
return (
|
||||
vr.left(),
|
||||
vr.right(),
|
||||
)
|
||||
|
||||
def datums_range(
|
||||
self,
|
||||
index_field: str = 'index',
|
||||
) -> tuple[
|
||||
int, int, int, int, int, int
|
||||
]:
|
||||
'''
|
||||
Return a range tuple for the datums present in view.
|
||||
|
||||
'''
|
||||
l, r = self.view_range()
|
||||
l = round(l)
|
||||
r = round(r)
|
||||
|
||||
# TODO: avoid this and have shm passed
|
||||
# in earlier.
|
||||
if self.shm is None:
|
||||
# haven't initialized the flow yet
|
||||
return (0, l, 0, 0, r, 0)
|
||||
|
||||
array = self.shm.array
|
||||
index = array['index']
|
||||
start = index[0]
|
||||
end = index[-1]
|
||||
lbar = max(l, start)
|
||||
rbar = min(r, end)
|
||||
return (
|
||||
start, l, lbar, rbar, r, end,
|
||||
)
|
||||
|
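``datums_range()`` just clips the (possibly over-scrolled) view bounds to the indexes actually present in the buffer. Worked numbers:

# sketch: clip the current view range to the datums actually in the buffer
def datums_range(
    l: float, r: float,       # view left/right (possibly past the data)
    start: int, end: int,     # first/last index present in the shm array
) -> tuple[int, int, int, int, int, int]:
    l, r = round(l), round(r)
    lbar = max(l, start)      # leftmost datum actually in view
    rbar = min(r, end)        # rightmost datum actually in view
    return start, l, lbar, rbar, r, end

# a view scrolled past the right edge of history:
assert datums_range(90, 130, 0, 120) == (0, 90, 90, 120, 130, 120)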
||||
def read(
|
||||
self,
|
||||
array_field: Optional[str] = None,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> tuple[
|
||||
int, int, np.ndarray,
|
||||
int, int, np.ndarray,
|
||||
]:
|
||||
'''
|
||||
Read the underlying shm array buffer and
|
||||
return the data plus indexes for the first
|
||||
and last
|
||||
datums which have been written to.
|
||||
|
||||
'''
|
||||
# readable data
|
||||
array = self.shm.array
|
||||
|
||||
indexes = array[index_field]
|
||||
ifirst = indexes[0]
|
||||
ilast = indexes[-1]
|
||||
|
||||
ifirst, l, lbar, rbar, r, ilast = self.datums_range()
|
||||
|
||||
# get read-relative indices adjusting
|
||||
# for master shm index.
|
||||
lbar_i = max(l, ifirst) - ifirst
|
||||
rbar_i = min(r, ilast) - ifirst
|
||||
|
||||
if array_field:
|
||||
array = array[array_field]
|
||||
|
||||
# TODO: we could do it this way as well no?
|
||||
# to_draw = array[lbar - ifirst:(rbar - ifirst) + 1]
|
||||
in_view = array[lbar_i: rbar_i + 1]
|
||||
|
||||
return (
|
||||
# abs indices + full data set
|
||||
ifirst, ilast, array,
|
||||
|
||||
# relative indices + in view datums
|
||||
lbar_i, rbar_i, in_view,
|
||||
)
|
||||
|
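Because the shm buffer's absolute ``index`` values do not start at zero, ``read()`` converts the view bounds to buffer-relative offsets by subtracting the first stored index before slicing. Worked numbers (hypothetical values):

# sketch: absolute view bounds -> buffer-relative slice
ifirst, ilast = 5000, 5999        # absolute indices stored in the array
l, r = 5950, 6020                 # current view bounds (r runs past the data)

lbar_i = max(l, ifirst) - ifirst  # 950
rbar_i = min(r, ilast) - ifirst   # 999

# in_view = array[lbar_i:rbar_i + 1] -> the last 50 rows
assert (lbar_i, rbar_i) == (950, 999)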
||||
def update_graphics(
|
||||
self,
|
||||
use_vr: bool = True,
|
||||
render: bool = True,
|
||||
array_key: Optional[str] = None,
|
||||
|
||||
profiler: Optional[Profiler] = None,
|
||||
do_append: bool = True,
|
||||
|
||||
**kwargs,
|
||||
|
||||
) -> pg.GraphicsObject:
|
||||
'''
|
||||
Read latest datums from shm and (incrementally)
|
||||
render to graphics.
|
||||
|
||||
'''
|
||||
profiler = Profiler(
|
||||
msg=f'Flow.update_graphics() for {self.name}',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=4,
|
||||
# ms_threshold=ms_slower_then,
|
||||
)
|
||||
# shm read and slice to view
|
||||
read = (
|
||||
xfirst, xlast, src_array,
|
||||
ivl, ivr, in_view,
|
||||
) = self.read()
|
||||
|
||||
profiler('read src shm data')
|
||||
|
||||
graphics = self.graphics
|
||||
|
||||
if (
|
||||
not in_view.size
|
||||
or not render
|
||||
):
|
||||
# print('exiting early')
|
||||
return graphics
|
||||
|
||||
slice_to_head: int = -1
|
||||
should_redraw: bool = False
|
||||
should_line: bool = False
|
||||
rkwargs = {}
|
||||
|
||||
# TODO: probably specialize ``Renderer`` types instead of
|
||||
# these logic checks?
|
||||
# - put these blocks into a `.load_renderer()` meth?
|
||||
# - consider a OHLCRenderer, StepCurveRenderer, Renderer?
|
||||
r = self._src_r
|
||||
if isinstance(graphics, BarItems):
|
||||
# XXX: special case where we change out graphics
|
||||
# to a line after a certain uppx threshold.
|
||||
(
|
||||
graphics,
|
||||
r,
|
||||
rkwargs,
|
||||
should_line,
|
||||
changed_to_line,
|
||||
) = render_baritems(
|
||||
self,
|
||||
graphics,
|
||||
read,
|
||||
profiler,
|
||||
**kwargs,
|
||||
)
|
||||
should_redraw = changed_to_line or not should_line
|
||||
self._in_ds = should_line
|
||||
|
||||
elif not r:
|
||||
if isinstance(graphics, StepCurve):
|
||||
|
||||
r = self._src_r = Renderer(
|
||||
flow=self,
|
||||
fmtr=StepCurveFmtr(
|
||||
shm=self.shm,
|
||||
flow=self,
|
||||
_last_read=read,
|
||||
),
|
||||
)
|
||||
|
||||
# TODO: append logic inside ``.render()`` isn't
|
||||
# correct yet for step curves.. remove this to see it.
|
||||
should_redraw = True
|
||||
slice_to_head = -2
|
||||
|
||||
else:
|
||||
r = self._src_r
|
||||
if not r:
|
||||
# just using for ``.diff()`` atm..
|
||||
r = self._src_r = Renderer(
|
||||
flow=self,
|
||||
fmtr=IncrementalFormatter(
|
||||
shm=self.shm,
|
||||
flow=self,
|
||||
_last_read=read,
|
||||
),
|
||||
)
|
||||
|
||||
# ``Curve`` derivative case(s):
|
||||
array_key = array_key or self.name
|
||||
# print(array_key)
|
||||
|
||||
# ds update config
|
||||
new_sample_rate: bool = False
|
||||
should_ds: bool = r._in_ds
|
||||
showing_src_data: bool = not r._in_ds
|
||||
|
||||
# downsampling incremental state checking
|
||||
# check for and set std m4 downsample conditions
|
||||
uppx = graphics.x_uppx()
|
||||
uppx_diff = (uppx - self._last_uppx)
|
||||
profiler(f'diffed uppx {uppx}')
|
||||
if (
|
||||
uppx > 1
|
||||
and abs(uppx_diff) >= 1
|
||||
):
|
||||
log.debug(
|
||||
f'{array_key} sampler change: {self._last_uppx} -> {uppx}'
|
||||
)
|
||||
self._last_uppx = uppx
|
||||
|
||||
new_sample_rate = True
|
||||
showing_src_data = False
|
||||
should_ds = True
|
||||
should_redraw = True
|
||||
|
||||
elif (
|
||||
uppx <= 2
|
||||
and self._in_ds
|
||||
):
|
||||
# we should de-downsample back to our original
|
||||
# source data so we clear our path data in prep
|
||||
# to generate a new one from original source data.
|
||||
new_sample_rate = True
|
||||
should_ds = False
|
||||
should_redraw = True
|
||||
|
||||
showing_src_data = True
|
||||
# reset yrange to be computed from source data
|
||||
self.yrange = None
|
||||
|
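The m4 resample trigger above fires only when the uppx ratio moves by at least a whole unit while zoomed out (``uppx > 1``); sub-pixel zoom ticks never force the expensive full-path redraw. The trigger condition, condensed:

# sketch: when does a zoom change force a fresh (re)downsample?
def needs_resample(uppx: float, last_uppx: float) -> bool:
    return uppx > 1 and abs(uppx - last_uppx) >= 1

assert needs_resample(6.2, 5.1)          # crossed a whole-pixel-ratio step
assert not needs_resample(6.2, 6.9)      # small zoom jitter, skip
assert not needs_resample(0.8, 3.0)      # zoomed fully in, no ds at all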
||||
# MAIN RENDER LOGIC:
|
||||
# - determine in view data and redraw on range change
|
||||
# - determine downsampling ops if needed
|
||||
# - (incrementally) update ``QPainterPath``
|
||||
|
||||
out = r.render(
|
||||
read,
|
||||
array_key,
|
||||
profiler,
|
||||
uppx=uppx,
|
||||
# use_vr=True,
|
||||
|
||||
# TODO: better way to detect and pass this?
|
||||
# if we want to eventually cache renderers for a given uppx
|
||||
# we should probably use this as a key + state?
|
||||
should_redraw=should_redraw,
|
||||
new_sample_rate=new_sample_rate,
|
||||
should_ds=should_ds,
|
||||
showing_src_data=showing_src_data,
|
||||
|
||||
slice_to_head=slice_to_head,
|
||||
do_append=do_append,
|
||||
|
||||
**rkwargs,
|
||||
)
|
||||
if showing_src_data:
|
||||
# print(f"{self.name} SHOWING SOURCE")
|
||||
# reset yrange to be computed from source data
|
||||
self.yrange = None
|
||||
|
||||
if not out:
|
||||
log.warning(f'{self.name} failed to render!?')
|
||||
return graphics
|
||||
|
||||
path, data, reset = out
|
||||
|
||||
# if self.yrange:
|
||||
# print(f'flow {self.name} yrange from m4: {self.yrange}')
|
||||
|
||||
# XXX: SUPER UGGGHHH... without this we get stale cache
|
||||
# graphics that don't update until you downsampler again..
|
||||
# reset = False
|
||||
# if reset:
|
||||
# with graphics.reset_cache():
|
||||
# # assign output paths to graphics obj
|
||||
# graphics.path = r.path
|
||||
# graphics.fast_path = r.fast_path
|
||||
|
||||
# # XXX: we don't need this right?
|
||||
# # graphics.draw_last_datum(
|
||||
# # path,
|
||||
# # src_array,
|
||||
# # data,
|
||||
# # reset,
|
||||
# # array_key,
|
||||
# # )
|
||||
# # graphics.update()
|
||||
# # profiler('.update()')
|
||||
# else:
|
||||
# assign output paths to graphics obj
|
||||
graphics.path = r.path
|
||||
graphics.fast_path = r.fast_path
|
||||
|
||||
graphics.draw_last_datum(
|
||||
path,
|
||||
src_array,
|
||||
data,
|
||||
reset,
|
||||
array_key,
|
||||
)
|
||||
graphics.update()
|
||||
profiler('.update()')
|
||||
|
||||
# TODO: does this actually help us in any way (probably should
|
||||
# look at the source / ask ogi). I think it avoids artifacts on
|
||||
# wheel-scroll downsampling curve updates?
|
||||
# TODO: is this ever better?
|
||||
# graphics.prepareGeometryChange()
|
||||
# profiler('.prepareGeometryChange()')
|
||||
|
||||
# track downsampled state
|
||||
self._in_ds = r._in_ds
|
||||
|
||||
return graphics
|
||||
|
||||
def draw_last(
|
||||
self,
|
||||
array_key: Optional[str] = None,
|
||||
only_last_uppx: bool = False,
|
||||
|
||||
) -> None:
|
||||
|
||||
# shm read and slice to view
|
||||
(
|
||||
xfirst, xlast, src_array,
|
||||
ivl, ivr, in_view,
|
||||
) = self.read()
|
||||
|
||||
g = self.graphics
|
||||
array_key = array_key or self.name
|
||||
x, y = g.draw_last_datum(
|
||||
g.path,
|
||||
src_array,
|
||||
src_array,
|
||||
False, # never reset path
|
||||
array_key,
|
||||
)
|
||||
|
||||
# if the renderer is downsampling we choose
|
||||
# to always try and update a single (interpolating)
|
||||
# line segment that spans and tries to display
|
||||
# the last uppx's worth of datums.
|
||||
# we only care about the last pixel's
|
||||
# worth of data since that's all the screen
|
||||
# can represent on the last column where
|
||||
# the most recent datum is being drawn.
|
||||
if self._in_ds or only_last_uppx:
|
||||
dsg = self.ds_graphics or self.graphics
|
||||
|
||||
# XXX: pretty sure we don't need this?
|
||||
# if isinstance(g, Curve):
|
||||
# with dsg.reset_cache():
|
||||
uppx = self._last_uppx
|
||||
y = y[-uppx:]
|
||||
ymn, ymx = y.min(), y.max()
|
||||
# print(f'drawing uppx={uppx} mxmn line: {ymn}, {ymx}')
|
||||
try:
|
||||
iuppx = x[-uppx]
|
||||
except IndexError:
|
||||
# we're less than an x-px wide so just grab the start
|
||||
# datum index.
|
||||
iuppx = x[0]
|
||||
|
||||
dsg._last_line = QLineF(
|
||||
iuppx, ymn,
|
||||
x[-1], ymx,
|
||||
)
|
||||
# print(f'updating DS curve {self.name}')
|
||||
dsg.update()
|
||||
|
||||
else:
|
||||
# print(f'updating NOT DS curve {self.name}')
|
||||
g.update()
|
||||
|
||||
|
||||
class Renderer(msgspec.Struct):
|
||||
|
||||
flow: Flow
|
||||
fmtr: IncrementalFormatter
|
||||
|
||||
# output graphics rendering, the main object
|
||||
# processed in ``QGraphicsObject.paint()``
|
||||
path: Optional[QPainterPath] = None
|
||||
fast_path: Optional[QPainterPath] = None
|
||||
|
||||
# XXX: just ideas..
|
||||
# called on the final data (transform) output to convert
|
||||
# to "graphical data form" a format that can be passed to
|
||||
# the ``.draw()`` implementation.
|
||||
# graphics_t: Optional[Callable[ShmArray, np.ndarray]] = None
|
||||
# graphics_t_shm: Optional[ShmArray] = None
|
||||
|
||||
# path graphics update implementation methods
|
||||
# prepend_fn: Optional[Callable[QPainterPath, QPainterPath]] = None
|
||||
# append_fn: Optional[Callable[QPainterPath, QPainterPath]] = None
|
||||
|
||||
# downsampling state
|
||||
_last_uppx: float = 0
|
||||
_in_ds: bool = False
|
||||
|
||||
def draw_path(
|
||||
self,
|
||||
x: np.ndarray,
|
||||
y: np.ndarray,
|
||||
connect: str | np.ndarray = 'all',
|
||||
path: Optional[QPainterPath] = None,
|
||||
redraw: bool = False,
|
||||
|
||||
) -> QPainterPath:
|
||||
|
||||
path_was_none = path is None
|
||||
|
||||
if redraw and path:
|
||||
path.clear()
|
||||
|
||||
# TODO: avoid this?
|
||||
if self.fast_path:
|
||||
self.fast_path.clear()
|
||||
|
||||
# profiler('cleared paths due to `should_redraw=True`')
|
||||
|
||||
path = pg.functions.arrayToQPath(
|
||||
x,
|
||||
y,
|
||||
connect=connect,
|
||||
finiteCheck=False,
|
||||
|
||||
# reserve mem allocs see:
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#reserve
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#capacity
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#clear
|
||||
# XXX: right now this is based on ad hoc checks on a
|
||||
# hidpi 3840x2160 4k monitor but we should optimize for
|
||||
# the target display(s) on the sys.
|
||||
# if no_path_yet:
|
||||
# graphics.path.reserve(int(500e3))
|
||||
# path=path, # path re-use / reserving
|
||||
)
|
||||
|
||||
# avoid mem allocs if possible
|
||||
if path_was_none:
|
||||
path.reserve(path.capacity())
|
||||
|
||||
return path
|
||||
|
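``draw_path()`` ultimately defers to ``pg.functions.arrayToQPath()``, where the ``connect`` argument (either a mode string or a per-point boolean array) controls which consecutive samples get joined by a line segment. A minimal sketch, assuming pyqtgraph and a Qt binding are importable:

import numpy as np
import pyqtgraph as pg

# sketch: build a QPainterPath from arrays with an explicit connect mask
x = np.arange(5, dtype=float)
y = np.array([1., 2., 0., 3., 1.])

# connect[i] says whether a segment is drawn from point i to i + 1;
# a False leaves a gap (eg. between disjoint history chunks)
connect = np.array([True, True, False, True, False])

path = pg.functions.arrayToQPath(
    x, y,
    connect=connect,
    # finiteCheck=False,  # used by the real Renderer; needs a recent pyqtgraph
)
print(path.elementCount())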
||||
def render(
|
||||
self,
|
||||
|
||||
new_read,
|
||||
array_key: str,
|
||||
profiler: Profiler,
|
||||
uppx: float = 1,
|
||||
|
||||
# redraw and ds flags
|
||||
should_redraw: bool = False,
|
||||
new_sample_rate: bool = False,
|
||||
should_ds: bool = False,
|
||||
showing_src_data: bool = True,
|
||||
|
||||
do_append: bool = True,
|
||||
slice_to_head: int = -1,
|
||||
use_fpath: bool = True,
|
||||
|
||||
# only render datums "in view" of the ``ChartView``
|
||||
use_vr: bool = True,
|
||||
read_from_key: bool = True,
|
||||
|
||||
) -> list[QPainterPath]:
|
||||
'''
|
||||
Render the current graphics path(s)
|
||||
|
||||
There are (at least) 3 stages from source data to graphics data:
|
||||
- a data transform (which can be stored in additional shm)
|
||||
- a graphics transform which converts discrete basis data to
|
||||
a `float`-basis view-coords graphics basis. (eg. ``ohlc_flatten()``,
|
||||
``step_path_arrays_from_1d()``, etc.)
|
||||
|
||||
- blah blah blah (from notes)
|
||||
|
||||
'''
|
||||
# TODO: can the renderer just call ``Flow.read()`` directly?
|
||||
# unpack latest source data read
|
||||
fmtr = self.fmtr
|
||||
|
||||
(
|
||||
_,
|
||||
_,
|
||||
array,
|
||||
ivl,
|
||||
ivr,
|
||||
in_view,
|
||||
) = new_read
|
||||
|
||||
# xy-path data transform: convert source data to a format
|
||||
# able to be passed to a `QPainterPath` rendering routine.
|
||||
fmt_out = fmtr.format_to_1d(
|
||||
new_read,
|
||||
array_key,
|
||||
profiler,
|
||||
|
||||
slice_to_head=slice_to_head,
|
||||
read_src_from_key=read_from_key,
|
||||
slice_to_inview=use_vr,
|
||||
)
|
||||
|
||||
# no history in view case
|
||||
if not fmt_out:
|
||||
# XXX: this might be why the profiler only has exits?
|
||||
return
|
||||
|
||||
(
|
||||
x_1d,
|
||||
y_1d,
|
||||
connect,
|
||||
prepend_length,
|
||||
append_length,
|
||||
view_changed,
|
||||
# append_tres,
|
||||
|
||||
) = fmt_out
|
||||
|
||||
# redraw conditions
|
||||
if (
|
||||
prepend_length > 0
|
||||
or new_sample_rate
|
||||
or view_changed
|
||||
|
||||
# NOTE: comment this to try and make "append paths"
|
||||
# work below..
|
||||
or append_length > 0
|
||||
):
|
||||
should_redraw = True
|
||||
|
||||
path = self.path
|
||||
fast_path = self.fast_path
|
||||
reset = False
|
||||
|
||||
# redraw the entire source data if we have either of:
|
||||
# - no prior path graphic rendered or,
|
||||
# - we always intend to re-render the data only in view
|
||||
if (
|
||||
path is None
|
||||
or should_redraw
|
||||
):
|
||||
# print(f"{self.flow.name} -> REDRAWING BRUH")
|
||||
if new_sample_rate and showing_src_data:
|
||||
log.info(f'DEDOWN -> {array_key}')
|
||||
self._in_ds = False
|
||||
|
||||
elif should_ds and uppx > 1:
|
||||
|
||||
x_1d, y_1d, ymn, ymx = xy_downsample(
|
||||
x_1d,
|
||||
y_1d,
|
||||
uppx,
|
||||
)
|
||||
self.flow.yrange = ymn, ymx
|
||||
# print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')
|
||||
|
||||
reset = True
|
||||
profiler(f'FULL PATH downsample redraw={should_ds}')
|
||||
self._in_ds = True
|
||||
|
||||
path = self.draw_path(
|
||||
x=x_1d,
|
||||
y=y_1d,
|
||||
connect=connect,
|
||||
path=path,
|
||||
redraw=True,
|
||||
)
|
||||
|
||||
profiler(
|
||||
'generated fresh path. '
|
||||
f'(should_redraw: {should_redraw} '
|
||||
f'should_ds: {should_ds} new_sample_rate: {new_sample_rate})'
|
||||
)
|
||||
|
||||
# TODO: get this piecewise prepend working - right now it's
|
||||
# giving heck on vwap...
|
||||
# elif prepend_length:
|
||||
|
||||
# prepend_path = pg.functions.arrayToQPath(
|
||||
# x[0:prepend_length],
|
||||
# y[0:prepend_length],
|
||||
# connect='all'
|
||||
# )
|
||||
|
||||
# # swap prepend path in "front"
|
||||
# old_path = graphics.path
|
||||
# graphics.path = prepend_path
|
||||
# # graphics.path.moveTo(new_x[0], new_y[0])
|
||||
# graphics.path.connectPath(old_path)
|
||||
|
||||
elif (
|
||||
append_length > 0
|
||||
and do_append
|
||||
):
|
||||
print(f'{array_key} append len: {append_length}')
|
||||
# new_x = x_1d[-append_length - 2:] # slice_to_head]
|
||||
# new_y = y_1d[-append_length - 2:] # slice_to_head]
|
||||
profiler('sliced append path')
|
||||
# (
|
||||
# x_1d,
|
||||
# y_1d,
|
||||
# connect,
|
||||
# ) = append_tres
|
||||
|
||||
profiler(
|
||||
f'diffed array input, append_length={append_length}'
|
||||
)
|
||||
|
||||
# if should_ds and uppx > 1:
|
||||
# new_x, new_y = xy_downsample(
|
||||
# new_x,
|
||||
# new_y,
|
||||
# uppx,
|
||||
# )
|
||||
# profiler(f'fast path downsample redraw={should_ds}')
|
||||
|
||||
append_path = self.draw_path(
|
||||
x=x_1d,
|
||||
y=y_1d,
|
||||
connect=connect,
|
||||
path=fast_path,
|
||||
)
|
||||
profiler('generated append qpath')
|
||||
|
||||
if use_fpath:
|
||||
# print(f'{self.flow.name}: FAST PATH')
|
||||
# an attempt at trying to make append-updates faster..
|
||||
if fast_path is None:
|
||||
fast_path = append_path
|
||||
# fast_path.reserve(int(6e3))
|
||||
else:
|
||||
fast_path.connectPath(append_path)
|
||||
size = fast_path.capacity()
|
||||
profiler(f'connected fast path w size: {size}')
|
||||
|
||||
print(
|
||||
f"append_path br: {append_path.boundingRect()}\n"
|
||||
f"path size: {size}\n"
|
||||
f"append_path len: {append_path.length()}\n"
|
||||
f"fast_path len: {fast_path.length()}\n"
|
||||
)
|
||||
# graphics.path.moveTo(new_x[0], new_y[0])
|
||||
# path.connectPath(append_path)
|
||||
|
||||
# XXX: lol this causes a hang..
|
||||
# graphics.path = graphics.path.simplified()
|
||||
else:
|
||||
size = path.capacity()
|
||||
profiler(f'connected history path w size: {size}')
|
||||
path.connectPath(append_path)
|
||||
|
||||
self.path = path
|
||||
self.fast_path = fast_path
|
||||
|
||||
return self.path, array, reset
|
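The append branch of ``render()`` never regenerates the large history path; the newly formatted segment is drawn into a small "fast" path which later appends are connected onto, and only a full redraw folds everything back into one path. A minimal PyQt5-only sketch of that bookkeeping (no chart or renderer objects):

from PyQt5.QtGui import QPainterPath

# sketch: the append / fast-path trick - new segments go into a small
# secondary path so the (large) history path is not re-generated
history = QPainterPath()
history.moveTo(0, 0)
history.lineTo(1, 1)

append = QPainterPath()
append.moveTo(1, 1)
append.lineTo(2, 0)

fast_path = None
if fast_path is None:
    fast_path = append             # first append becomes the fast path
else:
    fast_path.connectPath(append)  # later appends just get connected on

# a full redraw (new sample rate, prepend, view change) rebuilds
# ``history`` from source data and drops ``fast_path`` back to None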
184  piker/ui/_fsp.py
|
@ -78,15 +78,14 @@ def has_vlm(ohlcv: ShmArray) -> bool:
|
|||
|
||||
|
||||
def update_fsp_chart(
|
||||
chart: ChartPlotWidget,
|
||||
flow,
|
||||
viz,
|
||||
graphics_name: str,
|
||||
array_key: Optional[str],
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
|
||||
shm = flow.shm
|
||||
shm = viz.shm
|
||||
if not shm:
|
||||
return
|
||||
|
||||
|
@ -101,18 +100,14 @@ def update_fsp_chart(
|
|||
# update graphics
|
||||
# NOTE: this does a length check internally which allows it
|
||||
# staying above the last row check below..
|
||||
chart.update_graphics_from_flow(
|
||||
graphics_name,
|
||||
array_key=array_key or graphics_name,
|
||||
**kwargs,
|
||||
)
|
||||
viz.update_graphics()
|
||||
|
||||
# XXX: re: ``array_key``: fsp func names must be unique meaning we
|
||||
# can't have duplicates of the underlying data even if multiple
|
||||
# sub-charts reference it under different 'named charts'.
|
||||
|
||||
# read from last calculated value and update any label
|
||||
last_val_sticky = chart.plotItem.getAxis(
|
||||
last_val_sticky = viz.plot.getAxis(
|
||||
'right')._stickies.get(graphics_name)
|
||||
if last_val_sticky:
|
||||
last = last_row[array_key]
|
||||
|
@ -287,9 +282,10 @@ async def run_fsp_ui(
|
|||
# profiler(f'fsp:{name} chart created')
|
||||
|
||||
# first UI update, usually from shm pushed history
|
||||
viz = chart.get_viz(array_key)
|
||||
update_fsp_chart(
|
||||
chart,
|
||||
chart._flows[array_key],
|
||||
viz,
|
||||
name,
|
||||
array_key=array_key,
|
||||
)
|
||||
|
@ -316,7 +312,7 @@ async def run_fsp_ui(
|
|||
# level_line(chart, 70, orient_v='bottom')
|
||||
# level_line(chart, 80, orient_v='top')
|
||||
|
||||
chart.view._set_yrange()
|
||||
chart.view._set_yrange(viz=viz)
|
||||
# done() # status updates
|
||||
|
||||
# profiler(f'fsp:{func_name} starting update loop')
|
||||
|
@ -357,7 +353,7 @@ async def run_fsp_ui(
|
|||
# last = time.time()
|
||||
|
||||
|
||||
# TODO: maybe this should be our ``Flow`` type since it maps
|
||||
# TODO: maybe this should be our ``Viz`` type since it maps
|
||||
# one flume to the next? The machinery for task/actor mgmt should
|
||||
# be part of the instantiation API?
|
||||
class FspAdmin:
|
||||
|
@ -386,7 +382,7 @@ class FspAdmin:
|
|||
|
||||
# TODO: make this a `.src_flume` and add
|
||||
# a `dst_flume`?
|
||||
# (=> but then wouldn't this be the most basic `Flow`?)
|
||||
# (=> but then wouldn't this be the most basic `Viz`?)
|
||||
self.flume = flume
|
||||
|
||||
def rr_next_portal(self) -> tractor.Portal:
|
||||
|
@ -612,6 +608,7 @@ async def open_vlm_displays(
|
|||
linked: LinkedSplits,
|
||||
flume: Flume,
|
||||
dvlm: bool = True,
|
||||
loglevel: str = 'info',
|
||||
|
||||
task_status: TaskStatus[ChartPlotWidget] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
|
@ -661,12 +658,18 @@ async def open_vlm_displays(
|
|||
# str(period_param.default)
|
||||
# )
|
||||
|
||||
# use slightly less light (than bracket) gray
|
||||
# for volume from "main exchange" and a more "bluey"
|
||||
# gray for "dark" vlm.
|
||||
vlm_color = 'i3'
|
||||
dark_vlm_color = 'charcoal'
|
||||
|
||||
# built-in vlm which we plot ASAP since it's
|
||||
# usually data provided directly with OHLC history.
|
||||
shm = ohlcv
|
||||
ohlc_chart = linked.chart
|
||||
# ohlc_chart = linked.chart
|
||||
|
||||
chart = linked.add_plot(
|
||||
vlm_chart = linked.add_plot(
|
||||
name='volume',
|
||||
shm=shm,
|
||||
flume=flume,
|
||||
|
@ -682,67 +685,38 @@ async def open_vlm_displays(
|
|||
# the curve item internals are pretty convoluted.
|
||||
style='step',
|
||||
)
|
||||
# back-link the volume chart to trigger y-autoranging
|
||||
# in the ohlc (parent) chart.
|
||||
ohlc_chart.view.enable_auto_yrange(
|
||||
src_vb=chart.view,
|
||||
)
|
||||
|
||||
# force 0 to always be in view
|
||||
def multi_maxmin(
|
||||
names: list[str],
|
||||
|
||||
) -> tuple[float, float]:
|
||||
'''
|
||||
Flows "group" maxmin loop; assumes all named flows
|
||||
are in the same co-domain and thus can be sorted
|
||||
as one set.
|
||||
|
||||
Iterates all the named flows and calls the chart
|
||||
api to find their range values and return.
|
||||
|
||||
TODO: really we should probably have a more built-in API
|
||||
for this?
|
||||
|
||||
'''
|
||||
mx = 0
|
||||
for name in names:
|
||||
ymn, ymx = chart.maxmin(name=name)
|
||||
mx = max(mx, ymx)
|
||||
|
||||
return 0, mx
|
||||
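The removed ``multi_maxmin()`` helper simply reduced the per-name y-ranges to one shared range with a zero floor so grouped volume curves stay pinned to zero; where that logic now lives is not shown in this hunk. A condensed sketch:

# sketch: "group" y-range across several named curves sharing one co-domain
def multi_maxmin(ranges: dict[str, tuple[float, float]]) -> tuple[float, float]:
    mx = 0
    for _name, (_ymn, ymx) in ranges.items():
        mx = max(mx, ymx)
    # volume-style flows are always bounded below by zero
    return 0, mx

assert multi_maxmin({'dolla_vlm': (0, 1.2e6), 'dark_vlm': (0, 3.4e5)}) == (0, 1.2e6)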
vlm_viz = vlm_chart._vizs['volume']
|
||||
|
||||
# TODO: fix the x-axis label issue where if you put
|
||||
# the axis on the left it's totally not lined up...
|
||||
# show volume units value on LHS (for dinkus)
|
||||
# chart.hideAxis('right')
|
||||
# chart.showAxis('left')
|
||||
# vlm_chart.hideAxis('right')
|
||||
vlm_chart.hideAxis('left')
|
||||
|
||||
# send back new chart to caller
|
||||
task_status.started(chart)
|
||||
task_status.started(vlm_chart)
|
||||
|
||||
# should **not** be the same sub-chart widget
|
||||
assert chart.name != linked.chart.name
|
||||
assert vlm_chart.name != linked.chart.name
|
||||
|
||||
# sticky only on sub-charts atm
|
||||
last_val_sticky = chart.plotItem.getAxis(
|
||||
'right')._stickies.get(chart.name)
|
||||
last_val_sticky = vlm_chart.plotItem.getAxis(
|
||||
'right')._stickies.get(vlm_chart.name)
|
||||
|
||||
# read from last calculated value
|
||||
value = shm.array['volume'][-1]
|
||||
|
||||
last_val_sticky.update_from_data(-1, value)
|
||||
|
||||
vlm_curve = chart.update_graphics_from_flow(
|
||||
'volume',
|
||||
# shm.array,
|
||||
)
|
||||
_, _, vlm_curve = vlm_viz.update_graphics()
|
||||
|
||||
# size view to data once at outset
|
||||
chart.view._set_yrange()
|
||||
# vlm_chart.view._set_yrange(
|
||||
# viz=vlm_viz
|
||||
# )
|
||||
|
||||
# add axis title
|
||||
axis = chart.getAxis('right')
|
||||
axis = vlm_chart.getAxis('right')
|
||||
axis.set_title(' vlm')
|
||||
|
||||
if dvlm:
|
||||
|
@ -761,45 +735,32 @@ async def open_vlm_displays(
|
|||
},
|
||||
},
|
||||
},
|
||||
# loglevel,
|
||||
loglevel,
|
||||
)
|
||||
tasks_ready.append(started)
|
||||
|
||||
# FIXME: we should error on starting the same fsp right
|
||||
# since it might collide with existing shm.. or wait we
|
||||
# had this before??
|
||||
# dolla_vlm,
|
||||
|
||||
tasks_ready.append(started)
|
||||
# profiler(f'created shm for fsp actor: {display_name}')
|
||||
|
||||
# wait for all engine tasks to startup
|
||||
async with trio.open_nursery() as n:
|
||||
for event in tasks_ready:
|
||||
n.start_soon(event.wait)
|
||||
|
||||
# dolla vlm overlay
|
||||
# XXX: the main chart already contains a vlm "units" axis
|
||||
# so here we add an overlay wth a y-range in
|
||||
# $ liquidity-value units (normally a fiat like USD).
|
||||
dvlm_pi = chart.overlay_plotitem(
|
||||
dvlm_pi = vlm_chart.overlay_plotitem(
|
||||
'dolla_vlm',
|
||||
index=0, # place axis on inside (nearest to chart)
|
||||
|
||||
axis_title=' $vlm',
|
||||
axis_side='right',
|
||||
axis_side='left',
|
||||
|
||||
axis_kwargs={
|
||||
'typical_max_str': ' 100.0 M ',
|
||||
'formatter': partial(
|
||||
humanize,
|
||||
digits=2,
|
||||
),
|
||||
'text_color': vlm_color,
|
||||
},
|
||||
)
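The ``formatter`` passed above is a partial of the project's ``humanize`` helper; its exact signature isn't shown in this diff, so the following is only an illustrative sketch of the SI-suffix style formatting the ``typical_max_str`` (eg. ``' 100.0 M '``) suggests. Name and signature are assumptions:

def humanize_sketch(
    value: float,
    digits: int = 2,
) -> str:
    # render large volume values with a short SI-style suffix,
    # e.g. 123_456_789 -> '123.46 M'
    for divisor, suffix in (
        (1e9, 'B'),
        (1e6, 'M'),
        (1e3, 'K'),
    ):
        if abs(value) >= divisor:
            return f'{value / divisor:.{digits}f} {suffix}'
    return f'{value:.{digits}f}'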
|
||||
|
||||
dvlm_pi.hideAxis('left')
|
||||
dvlm_pi.hideAxis('bottom')
|
||||
# all to be overlayed curve names
|
||||
fields = [
|
||||
dvlm_fields = [
|
||||
'dolla_vlm',
|
||||
'dark_vlm',
|
||||
]
|
||||
|
@ -812,27 +773,12 @@ async def open_vlm_displays(
|
|||
'dark_trade_rate',
|
||||
]
|
||||
|
||||
group_mxmn = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=fields,
|
||||
# names=fields + dvlm_rate_fields,
|
||||
)
|
||||
|
||||
# add custom auto range handler
|
||||
dvlm_pi.vb._maxmin = group_mxmn
|
||||
|
||||
# use slightly less light (than bracket) gray
|
||||
# for volume from "main exchange" and a more "bluey"
|
||||
# gray for "dark" vlm.
|
||||
vlm_color = 'i3'
|
||||
dark_vlm_color = 'charcoal'
|
||||
|
||||
# add dvlm (step) curves to common view
|
||||
def chart_curves(
|
||||
names: list[str],
|
||||
pi: pg.PlotItem,
|
||||
shm: ShmArray,
|
||||
flume: Flume,
|
||||
step_mode: bool = False,
|
||||
style: str = 'solid',
|
||||
|
||||
|
@ -849,7 +795,7 @@ async def open_vlm_displays(
|
|||
assert isinstance(shm, ShmArray)
|
||||
assert isinstance(flume, Flume)
|
||||
|
||||
flow = chart.draw_curve(
|
||||
viz = vlm_chart.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
|
@ -860,34 +806,28 @@ async def open_vlm_displays(
|
|||
style=style,
|
||||
pi=pi,
|
||||
)
|
||||
assert viz.plot is pi
|
||||
|
||||
# TODO: we need a better API to do this..
|
||||
# specially store ref to shm for lookup in display loop
|
||||
# since only a placeholder of `None` is entered in
|
||||
# ``.draw_curve()``.
|
||||
# flow = chart._flows[name]
|
||||
assert flow.plot is pi
|
||||
|
||||
await started.wait()
|
||||
chart_curves(
|
||||
fields,
|
||||
dvlm_fields,
|
||||
dvlm_pi,
|
||||
dvlm_flume.rt_shm,
|
||||
dvlm_flume,
|
||||
step_mode=True,
|
||||
)
|
||||
|
||||
# spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
|
||||
# up since this one depends on it.
|
||||
|
||||
# NOTE: spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
|
||||
# up since calculating vlm "rates" obvs first requires the
|
||||
# underlying vlm event feed ;)
|
||||
fr_flume, started = await admin.start_engine_task(
|
||||
flow_rates,
|
||||
{ # fsp engine conf
|
||||
'func_name': 'flow_rates',
|
||||
'zero_on_step': False,
|
||||
'zero_on_step': True,
|
||||
},
|
||||
# loglevel,
|
||||
loglevel,
|
||||
)
|
||||
await started.wait()
|
||||
|
||||
# chart_curves(
|
||||
# dvlm_rate_fields,
|
||||
# dvlm_pi,
|
||||
|
@ -900,24 +840,29 @@ async def open_vlm_displays(
|
|||
# displayed and the curves are effectively the same minus
|
||||
# liquidity events (well at least on low OHLC periods - 1s).
|
||||
vlm_curve.hide()
|
||||
chart.removeItem(vlm_curve)
|
||||
vflow = chart._flows['volume']
|
||||
vflow.render = False
|
||||
vlm_chart.removeItem(vlm_curve)
|
||||
# vlm_chart.plotItem.layout.setMinimumWidth(0)
|
||||
# vlm_chart.removeAxis('left')
|
||||
vlm_viz = vlm_chart._vizs['volume']
|
||||
|
||||
# avoid range sorting on volume once disabled
|
||||
chart.view.disable_auto_yrange()
|
||||
# NOTE: DON'T DO THIS.
|
||||
# WHY: we want range sorting on volume for the RHS label!
|
||||
# -> if you don't want that then use this but likely you
|
||||
# only will if we decide to drop unit vlm..
|
||||
# vlm_viz.render = False
|
||||
vlm_chart.view.disable_auto_yrange()
|
||||
|
||||
# Trade rate overlay
|
||||
# XXX: requires an additional overlay for
|
||||
# a trades-per-period (time) y-range.
|
||||
tr_pi = chart.overlay_plotitem(
|
||||
tr_pi = vlm_chart.overlay_plotitem(
|
||||
'trade_rates',
|
||||
|
||||
# TODO: dynamically update period (and thus this axis?)
|
||||
# title from user input.
|
||||
axis_title='clears',
|
||||
|
||||
axis_side='left',
|
||||
|
||||
axis_kwargs={
|
||||
'typical_max_str': ' 10.0 M ',
|
||||
'formatter': partial(
|
||||
|
@ -928,18 +873,13 @@ async def open_vlm_displays(
|
|||
},
|
||||
|
||||
)
|
||||
# add custom auto range handler
|
||||
tr_pi.vb.maxmin = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=trade_rate_fields,
|
||||
)
|
||||
tr_pi.hideAxis('bottom')
|
||||
|
||||
await started.wait()
|
||||
chart_curves(
|
||||
trade_rate_fields,
|
||||
tr_pi,
|
||||
fr_flume.rt_shm,
|
||||
fr_flume,
|
||||
# step_mode=True,
|
||||
|
||||
# dashed line to represent "individual trades" being
|
||||
|
|
|
@ -20,8 +20,15 @@ Chart view box primitives
|
|||
"""
|
||||
from __future__ import annotations
|
||||
from contextlib import asynccontextmanager
|
||||
from math import (
|
||||
isinf,
|
||||
)
|
||||
import time
|
||||
from typing import Optional, Callable
|
||||
from typing import (
|
||||
Optional,
|
||||
Callable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import pyqtgraph as pg
|
||||
# from pyqtgraph.GraphicsScene import mouseEvents
|
||||
|
@ -35,10 +42,17 @@ import trio
|
|||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ..data.types import Struct
|
||||
from ..data._pathops import slice_from_time
|
||||
# from ._style import _min_points_to_show
|
||||
from ._editors import SelectRect
|
||||
from . import _event
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import ChartPlotWidget
|
||||
from ._dataviz import Viz
|
||||
# from ._overlay import PlotItemOverlay
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -76,7 +90,6 @@ async def handle_viewmode_kb_inputs(
|
|||
pressed: set[str] = set()
|
||||
|
||||
last = time.time()
|
||||
trigger_mode: str
|
||||
action: str
|
||||
|
||||
on_next_release: Optional[Callable] = None
|
||||
|
@ -332,6 +345,49 @@ async def handle_viewmode_mouse(
|
|||
view.order_mode.submit_order()
|
||||
|
||||
|
||||
class OverlayT(Struct):
|
||||
'''
|
||||
An overlay co-domain range transformer.
|
||||
|
||||
Used to translate and apply a range from one y-range
|
||||
to another based on a returns logarithm:
|
||||
|
||||
R(ymn, ymx, yref) = (ymx - yref)/yref
|
||||
|
||||
which gives the log-scale multiplier, and
|
||||
|
||||
ymx_t = yref * (1 + R)
|
||||
|
||||
which gives the inverse to translate to the same value
|
||||
in the target co-domain.
|
||||
|
||||
'''
|
||||
viz: Viz # viz with largest measured dispersion
|
||||
|
||||
mx: float = 0
|
||||
mn: float = float('inf')
|
||||
|
||||
up_swing: float = 0
|
||||
down_swing: float = 0
|
||||
disp: float = 0
|
||||
|
||||
def loglin_from_range(
|
||||
self,
|
||||
|
||||
y_ref: float, # reference value for dispersion metric
|
||||
mn: float, # min y in target log-lin range
|
||||
mx: float, # max y in target log-lin range
|
||||
offset: float, # y-offset to start log-scaling from
|
||||
|
||||
) -> tuple[float, float]:
|
||||
r_up = (mx - y_ref) / y_ref
|
||||
r_down = (mn - y_ref) / y_ref
|
||||
ymn = offset * (1 + r_down)
|
||||
ymx = offset * (1 + r_up)
|
||||
|
||||
return ymn, ymx
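A small numeric check of the transform described in the docstring above, written as plain arithmetic so it stands on its own (no chart objects involved):

# major curve in view: y_ref = 100 with range [90, 120]
# => r_up = (120 - 100) / 100 = 0.2, r_down = (90 - 100) / 100 = -0.1
# minor curve's reference (offset) value is 50, so its
# log-linearized target range becomes 50*(1 - 0.1) .. 50*(1 + 0.2):
y_ref, mn, mx, offset = 100.0, 90.0, 120.0, 50.0
r_up = (mx - y_ref) / y_ref      # 0.2
r_down = (mn - y_ref) / y_ref    # -0.1
assert (offset * (1 + r_down), offset * (1 + r_up)) == (45.0, 60.0)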
|
||||
|
||||
|
||||
class ChartView(ViewBox):
|
||||
'''
|
||||
Price chart view box with interaction behaviors you'd expect from
|
||||
|
@ -366,7 +422,6 @@ class ChartView(ViewBox):
|
|||
)
|
||||
# for "known y-range style"
|
||||
self._static_yrange = static_yrange
|
||||
self._maxmin = None
|
||||
|
||||
# disable vertical scrolling
|
||||
self.setMouseEnabled(
|
||||
|
@ -375,7 +430,7 @@ class ChartView(ViewBox):
|
|||
)
|
||||
|
||||
self.linked = None
|
||||
self._chart: 'ChartPlotWidget' = None # noqa
|
||||
self._chart: ChartPlotWidget | None = None # noqa
|
||||
|
||||
# add our selection box annotator
|
||||
self.select_box = SelectRect(self)
|
||||
|
@ -387,6 +442,10 @@ class ChartView(ViewBox):
|
|||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
self._ic = None
|
||||
|
||||
# TODO: probably just assign this whenever a new `PlotItem` is
|
||||
# allocated since they're 1to1 with views..
|
||||
self._viz: Viz | None = None
|
||||
|
||||
def start_ic(
|
||||
self,
|
||||
) -> None:
|
||||
|
@ -446,29 +505,18 @@ class ChartView(ViewBox):
|
|||
yield self
|
||||
|
||||
@property
|
||||
def chart(self) -> 'ChartPlotWidget': # type: ignore # noqa
|
||||
def chart(self) -> ChartPlotWidget: # type: ignore # noqa
|
||||
return self._chart
|
||||
|
||||
@chart.setter
|
||||
def chart(self, chart: 'ChartPlotWidget') -> None: # type: ignore # noqa
|
||||
def chart(self, chart: ChartPlotWidget) -> None: # type: ignore # noqa
|
||||
self._chart = chart
|
||||
self.select_box.chart = chart
|
||||
if self._maxmin is None:
|
||||
self._maxmin = chart.maxmin
|
||||
|
||||
@property
|
||||
def maxmin(self) -> Callable:
|
||||
return self._maxmin
|
||||
|
||||
@maxmin.setter
|
||||
def maxmin(self, callback: Callable) -> None:
|
||||
self._maxmin = callback
|
||||
|
||||
def wheelEvent(
|
||||
self,
|
||||
ev,
|
||||
axis=None,
|
||||
# relayed_from: ChartView = None,
|
||||
):
|
||||
'''
|
||||
Override "center-point" location for scrolling.
|
||||
|
@ -483,7 +531,6 @@ class ChartView(ViewBox):
|
|||
if (
|
||||
not linked
|
||||
):
|
||||
# print(f'{self.name} not linked but relay from {relayed_from.name}')
|
||||
return
|
||||
|
||||
if axis in (0, 1):
|
||||
|
@ -495,22 +542,23 @@ class ChartView(ViewBox):
|
|||
chart = self.linked.chart
|
||||
|
||||
# don't zoom more than the min points setting
|
||||
l, lbar, rbar, r = chart.bars_range()
|
||||
# vl = r - l
|
||||
viz = chart.get_viz(chart.name)
|
||||
vl, lbar, rbar, vr = viz.bars_range()
|
||||
|
||||
# if ev.delta() > 0 and vl <= _min_points_to_show:
|
||||
# log.debug("Max zoom bruh...")
|
||||
# TODO: max/min zoom limits incorporating time step size.
|
||||
# rl = vr - vl
|
||||
# if ev.delta() > 0 and rl <= _min_points_to_show:
|
||||
# log.warning("Max zoom bruh...")
|
||||
# return
|
||||
|
||||
# if (
|
||||
# ev.delta() < 0
|
||||
# and vl >= len(chart._flows[chart.name].shm.array) + 666
|
||||
# and rl >= len(chart._vizs[chart.name].shm.array) + 666
|
||||
# ):
|
||||
# log.debug("Min zoom bruh...")
|
||||
# log.warning("Min zoom bruh...")
|
||||
# return
|
||||
|
||||
# actual scaling factor
|
||||
s = 1.015 ** (ev.delta() * -1 / 20) # self.state['wheelScaleFactor'])
|
||||
s = 1.016 ** (ev.delta() * -1 / 20) # self.state['wheelScaleFactor'])
|
||||
s = [(None if m is False else s) for m in mask]
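For reference, one "notch" on a standard mouse wheel reports ``ev.delta() == ±120`` in Qt, so the per-notch scaling above works out to roughly ±10%; a quick standalone check of the same arithmetic:

s_in = 1.016 ** (120 * -1 / 20)    # ~0.909 -> view range shrinks (zoom in)
s_out = 1.016 ** (-120 * -1 / 20)  # ~1.100 -> view range grows (zoom out)
assert round(s_in, 3) == 0.909
assert round(s_out, 3) == 1.100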
|
||||
|
||||
if (
|
||||
|
@ -536,50 +584,19 @@ class ChartView(ViewBox):
|
|||
# scale_y = 1.3 ** (center.y() * -1 / 20)
|
||||
self.scaleBy(s, center)
|
||||
|
||||
# zoom in view-box area
|
||||
else:
|
||||
|
||||
# center = pg.Point(
|
||||
# fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
|
||||
# )
|
||||
|
||||
# XXX: scroll "around" the right most element in the view
|
||||
# which stays "pinned" in place.
|
||||
|
||||
# furthest_right_coord = self.boundingRect().topRight()
|
||||
|
||||
# yaxis = pg.Point(
|
||||
# fn.invertQTransform(
|
||||
# self.childGroup.transform()
|
||||
# ).map(furthest_right_coord)
|
||||
# )
|
||||
|
||||
# This seems like the most "intuitive" option, a hybrid of
|
||||
# tws and tv styles
|
||||
last_bar = pg.Point(int(rbar)) + 1
|
||||
|
||||
ryaxis = chart.getAxis('right')
|
||||
r_axis_x = ryaxis.pos().x()
|
||||
|
||||
end_of_l1 = pg.Point(
|
||||
round(
|
||||
chart.cv.mapToView(
|
||||
pg.Point(r_axis_x - chart._max_l1_line_len)
|
||||
# QPointF(chart._max_l1_line_len, 0)
|
||||
).x()
|
||||
)
|
||||
) # .x()
|
||||
|
||||
# self.state['viewRange'][0][1] = end_of_l1
|
||||
# focal = pg.Point((last_bar.x() + end_of_l1)/2)
|
||||
|
||||
# use right-most point of current curve graphic
|
||||
xl = viz.graphics.x_last()
|
||||
focal = min(
|
||||
last_bar,
|
||||
end_of_l1,
|
||||
key=lambda p: p.x()
|
||||
xl,
|
||||
vr,
|
||||
)
|
||||
# focal = pg.Point(last_bar.x() + end_of_l1)
|
||||
|
||||
self._resetTarget()
|
||||
|
||||
# NOTE: scroll "around" the right most datum-element in view
|
||||
# gives the feeling of staying "pinned" in place.
|
||||
self.scaleBy(s, focal)
|
||||
|
||||
# XXX: the order of the next 2 lines i'm pretty sure
|
||||
|
@ -587,7 +604,7 @@ class ChartView(ViewBox):
|
|||
# update, but i gotta feelin that because this one is signal
|
||||
# based (and thus not necessarily sync invoked right away)
|
||||
# that calling the resize method manually might work better.
|
||||
self.sigRangeChangedManually.emit(mask)
|
||||
# self.sigRangeChangedManually.emit(mask)
|
||||
|
||||
# XXX: without this it seems as though sometimes
|
||||
# when zooming in from far out (and maybe vice versa?)
|
||||
|
@ -597,7 +614,8 @@ class ChartView(ViewBox):
|
|||
# that never seems to happen? Only question is how much this
|
||||
# "double work" is causing latency when these missing event
|
||||
# fires don't happen?
|
||||
self.maybe_downsample_graphics()
|
||||
self.interact_graphics_cycle()
|
||||
self.interact_graphics_cycle()
|
||||
|
||||
ev.accept()
|
||||
|
||||
|
@ -605,21 +623,8 @@ class ChartView(ViewBox):
|
|||
self,
|
||||
ev,
|
||||
axis: Optional[int] = None,
|
||||
# relayed_from: ChartView = None,
|
||||
|
||||
) -> None:
|
||||
# if relayed_from:
|
||||
# print(f'PAN: {self.name} -> RELAYED FROM: {relayed_from.name}')
|
||||
|
||||
# NOTE since in the overlay case axes are already
|
||||
# "linked" any x-range change will already be mirrored
|
||||
# in all overlaid ``PlotItems``, so we need to simply
|
||||
# ignore the signal here since otherwise we get N-calls
|
||||
# from N-overlays resulting in an "accelerated" feeling
|
||||
# panning motion instead of the expected linear shift.
|
||||
# if relayed_from:
|
||||
# return
|
||||
|
||||
pos = ev.pos()
|
||||
lastPos = ev.lastPos()
|
||||
dif = pos - lastPos
|
||||
|
@ -629,7 +634,10 @@ class ChartView(ViewBox):
|
|||
button = ev.button()
|
||||
|
||||
# Ignore axes if mouse is disabled
|
||||
mouseEnabled = np.array(self.state['mouseEnabled'], dtype=np.float)
|
||||
mouseEnabled = np.array(
|
||||
self.state['mouseEnabled'],
|
||||
dtype=np.float,
|
||||
)
|
||||
mask = mouseEnabled.copy()
|
||||
if axis is not None:
|
||||
mask[1-axis] = 0.0
|
||||
|
@ -689,9 +697,6 @@ class ChartView(ViewBox):
|
|||
|
||||
# PANNING MODE
|
||||
else:
|
||||
# XXX: WHY
|
||||
ev.accept()
|
||||
|
||||
try:
|
||||
self.start_ic()
|
||||
except RuntimeError:
|
||||
|
@ -715,7 +720,10 @@ class ChartView(ViewBox):
|
|||
if x is not None or y is not None:
|
||||
self.translateBy(x=x, y=y)
|
||||
|
||||
self.sigRangeChangedManually.emit(self.state['mouseEnabled'])
|
||||
# self.sigRangeChangedManually.emit(mask)
|
||||
# self.state['mouseEnabled']
|
||||
# )
|
||||
self.interact_graphics_cycle()
|
||||
|
||||
if ev.isFinish():
|
||||
self.signal_ic()
|
||||
|
@ -723,6 +731,9 @@ class ChartView(ViewBox):
|
|||
# self._ic = None
|
||||
# self.chart.resume_all_feeds()
|
||||
|
||||
# # XXX: WHY
|
||||
# ev.accept()
|
||||
|
||||
# WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
|
||||
elif button & QtCore.Qt.RightButton:
|
||||
|
||||
|
@ -743,10 +754,12 @@ class ChartView(ViewBox):
|
|||
center = Point(tr.map(ev.buttonDownPos(QtCore.Qt.RightButton)))
|
||||
self._resetTarget()
|
||||
self.scaleBy(x=x, y=y, center=center)
|
||||
self.sigRangeChangedManually.emit(self.state['mouseEnabled'])
|
||||
|
||||
# XXX: WHY
|
||||
ev.accept()
|
||||
# self.sigRangeChangedManually.emit(self.state['mouseEnabled'])
|
||||
self.interact_graphics_cycle()
|
||||
|
||||
# XXX: WHY
|
||||
ev.accept()
|
||||
|
||||
# def mouseClickEvent(self, event: QtCore.QEvent) -> None:
|
||||
# '''This routine is rerouted to an async handler.
|
||||
|
@ -768,7 +781,12 @@ class ChartView(ViewBox):
|
|||
*,
|
||||
|
||||
yrange: Optional[tuple[float, float]] = None,
|
||||
range_margin: float = 0.06,
|
||||
viz: Viz | None = None,
|
||||
|
||||
# NOTE: this value pairs (more or less) with L1 label text
|
||||
# height offset from from the bid/ask lines.
|
||||
range_margin: float | None = 0.09,
|
||||
|
||||
bars_range: Optional[tuple[int, int, int, int]] = None,
|
||||
|
||||
# flag to prevent triggering sibling charts from the same linked
|
||||
|
@ -786,14 +804,13 @@ class ChartView(ViewBox):
|
|||
|
||||
'''
|
||||
name = self.name
|
||||
# print(f'YRANGE ON {name}')
|
||||
# print(f'YRANGE ON {name} -> yrange{yrange}')
|
||||
profiler = Profiler(
|
||||
msg=f'`ChartView._set_yrange()`: `{name}`',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
delayed=True,
|
||||
)
|
||||
set_range = True
|
||||
chart = self._chart
|
||||
|
||||
# view has been set in 'axis' mode
|
||||
|
@ -802,8 +819,8 @@ class ChartView(ViewBox):
|
|||
# - disable autoranging
|
||||
# - remove any y range limits
|
||||
if chart._static_yrange == 'axis':
|
||||
set_range = False
|
||||
self.setLimits(yMin=None, yMax=None)
|
||||
return
|
||||
|
||||
# static y-range has been set likely by
|
||||
# a specialized FSP configuration.
|
||||
|
@ -816,42 +833,70 @@ class ChartView(ViewBox):
|
|||
elif yrange is not None:
|
||||
ylow, yhigh = yrange
|
||||
|
||||
if set_range:
|
||||
# XXX: only compute the mxmn range
|
||||
# if none is provided as input!
|
||||
if not yrange:
|
||||
|
||||
# XXX: only compute the mxmn range
|
||||
# if none is provided as input!
|
||||
if not yrange:
|
||||
# flow = chart._flows[name]
|
||||
yrange = self._maxmin()
|
||||
if not viz:
|
||||
breakpoint()
|
||||
|
||||
if yrange is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
print(f"WTF NO YRANGE {name}")
|
||||
return
|
||||
out = viz.maxmin()
|
||||
if out is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
return
|
||||
(
|
||||
ixrng,
|
||||
_,
|
||||
yrange
|
||||
) = out
|
||||
|
||||
profiler(f'`{self.name}:Viz.maxmin()` -> {ixrng}=>{yrange}')
|
||||
|
||||
if yrange is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
return
|
||||
|
||||
ylow, yhigh = yrange
|
||||
|
||||
profiler(f'callback ._maxmin(): {yrange}')
|
||||
|
||||
# view margins: stay within a % of the "true range"
|
||||
# view margins: stay within a % of the "true range"
|
||||
if range_margin is not None:
|
||||
diff = yhigh - ylow
|
||||
ylow = ylow - (diff * range_margin)
|
||||
yhigh = yhigh + (diff * range_margin)
|
||||
|
||||
# XXX: this often needs to be unset
|
||||
# to get different view modes to operate
|
||||
# correctly!
|
||||
self.setLimits(
|
||||
yMin=ylow,
|
||||
yMax=yhigh,
|
||||
ylow = max(
|
||||
ylow - (diff * range_margin),
|
||||
0,
|
||||
)
|
||||
yhigh = min(
|
||||
yhigh + (diff * range_margin),
|
||||
yhigh * (1 + range_margin),
|
||||
)
|
||||
self.setYRange(ylow, yhigh)
|
||||
profiler(f'set limits: {(ylow, yhigh)}')
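The margin clamping above keeps the padded range from going negative on the low side and from over-expanding on the high side; a standalone numeric check of the same arithmetic (sample values only):

ylow, yhigh, range_margin = 10.0, 110.0, 0.09
diff = yhigh - ylow                                # 100.0
padded_low = max(ylow - diff * range_margin, 0)    # 1.0, clamped at >= 0
padded_high = min(
    yhigh + diff * range_margin,                   # 119.0
    yhigh * (1 + range_margin),                    # 119.9 upper cap
)
assert round(padded_low, 6) == 1.0
assert round(padded_high, 6) == 119.0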
|
||||
|
||||
# XXX: this often needs to be unset
|
||||
# to get different view modes to operate
|
||||
# correctly!
|
||||
|
||||
# print(
|
||||
# f'set limits {self.name}:\n'
|
||||
# f'ylow: {ylow}\n'
|
||||
# f'yhigh: {yhigh}\n'
|
||||
# )
|
||||
self.setYRange(
|
||||
ylow,
|
||||
yhigh,
|
||||
padding=0,
|
||||
)
|
||||
self.setLimits(
|
||||
yMin=ylow,
|
||||
yMax=yhigh,
|
||||
)
|
||||
self.update()
|
||||
|
||||
# LOL: yet another pg bug..
|
||||
# can't use `msg=f'setYRange({ylow}, {yhigh}')`
|
||||
profiler.finish()
|
||||
|
||||
def enable_auto_yrange(
|
||||
self,
|
||||
viz: Viz,
|
||||
src_vb: Optional[ChartView] = None,
|
||||
|
||||
) -> None:
|
||||
|
@ -863,9 +908,6 @@ class ChartView(ViewBox):
|
|||
if src_vb is None:
|
||||
src_vb = self
|
||||
|
||||
# widget-UIs/splitter(s) resizing
|
||||
src_vb.sigResized.connect(self._set_yrange)
|
||||
|
||||
# re-sampling trigger:
|
||||
# TODO: a smarter way to avoid calling this needlessly?
|
||||
# 2 things i can think of:
|
||||
|
@ -873,37 +915,21 @@ class ChartView(ViewBox):
|
|||
# iterate those.
|
||||
# - only register this when certain downsample-able graphics are
|
||||
# "added to scene".
|
||||
src_vb.sigRangeChangedManually.connect(
|
||||
self.maybe_downsample_graphics
|
||||
)
|
||||
# mouse wheel doesn't emit XRangeChanged
|
||||
src_vb.sigRangeChangedManually.connect(self._set_yrange)
|
||||
# src_vb.sigRangeChangedManually.connect(
|
||||
# self.interact_graphics_cycle
|
||||
# )
|
||||
|
||||
# XXX: enabling these will cause "jittery"-ness
|
||||
# on zoom where sharp diffs in the y-range will
|
||||
# not re-size right away until a new sample update?
|
||||
# if src_vb is not self:
|
||||
# src_vb.sigXRangeChanged.connect(self._set_yrange)
|
||||
# src_vb.sigXRangeChanged.connect(
|
||||
# self.maybe_downsample_graphics
|
||||
# )
|
||||
# widget-UIs/splitter(s) resizing
|
||||
src_vb.sigResized.connect(
|
||||
self.interact_graphics_cycle
|
||||
)
|
||||
|
||||
def disable_auto_yrange(self) -> None:
|
||||
|
||||
# XXX: not entirely sure why we can't de-reg this..
|
||||
self.sigResized.disconnect(
|
||||
self._set_yrange,
|
||||
self.interact_graphics_cycle
|
||||
)
|
||||
self.sigRangeChangedManually.disconnect(
|
||||
self.maybe_downsample_graphics
|
||||
)
|
||||
self.sigRangeChangedManually.disconnect(
|
||||
self._set_yrange,
|
||||
)
|
||||
|
||||
# self.sigXRangeChanged.disconnect(self._set_yrange)
|
||||
# self.sigXRangeChanged.disconnect(
|
||||
# self.maybe_downsample_graphics
|
||||
# )
|
||||
|
||||
def x_uppx(self) -> float:
|
||||
'''
|
||||
|
@ -912,7 +938,7 @@ class ChartView(ViewBox):
|
|||
graphics items which are our children.
|
||||
|
||||
'''
|
||||
graphics = [f.graphics for f in self._chart._flows.values()]
|
||||
graphics = [f.graphics for f in self._chart._vizs.values()]
|
||||
if not graphics:
|
||||
return 0
|
||||
|
||||
|
@ -923,59 +949,441 @@ class ChartView(ViewBox):
|
|||
else:
|
||||
return 0
|
||||
|
||||
def maybe_downsample_graphics(
|
||||
def interact_graphics_cycle(
|
||||
self,
|
||||
autoscale_overlays: bool = True,
|
||||
*args, # capture signal-handler related shit
|
||||
|
||||
debug_print: bool = False,
|
||||
do_overlay_scaling: bool = True,
|
||||
do_linked_charts: bool = True,
|
||||
|
||||
yranges: tuple[float, float] | None = None,
|
||||
):
|
||||
profiler = Profiler(
|
||||
msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
|
||||
msg=f'ChartView.interact_graphics_cycle() for {self.name}',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
|
||||
# XXX: important to avoid not seeing underlying
|
||||
# ``.update_graphics_from_flow()`` nested profiling likely
|
||||
# ``Viz.update_graphics()`` nested profiling likely
|
||||
# due to the way delaying works and garbage collection of
|
||||
# the profiler in the delegated method calls.
|
||||
ms_threshold=6,
|
||||
# ms_threshold=ms_slower_then,
|
||||
delayed=True,
|
||||
|
||||
# for hardcore latency checking, comment these flags above.
|
||||
# disabled=False,
|
||||
# ms_threshold=4,
|
||||
)
|
||||
|
||||
# TODO: a faster single-loop-iterator way of doing this XD
|
||||
chart = self._chart
|
||||
plots = {chart.name: chart}
|
||||
|
||||
linked = self.linked
|
||||
if linked:
|
||||
if (
|
||||
do_linked_charts
|
||||
and linked
|
||||
):
|
||||
plots = {linked.chart.name: linked.chart}
|
||||
plots |= linked.subplots
|
||||
else:
|
||||
plots = {chart.name: chart}
|
||||
|
||||
# TODO: a faster single-loop-iterator way of doing this?
|
||||
for chart_name, chart in plots.items():
|
||||
for name, flow in chart._flows.items():
|
||||
|
||||
if (
|
||||
not flow.render
|
||||
# Common `PlotItem` maxmin table; presumes that some path
|
||||
# graphics (and thus their backing data sets) are in the
|
||||
# same co-domain and view box (since they were added
|
||||
# as separate graphics objects to a common plot) and thus can
|
||||
# be sorted as one set per plot.
|
||||
mxmns_by_common_pi: dict[
|
||||
pg.PlotItem,
|
||||
tuple[float, float],
|
||||
] = {}
|
||||
|
||||
# XXX: super important to be aware of this.
|
||||
# or not flow.graphics.isVisible()
|
||||
):
|
||||
# proportional group auto-scaling per overlay set.
|
||||
# -> loop through overlays on each multi-chart widget
|
||||
# and scale all y-ranges based on autoscale config.
|
||||
# -> for any "group" overlay we want to dispersion normalize
|
||||
# and scale minor charts onto the major chart: the chart
|
||||
# with the most dispersion in the set.
|
||||
major_viz: Viz = None
|
||||
major_mx: float = 0
|
||||
major_mn: float = float('inf')
|
||||
# mx_up_rng: float = 0
|
||||
# mn_down_rng: float = 0
|
||||
mx_disp: float = 0
|
||||
|
||||
# collect certain flows which have graphics objects **in separate
|
||||
# plots/viewboxes** into groups and do a common calc to
|
||||
# determine auto-ranging input for `._set_yrange()`.
|
||||
# this is primarily used for our so-called "log-linearized
|
||||
# multi-plot" overlay technique.
|
||||
start_datums: dict[
|
||||
ViewBox,
|
||||
tuple[
|
||||
Viz,
|
||||
float, # y start
|
||||
float, # y min
|
||||
float, # y max
|
||||
float, # y median
|
||||
slice, # in-view array slice
|
||||
np.ndarray, # in-view array
|
||||
],
|
||||
] = {}
|
||||
major_in_view: np.ndarray = None
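A boiled-down sketch of the "pick the major curve" step described in the comments above: each overlaid curve reports its in-view start value and min/max, and the one with the largest relative dispersion wins. Plain Python with made-up sample values, no chart objects:

curves = {
    # name: (y_start, ymn, ymx) for the in-view slice
    'btcusdt': (20_000.0, 19_000.0, 22_000.0),
    'ethusdt': (1_500.0, 1_480.0, 1_530.0),
}

major_name, mx_disp = None, 0.0
for name, (y_start, ymn, ymx) in curves.items():
    # same metric as the loop body below
    disp = abs(ymx - ymn) / y_start
    if disp > mx_disp:
        major_name, mx_disp = name, disp

assert major_name == 'btcusdt'  # ~15% swing vs ~3.3% for the minor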
|
||||
|
||||
for name, viz in chart._vizs.items():
|
||||
|
||||
if not viz.render:
|
||||
# print(f'skipping {flow.name}')
|
||||
continue
|
||||
|
||||
# pass in no array which will read and render from the last
|
||||
# passed array (normally provided by the display loop.)
|
||||
chart.update_graphics_from_flow(
|
||||
name,
|
||||
use_vr=True,
|
||||
in_view, i_read_range, _ = viz.update_graphics()
|
||||
|
||||
if not in_view:
|
||||
continue
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} `Viz.update_graphics()`')
|
||||
|
||||
yrange = yranges.get(viz) if yranges else None
|
||||
if yrange is not None:
|
||||
# print(f'INPUT {viz.name} yrange: {yrange}')
|
||||
read_slc = slice(*i_read_range)
|
||||
|
||||
else:
|
||||
out = viz.maxmin(i_read_range=i_read_range)
|
||||
if out is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
return
|
||||
(
|
||||
_, # ixrng,
|
||||
read_slc,
|
||||
yrange
|
||||
) = out
|
||||
profiler(f'{viz.name}@{chart_name} `Viz.maxmin()`')
|
||||
|
||||
pi = viz.plot
|
||||
|
||||
# handle multiple graphics-objs per viewbox cases
|
||||
mxmn = mxmns_by_common_pi.get(pi)
|
||||
if mxmn:
|
||||
yrange = mxmns_by_common_pi[pi] = (
|
||||
min(yrange[0], mxmn[0]),
|
||||
max(yrange[1], mxmn[1]),
|
||||
)
|
||||
|
||||
else:
|
||||
mxmns_by_common_pi[pi] = yrange
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} common pi sort')
|
||||
|
||||
# handle overlay log-linearized group scaling cases
|
||||
# TODO: a better predicate here, likely something
|
||||
# to do with overlays and their settings..
|
||||
if (
|
||||
viz.is_ohlc
|
||||
):
|
||||
ymn, ymx = yrange
|
||||
# print(f'adding {viz.name} to overlay')
|
||||
|
||||
# determine start datum in view
|
||||
arr = viz.shm.array
|
||||
in_view = arr[read_slc]
|
||||
if not in_view.size:
|
||||
log.warning(f'{viz.name} not in view?')
|
||||
return
|
||||
|
||||
row_start = arr[read_slc.start - 1]
|
||||
|
||||
if viz.is_ohlc:
|
||||
y_start = row_start['open']
|
||||
else:
|
||||
y_start = row_start[viz.name]
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} MINOR curve median')
|
||||
|
||||
start_datums[viz.plot.vb] = (
|
||||
viz,
|
||||
y_start,
|
||||
ymn,
|
||||
ymx,
|
||||
read_slc,
|
||||
in_view,
|
||||
)
|
||||
|
||||
# find curve with max dispersion
|
||||
disp = abs(ymx - ymn) / y_start
|
||||
|
||||
# track the "major" curve as the curve with most
|
||||
# dispersion.
|
||||
if disp > mx_disp:
|
||||
major_viz = viz
|
||||
mx_disp = disp
|
||||
major_mn = ymn
|
||||
major_mx = ymx
|
||||
major_in_view = in_view
|
||||
profiler(f'{viz.name}@{chart_name} set new major')
|
||||
|
||||
# compute directional (up/down) y-range % swing/dispersion
|
||||
# y_ref = y_med
|
||||
# up_rng = (ymx - y_ref) / y_ref
|
||||
# down_rng = (ymn - y_ref) / y_ref
|
||||
# mx_up_rng = max(mx_up_rng, up_rng)
|
||||
# mn_down_rng = min(mn_down_rng, down_rng)
|
||||
# print(
|
||||
# f'{viz.name}@{chart_name} group mxmn calc\n'
|
||||
# '--------------------\n'
|
||||
# f'y_start: {y_start}\n'
|
||||
# f'ymn: {ymn}\n'
|
||||
# f'ymx: {ymx}\n'
|
||||
# f'mx_disp: {mx_disp}\n'
|
||||
# f'up %: {up_rng * 100}\n'
|
||||
# f'down %: {down_rng * 100}\n'
|
||||
# f'mx up %: {mx_up_rng * 100}\n'
|
||||
# f'mn down %: {mn_down_rng * 100}\n'
|
||||
# )
|
||||
profiler(f'{viz.name}@{chart_name} MINOR curve scale')
|
||||
|
||||
# non-overlay group case
|
||||
else:
|
||||
pi.vb._set_yrange(yrange=yrange)
|
||||
profiler(
|
||||
f'{viz.name}@{chart_name} simple std `._set_yrange()`'
|
||||
)
|
||||
|
||||
profiler(f'<{chart_name}>.interact_graphics_cycle({name})')
|
||||
if not start_datums:
|
||||
return
|
||||
|
||||
# if no overlays, set lone chart's yrange and short circuit
|
||||
if (
|
||||
len(start_datums) < 2
|
||||
or not do_overlay_scaling
|
||||
):
|
||||
# print(f'ONLY ranging major: {viz.name}')
|
||||
if not major_viz:
|
||||
major_viz = viz
|
||||
|
||||
major_viz.plot.vb._set_yrange(
|
||||
yrange=yrange,
|
||||
)
|
||||
profiler(f'{viz.name}@{chart_name} single curve yrange')
|
||||
return
|
||||
|
||||
# for each overlay on this chart auto-scale the
|
||||
# y-range to max-min values.
|
||||
if autoscale_overlays:
|
||||
overlay = chart.pi_overlay
|
||||
if overlay:
|
||||
for pi in overlay.overlays:
|
||||
pi.vb._set_yrange(
|
||||
# TODO: get the range once up front...
|
||||
# bars_range=br,
|
||||
# conduct "log-linearized multi-plot" scalings for all groups
|
||||
for (
|
||||
view,
|
||||
(
|
||||
viz,
|
||||
y_start,
|
||||
y_min,
|
||||
y_max,
|
||||
# y_med,
|
||||
read_slc,
|
||||
minor_in_view,
|
||||
)
|
||||
) in start_datums.items():
|
||||
|
||||
# we use the ymn/mx verbatim from the major curve
|
||||
# (i.e. the curve measured to have the highest
|
||||
# dispersion in view).
|
||||
if viz is major_viz:
|
||||
ymn = y_min
|
||||
ymx = y_max
|
||||
continue
|
||||
|
||||
else:
|
||||
key = 'open' if viz.is_ohlc else viz.name
|
||||
|
||||
# handle case where major and minor curve(s) have
|
||||
# a disjoint x-domain (one curve is smaller in
|
||||
# length than the other):
|
||||
# - find the highest (time) index common to both
|
||||
# curves.
|
||||
# - slice out the first "intersecting" y-value from
|
||||
# both curves for use in log-linear scaling such
|
||||
# that the intersecting y-value is used as the
|
||||
# reference point for scaling minor curve's
|
||||
# y-range based on the major curves y-range.
|
||||
|
||||
# get intersection point y-values for both curves
|
||||
minor_in_view_start = minor_in_view[0]
|
||||
minor_i_start = minor_in_view_start['index']
|
||||
minor_i_start_t = minor_in_view_start['time']
|
||||
|
||||
major_in_view_start = major_in_view[0]
|
||||
major_i_start = major_in_view_start['index']
|
||||
major_i_start_t = major_in_view_start['time']
|
||||
|
||||
y_major_intersect = major_in_view_start[key]
|
||||
y_minor_intersect = minor_in_view_start[key]
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} intersect detection')
|
||||
|
||||
tdiff = (major_i_start_t - minor_i_start_t)
|
||||
if debug_print:
|
||||
print(
|
||||
f'{major_viz.name} time diff with minor:\n'
|
||||
f'maj:{major_i_start_t}\n'
|
||||
'-\n'
|
||||
f'min:{minor_i_start_t}\n'
|
||||
f'=> {tdiff}\n'
|
||||
)
|
||||
|
||||
# major has later timestamp adjust minor
|
||||
if tdiff > 0:
|
||||
slc = slice_from_time(
|
||||
arr=minor_in_view,
|
||||
start_t=major_i_start_t,
|
||||
stop_t=major_i_start_t,
|
||||
)
|
||||
y_minor_intersect = minor_in_view[slc.start][key]
|
||||
profiler(f'{viz.name}@{chart_name} intersect by t')
|
||||
|
||||
# minor has later timestamp adjust major
|
||||
elif tdiff < 0:
|
||||
slc = slice_from_time(
|
||||
arr=major_in_view,
|
||||
start_t=minor_i_start_t,
|
||||
stop_t=minor_i_start_t,
|
||||
)
|
||||
y_major_intersect = major_in_view[slc.start][key]
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} intersect by t')
|
||||
|
||||
if debug_print:
|
||||
print(
|
||||
f'major_i_start: {major_i_start}\n'
|
||||
f'major_i_start_t: {major_i_start_t}\n'
|
||||
f'minor_i_start: {minor_i_start}\n'
|
||||
f'minor_i_start_t: {minor_i_start_t}\n'
|
||||
)
|
||||
|
||||
# TODO: probably write this as a compiled cython or
|
||||
# numba func.
|
||||
|
||||
# compute directional (up/down) y-range
|
||||
# % swing/dispersion starting at the reference index
|
||||
# determined by the above indexing arithmetic.
|
||||
y_ref = y_major_intersect
|
||||
if not y_ref:
|
||||
log.warning(
|
||||
f'BAD y_major_intersect?!: {y_major_intersect}'
|
||||
)
|
||||
# breakpoint()
|
||||
|
||||
r_up = (major_mx - y_ref) / y_ref
|
||||
r_down = (major_mn - y_ref) / y_ref
|
||||
|
||||
minor_y_start = y_minor_intersect
|
||||
ymn = minor_y_start * (1 + r_down)
|
||||
ymx = minor_y_start * (1 + r_up)
|
||||
|
||||
profiler(f'{viz.name}@{chart_name} SCALE minor')
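The step just above re-applies the major curve's percent swing at the minor curve's own reference value, once the two intersection y-values are known; a standalone numeric sketch of that arithmetic (values are made up):

y_major_intersect = 100.0
major_mn, major_mx = 95.0, 110.0

r_up = (major_mx - y_major_intersect) / y_major_intersect    # +10%
r_down = (major_mn - y_major_intersect) / y_major_intersect  # -5%

y_minor_intersect = 40.0
ymn = y_minor_intersect * (1 + r_down)   # 38.0
ymx = y_minor_intersect * (1 + r_up)     # 44.0
assert (round(ymn, 6), round(ymx, 6)) == (38.0, 44.0)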
|
||||
|
||||
# XXX: handle out of view cases where minor curve
|
||||
# now is outside the range of the major curve. in
|
||||
# this case we then re-scale the major curve to
|
||||
# include the range missing now enforced by the
|
||||
# minor (now new major for this *side*). Note this
|
||||
# is side (up/down) specific.
|
||||
new_maj_mxmn: None | tuple[float, float] = None
|
||||
if y_max > ymx:
|
||||
|
||||
y_ref = y_minor_intersect
|
||||
r_up_minor = (y_max - y_ref) / y_ref
|
||||
|
||||
y_maj_ref = y_major_intersect
|
||||
new_maj_ymx = y_maj_ref * (1 + r_up_minor)
|
||||
new_maj_mxmn = (major_mn, new_maj_ymx)
|
||||
if debug_print:
|
||||
print(
|
||||
f'{view.name} OUT OF RANGE:\n'
|
||||
'--------------------\n'
|
||||
f'y_max:{y_max} > ymx:{ymx}\n'
|
||||
)
|
||||
profiler('autoscaled linked plots')
|
||||
ymx = y_max
|
||||
profiler(f'{viz.name}@{chart_name} re-SCALE major UP')
|
||||
|
||||
profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
|
||||
if y_min < ymn:
|
||||
|
||||
y_ref = y_minor_intersect
|
||||
r_down_minor = (y_min - y_ref) / y_ref
|
||||
|
||||
y_maj_ref = y_major_intersect
|
||||
new_maj_ymn = y_maj_ref * (1 + r_down_minor)
|
||||
new_maj_mxmn = (
|
||||
new_maj_ymn,
|
||||
new_maj_mxmn[1] if new_maj_mxmn else major_mx
|
||||
)
|
||||
if debug_print:
|
||||
print(
|
||||
f'{view.name} OUT OF RANGE:\n'
|
||||
'--------------------\n'
|
||||
f'y_min:{y_min} < ymn:{ymn}\n'
|
||||
)
|
||||
ymn = y_min
|
||||
|
||||
profiler(
|
||||
f'{viz.name}@{chart_name} re-SCALE major DOWN'
|
||||
)
|
||||
|
||||
if new_maj_mxmn:
|
||||
if debug_print:
|
||||
print(
|
||||
f'RESCALE MAJOR {major_viz.name}:\n'
|
||||
f'previous: {(major_mn, major_mx)}\n'
|
||||
f'new: {new_maj_mxmn}\n'
|
||||
)
|
||||
major_mn, major_mx = new_maj_mxmn
|
||||
|
||||
if debug_print:
|
||||
print(
|
||||
f'{view.name} APPLY group mxmn\n'
|
||||
'--------------------\n'
|
||||
f'y_minor_intersect: {y_minor_intersect}\n'
|
||||
f'y_major_intersect: {y_major_intersect}\n'
|
||||
# f'mn_down_rng: {mn_down_rng * 100}\n'
|
||||
# f'mx_up_rng: {mx_up_rng * 100}\n'
|
||||
f'scaled ymn: {ymn}\n'
|
||||
f'scaled ymx: {ymx}\n'
|
||||
f'scaled mx_disp: {mx_disp}\n'
|
||||
)
|
||||
|
||||
if (
|
||||
isinf(ymx)
|
||||
or isinf(ymn)
|
||||
):
|
||||
log.warning(
|
||||
f'BAD ymx/ymn: {(ymn, ymx)}'
|
||||
)
|
||||
continue
|
||||
|
||||
view._set_yrange(
|
||||
yrange=(ymn, ymx),
|
||||
)
|
||||
profiler(f'{viz.name}@{chart_name} log-SCALE minor')
|
||||
|
||||
# NOTE XXX: we have to set the major curve's range once (and
|
||||
# only once) here since we're doing this entire routine
|
||||
# inside of a single render cycle (and apparently calling
|
||||
# `ViewBox.setYRange()` multiple times within one only takes
|
||||
# the first call seriously...) XD
|
||||
if debug_print:
|
||||
print(
|
||||
f'Scale MAJOR {major_viz.name}:\n'
|
||||
f'scaled mx_disp: {mx_disp}\n'
|
||||
f'previous: {(major_mn, major_mx)}\n'
|
||||
f'new: {new_maj_mxmn}\n'
|
||||
)
|
||||
major_viz.plot.vb._set_yrange(
|
||||
yrange=(major_mn, major_mx),
|
||||
)
|
||||
profiler(f'{viz.name}@{chart_name} log-SCALE major')
|
||||
# major_mx, major_mn = new_maj_mxmn
|
||||
# vrs = major_viz.plot.vb.viewRange()
|
||||
# if vrs[1][0] > major_mn:
|
||||
# breakpoint()
|
||||
|
||||
profiler.finish()
|
||||
|
|
|
@ -30,19 +30,20 @@ from ._pg_overrides import PlotItem
|
|||
|
||||
|
||||
class LevelLabel(YAxisLabel):
|
||||
"""Y-axis (vertically) oriented, horizontal label that sticks to
|
||||
'''
|
||||
Y-axis (vertically) oriented, horizontal label that sticks to
|
||||
where it's placed despite chart resizing and supports displaying
|
||||
multiple fields.
|
||||
|
||||
|
||||
TODO: replace the rectangle-text part with our new ``Label`` type.
|
||||
|
||||
"""
|
||||
_x_margin = 0
|
||||
_y_margin = 0
|
||||
'''
|
||||
_x_br_offset: float = -16
|
||||
_y_txt_h_scaling: float = 2
|
||||
|
||||
# adjustment "further away from" anchor point
|
||||
_x_offset = 9
|
||||
_x_offset = 0
|
||||
_y_offset = 0
|
||||
|
||||
# fields to be displayed in the label string
|
||||
|
@ -58,12 +59,12 @@ class LevelLabel(YAxisLabel):
|
|||
chart,
|
||||
parent,
|
||||
|
||||
color: str = 'bracket',
|
||||
color: str = 'default_light',
|
||||
|
||||
orient_v: str = 'bottom',
|
||||
orient_h: str = 'left',
|
||||
orient_h: str = 'right',
|
||||
|
||||
opacity: float = 0,
|
||||
opacity: float = 1,
|
||||
|
||||
# makes order line labels offset from their parent axis
|
||||
# such that they don't collide with the L1/L2 lines/prices
|
||||
|
@ -99,13 +100,15 @@ class LevelLabel(YAxisLabel):
|
|||
|
||||
self._h_shift = {
|
||||
'left': -1.,
|
||||
'right': 0.
|
||||
'right': 0.,
|
||||
}[orient_h]
|
||||
|
||||
self.fields = self._fields.copy()
|
||||
# ensure default format fields are in correct
|
||||
self.set_fmt_str(self._fmt_str, self.fields)
|
||||
|
||||
self.setZValue(10)
|
||||
|
||||
@property
|
||||
def color(self):
|
||||
return self._hcolor
|
||||
|
@ -113,7 +116,10 @@ class LevelLabel(YAxisLabel):
|
|||
@color.setter
|
||||
def color(self, color: str) -> None:
|
||||
self._hcolor = color
|
||||
self._pen = self.pen = pg.mkPen(hcolor(color))
|
||||
self._pen = self.pen = pg.mkPen(
|
||||
hcolor(color),
|
||||
width=3,
|
||||
)
|
||||
|
||||
def update_on_resize(self, vr, r):
|
||||
"""Tiis is a ``.sigRangeChanged()`` handler.
|
||||
|
@ -125,10 +131,11 @@ class LevelLabel(YAxisLabel):
|
|||
self,
|
||||
fields: dict = None,
|
||||
) -> None:
|
||||
"""Update the label's text contents **and** position from
|
||||
'''
|
||||
Update the label's text contents **and** position from
|
||||
a view box coordinate datum.
|
||||
|
||||
"""
|
||||
'''
|
||||
self.fields.update(fields)
|
||||
level = self.fields['level']
|
||||
|
||||
|
@ -175,7 +182,8 @@ class LevelLabel(YAxisLabel):
|
|||
fields: dict,
|
||||
):
|
||||
# use space as e3 delim
|
||||
self.label_str = self._fmt_str.format(**fields).replace(',', ' ')
|
||||
self.label_str = self._fmt_str.format(
|
||||
**fields).replace(',', ' ')
|
||||
|
||||
br = self.boundingRect()
|
||||
h, w = br.height(), br.width()
|
||||
|
@ -188,14 +196,14 @@ class LevelLabel(YAxisLabel):
|
|||
self,
|
||||
p: QtGui.QPainter,
|
||||
rect: QtCore.QRectF
|
||||
) -> None:
|
||||
p.setPen(self._pen)
|
||||
|
||||
) -> None:
|
||||
|
||||
p.setPen(self._pen)
|
||||
rect = self.rect
|
||||
|
||||
if self._orient_v == 'bottom':
|
||||
lp, rp = rect.topLeft(), rect.topRight()
|
||||
# p.drawLine(rect.topLeft(), rect.topRight())
|
||||
|
||||
elif self._orient_v == 'top':
|
||||
lp, rp = rect.bottomLeft(), rect.bottomRight()
|
||||
|
@ -209,6 +217,11 @@ class LevelLabel(YAxisLabel):
|
|||
])
|
||||
)
|
||||
|
||||
p.fillRect(
|
||||
self.rect,
|
||||
self.bg_color,
|
||||
)
|
||||
|
||||
def highlight(self, pen) -> None:
|
||||
self._pen = pen
|
||||
self.update()
|
||||
|
@ -247,9 +260,10 @@ class L1Label(LevelLabel):
|
|||
|
||||
|
||||
class L1Labels:
|
||||
"""Level 1 bid ask labels for dynamic update on price-axis.
|
||||
'''
|
||||
Level 1 bid ask labels for dynamic update on price-axis.
|
||||
|
||||
"""
|
||||
'''
|
||||
def __init__(
|
||||
self,
|
||||
plotitem: PlotItem,
|
||||
|
@ -265,15 +279,17 @@ class L1Labels:
|
|||
'chart': plotitem,
|
||||
'parent': raxis,
|
||||
|
||||
'opacity': 1,
|
||||
'opacity': .9,
|
||||
'font_size': font_size,
|
||||
'fg_color': chart.pen_color,
|
||||
'bg_color': chart.view_color,
|
||||
'fg_color': 'default_light',
|
||||
'bg_color': chart.view_color, # normally 'papas_special'
|
||||
}
|
||||
|
||||
# TODO: add humanized source-asset
|
||||
# info format.
|
||||
fmt_str = (
|
||||
' {size:.{size_digits}f} x '
|
||||
'{level:,.{level_digits}f} '
|
||||
' {size:.{size_digits}f} u'
|
||||
# '{level:,.{level_digits}f} '
|
||||
)
|
||||
fields = {
|
||||
'level': 0,
|
||||
|
@ -286,12 +302,17 @@ class L1Labels:
|
|||
orient_v='bottom',
|
||||
**kwargs,
|
||||
)
|
||||
bid.set_fmt_str(fmt_str=fmt_str, fields=fields)
|
||||
bid.set_fmt_str(
|
||||
fmt_str='\n' + fmt_str,
|
||||
fields=fields,
|
||||
)
|
||||
bid.show()
|
||||
|
||||
ask = self.ask_label = L1Label(
|
||||
orient_v='top',
|
||||
**kwargs,
|
||||
)
|
||||
ask.set_fmt_str(fmt_str=fmt_str, fields=fields)
|
||||
ask.set_fmt_str(
|
||||
fmt_str=fmt_str,
|
||||
fields=fields)
|
||||
ask.show()
|
||||
|
|
|
@ -233,6 +233,36 @@ class Label:
|
|||
def delete(self) -> None:
|
||||
self.vb.scene().removeItem(self.txt)
|
||||
|
||||
# NOTE: pulled out from ``ChartPlotWidget`` from way way old code.
|
||||
# def _label_h(self, yhigh: float, ylow: float) -> float:
|
||||
# # compute contents label "height" in view terms
|
||||
# # to avoid having data "contents" overlap with them
|
||||
# if self._labels:
|
||||
# label = self._labels[self.name][0]
|
||||
|
||||
# rect = label.itemRect()
|
||||
# tl, br = rect.topLeft(), rect.bottomRight()
|
||||
# vb = self.plotItem.vb
|
||||
|
||||
# try:
|
||||
# # on startup labels might not yet be rendered
|
||||
# top, bottom = (vb.mapToView(tl).y(), vb.mapToView(br).y())
|
||||
|
||||
# # XXX: magic hack, how do we compute exactly?
|
||||
# label_h = (top - bottom) * 0.42
|
||||
|
||||
# except np.linalg.LinAlgError:
|
||||
# label_h = 0
|
||||
# else:
|
||||
# label_h = 0
|
||||
|
||||
# # print(f'label height {self.name}: {label_h}')
|
||||
|
||||
# if label_h > yhigh - ylow:
|
||||
# label_h = 0
|
||||
|
||||
# print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")
|
||||
|
||||
|
||||
class FormatLabel(QLabel):
|
||||
'''
|
||||
|
|
|
@ -18,13 +18,8 @@ Super fast OHLC sampling graphics types.
|
|||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Optional,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
from PyQt5 import (
|
||||
QtGui,
|
||||
QtWidgets,
|
||||
|
@ -33,17 +28,14 @@ from PyQt5.QtCore import (
|
|||
QLineF,
|
||||
QRectF,
|
||||
)
|
||||
|
||||
from PyQt5.QtWidgets import QGraphicsItem
|
||||
from PyQt5.QtGui import QPainterPath
|
||||
|
||||
from ._curve import FlowGraphic
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._style import hcolor
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import LinkedSplits
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -51,7 +43,8 @@ log = get_logger(__name__)
|
|||
def bar_from_ohlc_row(
|
||||
row: np.ndarray,
|
||||
# 0.5 is no overlap between arms, 1.0 is full overlap
|
||||
w: float = 0.43
|
||||
bar_w: float,
|
||||
bar_gap: float = 0.16
|
||||
|
||||
) -> tuple[QLineF]:
|
||||
'''
|
||||
|
@ -59,8 +52,7 @@ def bar_from_ohlc_row(
|
|||
OHLC "bar" for use in the "last datum" of a series.
|
||||
|
||||
'''
|
||||
open, high, low, close, index = row[
|
||||
['open', 'high', 'low', 'close', 'index']]
|
||||
open, high, low, close, index = row
|
||||
|
||||
# TODO: maybe consider using `QGraphicsLineItem` ??
|
||||
# gives us a ``.boundingRect()`` on the objects which may make
|
||||
|
@ -68,9 +60,11 @@ def bar_from_ohlc_row(
|
|||
# history path faster since it's done in C++:
|
||||
# https://doc.qt.io/qt-5/qgraphicslineitem.html
|
||||
|
||||
mid: float = (bar_w / 2) + index
|
||||
|
||||
# high -> low vertical (body) line
|
||||
if low != high:
|
||||
hl = QLineF(index, low, index, high)
|
||||
hl = QLineF(mid, low, mid, high)
|
||||
else:
|
||||
# XXX: if we don't do this it renders a weird rectangle?
|
||||
# see below for filtering this later...
|
||||
|
@ -81,45 +75,47 @@ def bar_from_ohlc_row(
|
|||
# the index's range according to the view mapping coordinates.
|
||||
|
||||
# open line
|
||||
o = QLineF(index - w, open, index, open)
|
||||
o = QLineF(index + bar_gap, open, mid, open)
|
||||
|
||||
# close line
|
||||
c = QLineF(index, close, index + w, close)
|
||||
c = QLineF(
|
||||
mid, close,
|
||||
index + bar_w - bar_gap, close,
|
||||
)
|
||||
|
||||
return [hl, o, c]
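A quick arithmetic walk-through of the new width/gap based geometry above for a bar at ``index = 10`` with ``bar_w = 1.0`` and the default ``bar_gap = 0.16`` (plain numbers only, no Qt types):

index, bar_w, bar_gap = 10.0, 1.0, 0.16

mid = (bar_w / 2) + index           # 10.5: x of the high-low body line
open_x0 = index + bar_gap           # 10.16: open arm starts after the gap
open_x1 = mid                       # 10.5: ...and ends at the body
close_x0 = mid                      # 10.5: close arm starts at the body
close_x1 = index + bar_w - bar_gap  # 10.84: ...and ends before the next bar

assert round(open_x0, 2) == 10.16 and round(close_x1, 2) == 10.84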
|
||||
|
||||
|
||||
class BarItems(pg.GraphicsObject):
|
||||
class BarItems(FlowGraphic):
|
||||
'''
|
||||
"Price range" bars graphics rendered from a OHLC sampled sequence.
|
||||
|
||||
'''
|
||||
# XXX: causes this weird jitter bug when click-drag panning
|
||||
# where the path curve will awkwardly flicker back and forth?
|
||||
cache_mode: int = QGraphicsItem.NoCache
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
linked: LinkedSplits,
|
||||
plotitem: 'pg.PlotItem', # noqa
|
||||
color: str = 'bracket',
|
||||
last_bar_color: str = 'original',
|
||||
|
||||
name: Optional[str] = None,
|
||||
*args,
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self.linked = linked
|
||||
# XXX: for the mega-lulz increasing width here increases draw
|
||||
# latency... so probably don't do it until we figure that out.
|
||||
self._color = color
|
||||
self.bars_pen = pg.mkPen(hcolor(color), width=1)
|
||||
self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
|
||||
self._name = name
|
||||
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
self.path = QPainterPath()
|
||||
self._last_bar_lines: Optional[tuple[QLineF, ...]] = None
|
||||
super().__init__(*args, **kwargs)
|
||||
self._last_bar_lines: tuple[QLineF, ...] | None = None
|
||||
|
||||
def x_uppx(self) -> int:
|
||||
# we expect the downsample curve to report this.
|
||||
return 0
|
||||
def x_last(self) -> None | float:
|
||||
'''
|
||||
Return the last most x value of the close line segment
|
||||
or if not drawn yet, ``None``.
|
||||
|
||||
'''
|
||||
if self._last_bar_lines:
|
||||
close_arm_line = self._last_bar_lines[-1]
|
||||
return close_arm_line.x2() if close_arm_line else None
|
||||
else:
|
||||
return None
|
||||
|
||||
# Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
|
||||
def boundingRect(self):
|
||||
|
@ -201,12 +197,12 @@ class BarItems(pg.GraphicsObject):
|
|||
# as is necessary for what's in "view". Not sure if this will
|
||||
# lead to any perf gains other than when zoomed in to fewer bars
|
||||
# in view.
|
||||
p.setPen(self.last_bar_pen)
|
||||
p.setPen(self.last_step_pen)
|
||||
if self._last_bar_lines:
|
||||
p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
|
||||
profiler('draw last bar')
|
||||
|
||||
p.setPen(self.bars_pen)
|
||||
p.setPen(self._pen)
|
||||
p.drawPath(self.path)
|
||||
profiler(f'draw history path: {self.path.capacity()}')
|
||||
|
||||
|
@ -214,33 +210,40 @@ class BarItems(pg.GraphicsObject):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
|
||||
fields: list[str] = [
|
||||
'index',
|
||||
'open',
|
||||
'high',
|
||||
'low',
|
||||
'close',
|
||||
],
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
|
||||
# relevant fields
|
||||
fields: list[str] = [
|
||||
'open',
|
||||
'high',
|
||||
'low',
|
||||
'close',
|
||||
index_field,
|
||||
]
|
||||
ohlc = src_data[fields]
|
||||
# last_row = ohlc[-1:]
|
||||
|
||||
# individual values
|
||||
last_row = i, o, h, l, last = ohlc[-1]
|
||||
last_row = o, h, l, last, i = ohlc[-1]
|
||||
|
||||
# times = src_data['time']
|
||||
# if times[-1] - times[-2]:
|
||||
# breakpoint()
|
||||
|
||||
index = src_data[index_field]
|
||||
step_size = index[-1] - index[-2]
|
||||
|
||||
# generate new lines objects for updatable "current bar"
|
||||
self._last_bar_lines = bar_from_ohlc_row(last_row)
|
||||
bg: float = 0.16 * step_size
|
||||
self._last_bar_lines = bar_from_ohlc_row(
|
||||
last_row,
|
||||
bar_w=step_size,
|
||||
bar_gap=bg,
|
||||
)
|
||||
|
||||
# assert i == graphics.start_index - 1
|
||||
# assert i == last_index
|
||||
|
@ -255,10 +258,16 @@ class BarItems(pg.GraphicsObject):
|
|||
if l != h: # noqa
|
||||
|
||||
if body is None:
|
||||
body = self._last_bar_lines[0] = QLineF(i, l, i, h)
|
||||
body = self._last_bar_lines[0] = QLineF(
|
||||
i + bg, l,
|
||||
i + step_size - bg, h,
|
||||
)
|
||||
else:
|
||||
# update body
|
||||
body.setLine(i, l, i, h)
|
||||
body.setLine(
|
||||
body.x1(), l,
|
||||
body.x2(), h,
|
||||
)
|
||||
|
||||
# XXX: pretty sure this is causing an issue where the
|
||||
# bar has a large upward move right before the next
|
||||
|
@ -269,5 +278,4 @@ class BarItems(pg.GraphicsObject):
|
|||
# date / from some previous sample. It's weird though
|
||||
# because i've seen it do this to bars i - 3 back?
|
||||
|
||||
# return ohlc['time'], ohlc['close']
|
||||
return ohlc['index'], ohlc['close']
|
||||
return ohlc[index_field], ohlc['close']
|
||||
|
|
|
@ -22,7 +22,6 @@ from collections import defaultdict
|
|||
from functools import partial
|
||||
from typing import (
|
||||
Callable,
|
||||
Optional,
|
||||
)
|
||||
|
||||
from pyqtgraph.graphicsItems.AxisItem import AxisItem
|
||||
|
@ -92,11 +91,11 @@ class ComposedGridLayout:
|
|||
'''
|
||||
def __init__(
|
||||
self,
|
||||
item: PlotItem,
|
||||
pi: PlotItem,
|
||||
|
||||
) -> None:
|
||||
|
||||
self.items: list[PlotItem] = []
|
||||
self.pitems: list[PlotItem] = []
|
||||
self._pi2axes: dict[ # TODO: use a ``bidict`` here?
|
||||
int,
|
||||
dict[str, AxisItem],
|
||||
|
@ -116,6 +115,7 @@ class ComposedGridLayout:
|
|||
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.setSpacing(0)
|
||||
layout.setMinimumWidth(0)
|
||||
|
||||
if name in ('top', 'bottom'):
|
||||
orient = Qt.Vertical
|
||||
|
@ -125,7 +125,11 @@ class ComposedGridLayout:
|
|||
|
||||
layout.setOrientation(orient)
|
||||
|
||||
self.insert_plotitem(0, item)
|
||||
self.insert_plotitem(
|
||||
0,
|
||||
pi,
|
||||
remove_axes=False,
|
||||
)
|
||||
|
||||
# insert surrounding linear layouts into the parent pi's layout
|
||||
# such that additional axes can be appended arbitrarily without
|
||||
|
@ -135,13 +139,16 @@ class ComposedGridLayout:
|
|||
# TODO: do we need this?
|
||||
# axis should have been removed during insert above
|
||||
index = _axes_layout_indices[name]
|
||||
axis = item.layout.itemAt(*index)
|
||||
axis = pi.layout.itemAt(*index)
|
||||
if axis and axis.isVisible():
|
||||
assert linlayout.itemAt(0) is axis
|
||||
|
||||
# item.layout.removeItem(axis)
|
||||
item.layout.addItem(linlayout, *index)
|
||||
layout = item.layout.itemAt(*index)
|
||||
# XXX: see comment in ``.insert_plotitem()``...
|
||||
# our `PlotItem.removeAxis()` does this internally.
|
||||
# pi.layout.removeItem(axis)
|
||||
|
||||
pi.layout.addItem(linlayout, *index)
|
||||
layout = pi.layout.itemAt(*index)
|
||||
assert layout is linlayout
|
||||
|
||||
def _register_item(
|
||||
|
@ -157,14 +164,16 @@ class ComposedGridLayout:
|
|||
self._pi2axes.setdefault(name, {})[index] = axis
|
||||
|
||||
# enter plot into list for index tracking
|
||||
self.items.insert(index, plotitem)
|
||||
self.pitems.insert(index, plotitem)
|
||||
|
||||
def insert_plotitem(
|
||||
self,
|
||||
index: int,
|
||||
plotitem: PlotItem,
|
||||
|
||||
) -> (int, int):
|
||||
remove_axes: bool = False,
|
||||
|
||||
) -> tuple[int, list[AxisItem]]:
|
||||
'''
|
||||
Place item at index by inserting all axes into the grid
|
||||
at list-order appropriate position.
|
||||
|
@ -175,11 +184,14 @@ class ComposedGridLayout:
|
|||
'`.insert_plotitem()` only supports an index >= 0'
|
||||
)
|
||||
|
||||
inserted_axes: list[AxisItem] = []
|
||||
|
||||
# add plot's axes in sequence to the embedded linear layouts
|
||||
# for each "side" thus avoiding graphics collisions.
|
||||
for name, axis_info in plotitem.axes.copy().items():
|
||||
linlayout, axes = self.sides[name]
|
||||
axis = axis_info['item']
|
||||
inserted_axes.append(axis)
|
||||
|
||||
if axis in axes:
|
||||
# TODO: re-order using ``.pop()`` ?
|
||||
|
@ -192,22 +204,16 @@ class ComposedGridLayout:
|
|||
if (
|
||||
not axis.isVisible()
|
||||
|
||||
# XXX: we never skip moving the axes for the *first*
|
||||
# XXX: we never skip moving the axes for the *root*
|
||||
# plotitem inserted (even if not shown) since we need to
|
||||
# move all the hidden axes into linear sub-layouts for
|
||||
# that "central" plot in the overlay. Also if we don't
|
||||
# do it there's weird geomoetry calc offsets that make
|
||||
# view coords slightly off somehow .. smh
|
||||
and not len(self.items) == 0
|
||||
and not len(self.pitems) == 0
|
||||
):
|
||||
continue
|
||||
|
||||
# XXX: Remove old axis? No, turns out we don't need this?
|
||||
# DON'T unlink it since we need the original ``ViewBox``
|
||||
# to still drive it B)
|
||||
# popped = plotitem.removeAxis(name, unlink=False)
|
||||
# assert axis is popped
|
||||
|
||||
# invert insert index for layouts which are
|
||||
# not-left-to-right, top-to-bottom insert oriented
|
||||
insert_index = index
|
||||
|
@ -220,7 +226,17 @@ class ComposedGridLayout:
|
|||
|
||||
self._register_item(index, plotitem)
|
||||
|
||||
return index
|
||||
if remove_axes:
|
||||
for name, axis_info in plotitem.axes.copy().items():
|
||||
axis = axis_info['item']
|
||||
# XXX: Remove old axis?
|
||||
# No, turns out we don't need this?
|
||||
# DON'T UNLINK IT since we need the original ``ViewBox`` to
|
||||
# still drive it with events/handlers B)
|
||||
popped = plotitem.removeAxis(name, unlink=False)
|
||||
assert axis is popped
|
||||
|
||||
return (index, inserted_axes)
|
||||
|
||||
def append_plotitem(
|
||||
self,
|
||||
|
@ -234,20 +250,20 @@ class ComposedGridLayout:
|
|||
'''
|
||||
# for left and bottom axes we have to first remove
|
||||
# items and re-insert to maintain a list-order.
|
||||
return self.insert_plotitem(len(self.items), item)
|
||||
return self.insert_plotitem(len(self.pitems), item)
|
||||
|
||||
def get_axis(
|
||||
self,
|
||||
plot: PlotItem,
|
||||
name: str,
|
||||
|
||||
) -> Optional[AxisItem]:
|
||||
) -> AxisItem | None:
|
||||
'''
|
||||
Retrieve the named axis for overlayed ``plot`` or ``None``
|
||||
if axis for that name is not shown.
|
||||
|
||||
'''
|
||||
index = self.items.index(plot)
|
||||
index = self.pitems.index(plot)
|
||||
named = self._pi2axes[name]
|
||||
return named.get(index)
|
||||
|
||||
|
@ -306,14 +322,17 @@ class PlotItemOverlay:
|
|||
# events/signals.
|
||||
root_plotitem.vb.setZValue(10)
|
||||
|
||||
self.overlays: list[PlotItem] = []
|
||||
self.layout = ComposedGridLayout(root_plotitem)
|
||||
self._relays: dict[str, Signal] = {}
|
||||
|
||||
@property
|
||||
def overlays(self) -> list[PlotItem]:
|
||||
return self.layout.pitems
|
||||
|
||||
def add_plotitem(
|
||||
self,
|
||||
plotitem: PlotItem,
|
||||
index: Optional[int] = None,
|
||||
index: int | None = None,
|
||||
|
||||
# event/signal names which will be broadcasted to all added
|
||||
# (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
|
||||
|
@ -324,11 +343,9 @@ class PlotItemOverlay:
|
|||
# (0, 1), # link both
|
||||
link_axes: tuple[int] = (),
|
||||
|
||||
) -> None:
|
||||
) -> tuple[int, list[AxisItem]]:
|
||||
|
||||
index = index or len(self.overlays)
|
||||
root = self.root_plotitem
|
||||
self.overlays.insert(index, plotitem)
|
||||
vb: ViewBox = plotitem.vb
|
||||
|
||||
# TODO: some sane way to allow menu event broadcast XD
|
||||
|
@ -361,8 +378,8 @@ class PlotItemOverlay:
|
|||
if not sub_handlers:
|
||||
|
||||
src_handler = getattr(
|
||||
root.vb,
|
||||
ev_name,
|
||||
root.vb,
|
||||
ev_name,
|
||||
)
|
||||
|
||||
def broadcast(
|
||||
|
@ -370,7 +387,7 @@ class PlotItemOverlay:
|
|||
|
||||
# TODO: drop this viewbox specific input and
|
||||
# allow a predicate to be passed in by user.
|
||||
axis: 'Optional[int]' = None,
|
||||
axis: int | None = None,
|
||||
|
||||
*,
|
||||
|
||||
|
@ -476,7 +493,10 @@ class PlotItemOverlay:
|
|||
# ``PlotItem`` dynamically.
|
||||
|
||||
# append-compose into the layout all axes from this plot
|
||||
self.layout.insert_plotitem(index, plotitem)
|
||||
if index is None:
|
||||
insert_index, axes = self.layout.append_plotitem(plotitem)
|
||||
else:
|
||||
insert_index, axes = self.layout.insert_plotitem(index, plotitem)
|
||||
|
||||
plotitem.setGeometry(root.vb.sceneBoundingRect())
|
||||
|
||||
|
@ -496,6 +516,11 @@ class PlotItemOverlay:
|
|||
|
||||
vb.setZValue(100)
|
||||
|
||||
return (
|
||||
index,
|
||||
axes,
|
||||
)
|
||||
|
||||
def get_axis(
|
||||
self,
|
||||
plot: PlotItem,
|
||||
|
|
|
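With `overlays` now a read-only property backed by `layout.pitems`, and with both the layout methods and `add_plotitem()` handing back the composed `AxisItem`s alongside the insert index, a caller can track exactly which axes were moved into the grid. A minimal usage sketch under those assumptions (the `root_pi` / `volume_pi` names are illustrative and not from this diff; `PlotItemOverlay` is assumed to be imported from the module these hunks touch):

import pyqtgraph as pg

app = pg.mkQApp()  # a Qt app must exist before creating graphics items

root_pi = pg.PlotItem()
volume_pi = pg.PlotItem()

overlay = PlotItemOverlay(root_pi)

# now returns the insert index plus the AxisItems that were
# composed into the grid layout for this plotitem
index, axes = overlay.add_plotitem(volume_pi)

# the property simply delegates to the layout's plotitem list
assert volume_pi in overlay.overlays

# a named axis can still be fetched, or None if it isn't shown
right_axis = overlay.layout.get_axis(volume_pi, 'right')
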
@@ -54,6 +54,10 @@ def _do_overrides() -> None:
pg.functions.invertQTransform = invertQTransform
pg.PlotItem = PlotItem

# enable "QPainterPathPrivate for faster arrayToQPath" from
# https://github.com/pyqtgraph/pyqtgraph/pull/2324
pg.setConfigOption('enableExperimental', True)


# NOTE: the below customized type contains all our changes on a method
# by method basis as per the diff:

@@ -87,7 +91,7 @@ class PlotItem(pg.PlotItem):
title=None,
viewBox=None,
axisItems=None,
default_axes=['left', 'bottom'],
default_axes=['right', 'bottom'],
enableMenu=True,
**kargs
):

@@ -126,7 +130,7 @@ class PlotItem(pg.PlotItem):

If the ``unlink: bool`` is set to ``False`` then the axis will
stay linked to its view and will only be removed from the
layoutonly be removed from the layout.
layout.

If no axis with ``name: str`` is found then this is a noop.

@@ -140,7 +144,10 @@ class PlotItem(pg.PlotItem):

axis = entry['item']
self.layout.removeItem(axis)
axis.scene().removeItem(axis)
scn = axis.scene()
if scn:
scn.removeItem(axis)

if unlink:
axis.unlinkFromView()
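The `removeAxis()` hunk above replaces an unconditional `axis.scene().removeItem(axis)` with a guard, since `QGraphicsItem.scene()` returns `None` for an item that was never added to (or was already detached from) a scene. A small standalone sketch of the same defensive pattern in plain PyQt5, independent of the piker types:

from PyQt5.QtWidgets import QApplication, QGraphicsScene, QGraphicsRectItem

app = QApplication([])

scene = QGraphicsScene()
item = QGraphicsRectItem(0, 0, 10, 10)

# an item that was never added to a scene reports None here, so an
# unguarded `.scene().removeItem(...)` call would raise
assert item.scene() is None

scene.addItem(item)

scn = item.scene()
if scn:
    # only detach when the item actually lives in a scene, mirroring
    # the `scn = axis.scene(); if scn: ...` guard above
    scn.removeItem(item)
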
@@ -0,0 +1,320 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
High level streaming graphics primitives.

This is an intermediate layer which associates real-time low latency
graphics primitives with underlying stream/flow related data structures
for fast incremental update.

'''
from __future__ import annotations
from typing import (
TYPE_CHECKING,
)

import msgspec
import numpy as np
import pyqtgraph as pg
from PyQt5.QtGui import QPainterPath

from ..data._formatters import (
IncrementalFormatter,
)
from ..data._pathops import (
xy_downsample,
)
from ..log import get_logger
from .._profile import (
Profiler,
)

if TYPE_CHECKING:
from ._dataviz import Viz


log = get_logger(__name__)


class Renderer(msgspec.Struct):

viz: Viz
fmtr: IncrementalFormatter

# output graphics rendering, the main object
# processed in ``QGraphicsObject.paint()``
path: QPainterPath | None = None
fast_path: QPainterPath | None = None

# downsampling state
_last_uppx: float = 0
_in_ds: bool = False

def draw_path(
self,
x: np.ndarray,
y: np.ndarray,
connect: str | np.ndarray = 'all',
path: QPainterPath | None = None,
redraw: bool = False,

) -> QPainterPath:

path_was_none = path is None

if redraw and path:
path.clear()

# TODO: avoid this?
if self.fast_path:
self.fast_path.clear()

path = pg.functions.arrayToQPath(
x,
y,
connect=connect,
finiteCheck=False,

# reserve mem allocs see:
# - https://doc.qt.io/qt-5/qpainterpath.html#reserve
# - https://doc.qt.io/qt-5/qpainterpath.html#capacity
# - https://doc.qt.io/qt-5/qpainterpath.html#clear
# XXX: right now this is based on ad-hoc checks on a
# hidpi 3840x2160 4k monitor but we should optimize for
# the target display(s) on the sys.
# if no_path_yet:
# graphics.path.reserve(int(500e3))
# path=path, # path re-use / reserving
)

# avoid mem allocs if possible
if path_was_none:
path.reserve(path.capacity())

return path

def render(
self,

new_read,
array_key: str,
profiler: Profiler,
uppx: float = 1,

# redraw and ds flags
should_redraw: bool = False,
new_sample_rate: bool = False,
should_ds: bool = False,
showing_src_data: bool = True,

do_append: bool = True,
use_fpath: bool = True,

# only render datums "in view" of the ``ChartView``
use_vr: bool = True,

) -> tuple[QPainterPath, bool]:
'''
Render the current graphics path(s)

There are (at least) 3 stages from source data to graphics data:
- a data transform (which can be stored in additional shm)
- a graphics transform which converts discrete basis data to
a `float`-basis view-coords graphics basis. (eg. ``ohlc_flatten()``,
``step_path_arrays_from_1d()``, etc.)

- blah blah blah (from notes)

'''
# TODO: can the renderer just call ``Viz.read()`` directly?
# unpack latest source data read
fmtr = self.fmtr

(
_,
_,
array,
ivl,
ivr,
in_view,
) = new_read

# xy-path data transform: convert source data to a format
# able to be passed to a `QPainterPath` rendering routine.
fmt_out = fmtr.format_to_1d(
new_read,
array_key,
profiler,

slice_to_inview=use_vr,
)

# no history in view case
if not fmt_out:
# XXX: this might be why the profiler only has exits?
return

(
x_1d,
y_1d,
connect,
prepend_length,
append_length,
view_changed,
# append_tres,

) = fmt_out

# redraw conditions
if (
prepend_length > 0
or new_sample_rate
or view_changed

# NOTE: comment this to try and make "append paths"
# work below..
or append_length > 0
):
should_redraw = True

path: QPainterPath = self.path
fast_path: QPainterPath = self.fast_path
reset: bool = False

self.viz.yrange = None

# redraw the entire source data if we have either of:
# - no prior path graphic rendered or,
# - we always intend to re-render the data only in view
if (
path is None
or should_redraw
):
# print(f"{self.viz.name} -> REDRAWING BRUH")
if new_sample_rate and showing_src_data:
log.info(f'DE-downsampling -> {array_key}')
self._in_ds = False

elif should_ds and uppx > 1:

ds_out = xy_downsample(
x_1d,
y_1d,
uppx,
)
if ds_out is not None:
x_1d, y_1d, ymn, ymx = ds_out
self.viz.yrange = ymn, ymx
# print(f'{self.viz.name} post ds: ymn, ymx: {ymn},{ymx}')

reset = True
profiler(f'FULL PATH downsample redraw={should_ds}')
self._in_ds = True

path = self.draw_path(
x=x_1d,
y=y_1d,
connect=connect,
path=path,
redraw=True,
)

profiler(
'generated fresh path. '
f'(should_redraw: {should_redraw} '
f'should_ds: {should_ds} new_sample_rate: {new_sample_rate})'
)

# TODO: get this piecewise prepend working - right now it's
# giving heck on vwap...
# elif prepend_length:

# prepend_path = pg.functions.arrayToQPath(
# x[0:prepend_length],
# y[0:prepend_length],
# connect='all'
# )

# # swap prepend path in "front"
# old_path = graphics.path
# graphics.path = prepend_path
# # graphics.path.moveTo(new_x[0], new_y[0])
# graphics.path.connectPath(old_path)

elif (
append_length > 0
and do_append
):
profiler(f'sliced append path {append_length}')
# (
# x_1d,
# y_1d,
# connect,
# ) = append_tres

profiler(
f'diffed array input, append_length={append_length}'
)

# if should_ds and uppx > 1:
# new_x, new_y = xy_downsample(
# new_x,
# new_y,
# uppx,
# )
# profiler(f'fast path downsample redraw={should_ds}')

append_path = self.draw_path(
x=x_1d,
y=y_1d,
connect=connect,
path=fast_path,
)
profiler('generated append qpath')

if use_fpath:
# an attempt at trying to make append-updates faster..
if fast_path is None:
fast_path = append_path
# fast_path.reserve(int(6e3))
else:
# print(
# f'{self.viz.name}: FAST PATH\n'
# f"append_path br: {append_path.boundingRect()}\n"
# f"path size: {size}\n"
# f"append_path len: {append_path.length()}\n"
# f"fast_path len: {fast_path.length()}\n"
# )

fast_path.connectPath(append_path)
size = fast_path.capacity()
profiler(f'connected fast path w size: {size}')

# graphics.path.moveTo(new_x[0], new_y[0])
# path.connectPath(append_path)

# XXX: lol this causes a hang..
# graphics.path = graphics.path.simplified()
else:
size = path.capacity()
profiler(f'connected history path w size: {size}')
path.connectPath(append_path)

self.path = path
self.fast_path = fast_path

return self.path, reset
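Two things in the new `Renderer` module are worth restating compactly: `draw_path()` is a thin wrapper around `pg.functions.arrayToQPath()` (with optional path reuse), and `render()` picks between a full redraw, an append onto the fast path, or reusing the existing path. The sketch below is a hedged restatement of that branch ordering plus the underlying path primitive, not the actual implementation:

import numpy as np
import pyqtgraph as pg

pg.mkQApp()  # ensure a Qt app exists


def build_path(x: np.ndarray, y: np.ndarray):
    # same primitive draw_path() relies on internally
    return pg.functions.arrayToQPath(
        x,
        y,
        connect='all',
        finiteCheck=False,  # skip NaN/inf scanning, as above
    )


def decide_render_action(
    have_path: bool,
    prepend_length: int,
    append_length: int,
    new_sample_rate: bool,
    view_changed: bool,
    do_append: bool = True,
) -> str:
    # any prepended history, sample-rate switch or view change forces a
    # full rebuild; note that appended samples currently *also* force a
    # redraw (see the NOTE comment inside render() above).
    should_redraw = (
        prepend_length > 0
        or new_sample_rate
        or view_changed
        or append_length > 0
    )
    if not have_path or should_redraw:
        return 'full-redraw'
    if append_length > 0 and do_append:
        return 'append-to-fast-path'
    return 'reuse-existing-path'


x = np.arange(100, dtype=np.float64)
path = build_path(x, np.sin(x / 10))
print(path.elementCount(), decide_render_action(True, 0, 5, False, False))
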
@@ -144,15 +144,29 @@ class CompleterView(QTreeView):
self._font_size: int = 0 # pixels
self._init: bool = False

async def on_pressed(self, idx: QModelIndex) -> None:
async def on_pressed(
self,
idx: QModelIndex,
) -> None:
'''
Mouse pressed on view handler.

'''
search = self.parent()
await search.chart_current_item()

await search.chart_current_item(
clear_to_cache=True,
)

# XXX: this causes Qt to hang and segfault..lovely
# self.show_cache_entries(
# only=True,
# keep_current_item_selected=True,
# )

search.focus()


def set_font_size(self, size: int = 18):
# print(size)
if size < 0:

@@ -288,7 +302,7 @@ class CompleterView(QTreeView):
def select_first(self) -> QStandardItem:
'''
Select the first depth >= 2 entry from the completer tree and
return it's item.
return its item.

'''
# ensure we're **not** selecting the first level parent node and

@@ -615,6 +629,8 @@ class SearchWidget(QtWidgets.QWidget):
def show_cache_entries(
self,
only: bool = False,
keep_current_item_selected: bool = False,

) -> None:
'''
Clear the search results view and show only cached (aka recently

@@ -624,10 +640,14 @@ class SearchWidget(QtWidgets.QWidget):
godw = self.godwidget

# first entry in the cache is the current symbol(s)
fqsns = []

fqsns = set()
for multi_fqsns in list(godw._chart_cache):
fqsns.extend(list(multi_fqsns))
for fqsn in set(multi_fqsns):
fqsns.add(fqsn)

if keep_current_item_selected:
sel = self.view.selectionModel()
cidx = sel.currentIndex()

self.view.set_section_entries(
'cache',

@@ -637,7 +657,17 @@ class SearchWidget(QtWidgets.QWidget):
reverse=True,
)

def get_current_item(self) -> Optional[tuple[str, str]]:
if (
keep_current_item_selected
and cidx.isValid()
):
# set current selection back to what it was before filling out
# the view results.
self.view.select_from_idx(cidx)
else:
self.view.select_first()

def get_current_item(self) -> tuple[QModelIndex, str, str] | None:
'''
Return the current completer tree selection as
a tuple ``(parent: str, child: str)`` if valid, else ``None``.

@@ -665,7 +695,11 @@ class SearchWidget(QtWidgets.QWidget):
if provider == 'cache':
symbol, _, provider = symbol.rpartition('.')

return provider, symbol
return (
cidx,
provider,
symbol,
)

else:
return None

@@ -686,7 +720,7 @@ class SearchWidget(QtWidgets.QWidget):
if value is None:
return None

provider, symbol = value
cidx, provider, symbol = value
godw = self.godwidget

fqsn = f'{symbol}.{provider}'

@@ -715,7 +749,9 @@ class SearchWidget(QtWidgets.QWidget):
godw.rt_linked,
)
)
self.show_cache_entries(only=True)
self.show_cache_entries(
only=True,
)

self.bar.focus()
return fqsn

@@ -956,11 +992,10 @@ async def handle_keyboard_input(
global _search_active, _search_enabled

# startup
bar = searchbar
search = searchbar.parent()
godwidget = search.godwidget
view = bar.view
view.set_font_size(bar.dpi_font.px_size)
searchw = searchbar.parent()
godwidget = searchw.godwidget
view = searchbar.view
view.set_font_size(searchbar.dpi_font.px_size)
send, recv = trio.open_memory_channel(616)

async with trio.open_nursery() as n:

@@ -971,13 +1006,13 @@ async def handle_keyboard_input(
n.start_soon(
partial(
fill_results,
search,
searchw,
recv,
)
)

bar.focus()
search.show_cache_entries()
searchbar.focus()
searchw.show_cache_entries()
await trio.sleep(0)

async for kbmsg in recv_chan:

@@ -994,16 +1029,24 @@ async def handle_keyboard_input(
Qt.Key_Return
):
_search_enabled = False
await search.chart_current_item(clear_to_cache=True)
search.show_cache_entries(only=True)
await searchw.chart_current_item(clear_to_cache=True)

# XXX: causes hang and segfault..
# searchw.show_cache_entries(
# only=True,
# keep_current_item_selected=True,
# )

view.show_matches()
search.focus()

elif not ctl and not bar.text():
searchw.focus()

elif (
not ctl
and not searchbar.text()
):
# TODO: really should factor this somewhere..bc
# we're doin it in another spot as well..
search.show_cache_entries(only=True)
searchw.show_cache_entries(only=True)
continue

# cancel and close

@@ -1012,7 +1055,7 @@ async def handle_keyboard_input(
Qt.Key_Space, # i feel like this is the "native" one
Qt.Key_Alt,
}:
bar.unfocus()
searchbar.unfocus()

# kill the search and focus back on main chart
if godwidget:

@@ -1020,41 +1063,54 @@ async def handle_keyboard_input(

continue

if ctl and key in {
Qt.Key_L,
}:
if (
ctl
and key in {Qt.Key_L}
):
# like url (link) highlight in a web browser
bar.focus()
searchbar.focus()

# selection navigation controls
elif ctl and key in {
Qt.Key_D,
}:
elif (
ctl
and key in {Qt.Key_D}
):
view.next_section(direction='down')
_search_enabled = False

elif ctl and key in {
Qt.Key_U,
}:
elif (
ctl
and key in {Qt.Key_U}
):
view.next_section(direction='up')
_search_enabled = False

# selection navigation controls
elif (ctl and key in {
elif (
ctl and (
key in {
Qt.Key_K,
Qt.Key_J,
}

Qt.Key_K,
Qt.Key_J,

}) or key in {

Qt.Key_Up,
Qt.Key_Down,
}:
or key in {
Qt.Key_Up,
Qt.Key_Down,
}
)
):
_search_enabled = False
if key in {Qt.Key_K, Qt.Key_Up}:

if key in {
Qt.Key_K,
Qt.Key_Up
}:
item = view.select_previous()

elif key in {Qt.Key_J, Qt.Key_Down}:
elif key in {
Qt.Key_J,
Qt.Key_Down,
}:
item = view.select_next()

if item:

@@ -1063,15 +1119,18 @@ async def handle_keyboard_input(
# if we're in the cache section and thus the next
# selection is a cache item, switch and show it
# immediately since it should be very fast.
if parent_item and parent_item.text() == 'cache':
await search.chart_current_item(clear_to_cache=False)
if (
parent_item
and parent_item.text() == 'cache'
):
await searchw.chart_current_item(clear_to_cache=False)

# ACTUAL SEARCH BLOCK #
# where we fuzzy complete and fill out sections.
elif not ctl:
# relay to completer task
_search_enabled = True
send.send_nowait(search.bar.text())
send.send_nowait(searchw.bar.text())
_search_active.set()
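The `keep_current_item_selected` flag added above snapshots the completer view's current `QModelIndex` before the cache section is refilled and then restores it, so the highlighted row doesn't jump back to the first entry. The same pattern in isolation against a bare `QTreeView` (purely illustrative, not piker's widget code):

from PyQt5.QtGui import QStandardItem, QStandardItemModel
from PyQt5.QtWidgets import QApplication, QTreeView

app = QApplication([])

view = QTreeView()
model = QStandardItemModel()
for name in ('cache', 'binance', 'kraken'):
    model.appendRow(QStandardItem(name))
view.setModel(model)
view.setCurrentIndex(model.index(2, 0))

# snapshot the selection before mutating/refilling the section
cidx = view.selectionModel().currentIndex()

# ... repopulate or re-sort the section entries here ...

if cidx.isValid():
    # restore the prior selection, analogous to select_from_idx()
    view.setCurrentIndex(cidx)
else:
    # otherwise fall back to the first entry, as in select_first()
    view.setCurrentIndex(model.index(0, 0))
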
@@ -349,7 +349,7 @@ class OrderMode:

'''
if not order:
staged = self._staged_order
staged: Order = self._staged_order
# apply order fields for ems
oid = str(uuid.uuid4())
order = staged.copy()

@@ -494,7 +494,7 @@ class OrderMode:

uuid: str,
price: float,
arrow_index: float,
time_s: float,

pointing: Optional[str] = None,

@@ -513,22 +513,32 @@ class OrderMode:
'''
dialog = self.dialogs[uuid]
lines = dialog.lines
chart = self.chart

# XXX: seems to fail on certain types of races?
# assert len(lines) == 2
if lines:
flume: Flume = self.feed.flumes[self.chart.linked.symbol.fqsn]
flume: Flume = self.feed.flumes[chart.linked.symbol.fqsn]
_, _, ratio = flume.get_ds_info()
for i, chart in [
(arrow_index, self.chart),
(flume.izero_hist
+
round((arrow_index - flume.izero_rt)/ratio),
self.hist_chart)

for chart, shm in [
(self.chart, flume.rt_shm),
(self.hist_chart, flume.hist_shm),
]:
viz = chart.get_viz(chart.name)
index_field = viz.index_field
arr = shm.array

# TODO: borked for int index based..
index = flume.get_index(time_s, arr)

# get absolute index for arrow placement
arrow_index = arr[index_field][index]

self.arrows.add(
chart.plotItem,
uuid,
i,
arrow_index,
price,
pointing=pointing,
color=lines[0].color

@@ -693,7 +703,6 @@ async def open_order_mode(

# symbol id
symbol = chart.linked.symbol
symkey = symbol.front_fqsn()

# map of per-provider account keys to position tracker instances
trackers: dict[str, PositionTracker] = {}

@@ -854,7 +863,7 @@ async def open_order_mode(
# the expected symbol key in its positions msg.
for (broker, acctid), msgs in position_msgs.items():
for msg in msgs:
log.info(f'Loading pp for {symkey}:\n{pformat(msg)}')
log.info(f'Loading pp for {acctid}@{broker}:\n{pformat(msg)}')
await process_trade_msg(
mode,
book,

@@ -966,7 +975,6 @@ async def process_trade_msg(

if dialog:
fqsn = dialog.symbol
flume = mode.feed.flumes[fqsn]

match msg:
case Status(

@@ -1037,11 +1045,11 @@ async def process_trade_msg(
# should only be one "fill" for an alert
# add a triangle and remove the level line
req = Order(**req)
index = flume.get_index(time.time())
tm = time.time()
mode.on_fill(
oid,
price=req.price,
arrow_index=index,
time_s=tm,
)
mode.lines.remove_line(uuid=oid)
msg.req = req

@@ -1070,6 +1078,8 @@ async def process_trade_msg(
details = msg.brokerd_msg

# TODO: put the actual exchange timestamp?
# TODO: some kinda progress system?

# NOTE: currently the ``kraken`` openOrders sub
# doesn't deliver their engine timestamp as part of
# it's schema, so this value is **not** from them

@@ -1080,15 +1090,11 @@ async def process_trade_msg(
# a true backend one? This will require finagling
# with how each backend tracks/summarizes time
# stamps for the downstream API.
index = flume.get_index(
details['broker_time']
)

# TODO: some kinda progress system
tm = details['broker_time']
mode.on_fill(
oid,
price=details['price'],
arrow_index=index,
time_s=tm,
pointing='up' if action == 'buy' else 'down',
)
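The `on_fill()` changes above stop converting broker fill times into chart indices at the call sites: the raw `time_s` is passed through and each chart resolves its own x-coordinate against its shm array via the viz's `index_field`. A rough, self-contained restatement of that per-chart lookup, using a numpy structured array as a stand-in for the shm buffer (`nearest_row_x` and its search strategy are illustrative only, not `Flume.get_index()`):

import numpy as np


def nearest_row_x(
    arr: np.ndarray,
    time_s: float,
    index_field: str,
) -> float:
    # find the row at (or just after) the fill timestamp and hand back
    # its value under ``index_field`` as the arrow's x-coordinate
    i = int(np.searchsorted(arr['time'], time_s))
    i = min(i, len(arr) - 1)
    return float(arr[index_field][i])


# structured array standing in for a flume's rt/hist shm buffer
arr = np.zeros(5, dtype=[('time', 'f8'), ('index', 'f8')])
arr['time'] = [10.0, 20.0, 30.0, 40.0, 50.0]
arr['index'] = np.arange(5)

# a fill at t=33s lands on the row starting at t=40s in this toy buffer
print(nearest_row_x(arr, 33.0, 'time'))   # 40.0
print(nearest_row_x(arr, 33.0, 'index'))  # 3.0
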
@@ -1,3 +0,0 @@
"""
Super hawt Qt UI components
"""
@@ -1,67 +0,0 @@
import sys

from PySide2.QtCharts import QtCharts
from PySide2.QtWidgets import QApplication, QMainWindow
from PySide2.QtCore import Qt, QPointF
from PySide2 import QtGui
import qdarkstyle

data = ((1, 7380, 7520, 7380, 7510, 7324),
(2, 7520, 7580, 7410, 7440, 7372),
(3, 7440, 7650, 7310, 7520, 7434),
(4, 7450, 7640, 7450, 7550, 7480),
(5, 7510, 7590, 7460, 7490, 7502),
(6, 7500, 7590, 7480, 7560, 7512),
(7, 7560, 7830, 7540, 7800, 7584))


app = QApplication([])
# set dark stylesheet
# import pdb; pdb.set_trace()
app.setStyleSheet(qdarkstyle.load_stylesheet_pyside())

series = QtCharts.QCandlestickSeries()
series.setDecreasingColor(Qt.darkRed)
series.setIncreasingColor(Qt.darkGreen)

ma5 = QtCharts.QLineSeries() # 5-days average data line
tm = [] # stores str type data

# in a loop, series and ma5 append corresponding data
for num, o, h, l, c, m in data:
candle = QtCharts.QCandlestickSet(o, h, l, c)
series.append(candle)
ma5.append(QPointF(num, m))
tm.append(str(num))

pen = candle.pen()
# import pdb; pdb.set_trace()

chart = QtCharts.QChart()

# import pdb; pdb.set_trace()
series.setBodyOutlineVisible(False)
series.setCapsVisible(False)
# brush = QtGui.QBrush()
# brush.setColor(Qt.green)
# series.setBrush(brush)
chart.addSeries(series) # candle
chart.addSeries(ma5) # ma5 line

chart.setAnimationOptions(QtCharts.QChart.SeriesAnimations)
chart.createDefaultAxes()
chart.legend().hide()

chart.axisX(series).setCategories(tm)
chart.axisX(ma5).setVisible(False)

view = QtCharts.QChartView(chart)
view.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeDark)
view.setRubberBand(QtCharts.QChartView.HorizontalRubberBand)
# chartview.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeBlueCerulean)

ui = QMainWindow()
# ui.setGeometry(50, 50, 500, 300)
ui.setCentralWidget(view)
ui.show()
sys.exit(app.exec_())