Compare commits
No commits in common. "ib_async" and "main" have entirely different histories.
@@ -689,14 +689,13 @@ class Client:
                 ContFuture(symbol, exchange=exchange)
             ))[0]
         else:
-            cons = (await self.ib.qualifyContractsAsync(
+            con = (await self.ib.qualifyContractsAsync(
                 Future(
                     symbol,
                     exchange=exchange,
                     lastTradeDateOrContractMonth=expiry,
                 )
-            ))
-            con = cons[0]
+            ))[0]
 
         return con
@@ -896,6 +895,7 @@ class Client:
     async def get_sym_details(
         self,
         fqme: str,
 
     ) -> tuple[
         Contract,
         ContractDetails,
@@ -1187,7 +1187,7 @@ async def load_aio_clients(
     # the API TCP in `ib_insync` connection can be flaky af so instead
     # retry a few times to get the client going..
     connect_retries: int = 3,
-    connect_timeout: float = 30, # in case a remote-host
+    connect_timeout: float = 10,
     disconnect_on_exit: bool = True,
 
 ) -> dict[str, Client]:
@@ -1534,7 +1534,6 @@ async def open_aio_client_method_relay(
 
 ) -> None:
 
-    # with tractor.devx.maybe_open_crash_handler() as _bxerr:
     # sync with `open_client_proxy()` caller
     chan.started_nowait(client)
 
@@ -1544,11 +1543,7 @@ async def open_aio_client_method_relay(
     # relay all method requests to ``asyncio``-side client and deliver
     # back results
     while not chan._to_trio._closed:  # <- TODO, better check like `._web_bs`?
-        msg: (
-            None
-            |tuple[str, dict]
-            |dict
-        ) = await chan.get()
+        msg: tuple[str, dict]|dict|None = await chan.get()
         match msg:
             case None:  # termination sentinel
                 log.info('asyncio `Client` method-proxy SHUTDOWN!')
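The single-line union annotation above feeds the `match` dispatch that follows, where `None` is the relay's termination sentinel and `(method, kwargs)` tuples carry proxied calls. A minimal self-contained sketch of that dispatch shape, using a plain `asyncio.Queue` stand-in for the `tractor` channel (all names here are illustrative, not piker's API):

import asyncio

async def relay(chan: asyncio.Queue) -> None:
    # mirror of the loop above: a `None` sentinel stops the relay,
    # a (method-name, kwargs) tuple is dispatched, bare dicts are
    # treated as raw payloads.
    while True:
        msg: tuple[str, dict]|dict|None = await chan.get()
        match msg:
            case None:  # termination sentinel
                print('method-proxy SHUTDOWN!')
                return
            case (str() as meth, dict() as kwargs):
                print(f'calling {meth}(**{kwargs})')
            case dict():
                print(f'raw payload: {msg}')

async def main() -> None:
    q: asyncio.Queue = asyncio.Queue()
    await q.put(('qualify_contracts', {'symbol': 'mnq'}))
    await q.put(None)
    await relay(q)

asyncio.run(main())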
@@ -522,11 +522,7 @@ async def get_mkt_info(
     if atype == 'commodity':
         venue: str = 'cmdty'
     else:
-        venue: str = (
-            con.primaryExchange
-            or
-            con.exchange
-        )
+        venue = con.primaryExchange or con.exchange
 
     price_tick: Decimal = Decimal(str(details.minTick))
     ib_min_tick_gt_2: Decimal = Decimal('0.01')
@@ -43,6 +43,7 @@ from typing import (
 
 import numpy as np
 
 
 from .. import config
 from ..service import (
     check_for_service,
@@ -151,10 +152,7 @@ class StorageConnectionError(ConnectionError):
 
     '''
 
-def get_storagemod(
-    name: str,
-
-) -> ModuleType:
+def get_storagemod(name: str) -> ModuleType:
     mod: ModuleType = import_module(
         '.' + name,
         'piker.storage',
@@ -167,12 +165,9 @@ def get_storagemod(
 
 @acm
 async def open_storage_client(
-    backend: str|None = None,
+    backend: str | None = None,
 
-) -> tuple[
-    ModuleType,
-    StorageClient,
-]:
+) -> tuple[ModuleType, StorageClient]:
     '''
     Load the ``StorageClient`` for named backend.
 
@@ -272,10 +267,7 @@ async def open_tsdb_client(
     from ..data.feed import maybe_open_feed
 
     async with (
-        open_storage_client() as (
-            _,
-            storage,
-        ),
+        open_storage_client() as (_, storage),
 
         maybe_open_feed(
             [fqme],
@@ -283,7 +275,7 @@ async def open_tsdb_client(
 
         ) as feed,
     ):
-        profiler(f'opened feed for {fqme!r}')
+        profiler(f'opened feed for {fqme}')
 
         # to_append = feed.hist_shm.array
         # to_prepend = None
@@ -19,10 +19,16 @@ Storage middle-ware CLIs.
 
 """
 from __future__ import annotations
+# from datetime import datetime
+# from contextlib import (
+#     AsyncExitStack,
+# )
 from pathlib import Path
+from math import copysign
 import time
 from types import ModuleType
 from typing import (
+    Any,
     TYPE_CHECKING,
 )
 
@@ -41,6 +47,7 @@ from piker.data import (
     ShmArray,
 )
 from piker import tsp
+from piker.data._formatters import BGM
 from . import log
 from . import (
     __tsdbs__,
@@ -235,12 +242,122 @@ def anal(
     trio.run(main)
 
 
+async def markup_gaps(
+    fqme: str,
+    timeframe: float,
+    actl: AnnotCtl,
+    wdts: pl.DataFrame,
+    gaps: pl.DataFrame,
+
+) -> dict[int, dict]:
+    '''
+    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
+    with rectangles.
+
+    '''
+    aids: dict[int] = {}
+    for i in range(gaps.height):
+
+        row: pl.DataFrame = gaps[i]
+
+        # the gap's RIGHT-most bar's OPEN value
+        # at that time (sample) step.
+        iend: int = row['index'][0]
+        # dt: datetime = row['dt'][0]
+        # dt_prev: datetime = row['dt_prev'][0]
+        # dt_end_t: float = dt.timestamp()
+
+
+        # TODO: can we eventually remove this
+        # once we figure out why the epoch cols
+        # don't match?
+        # TODO: FIX HOW/WHY these aren't matching
+        # and are instead off by 4hours (EST
+        # vs. UTC?!?!)
+        # end_t: float = row['time']
+        # assert (
+        #     dt.timestamp()
+        #     ==
+        #     end_t
+        # )
+
+        # the gap's LEFT-most bar's CLOSE value
+        # at that time (sample) step.
+        prev_r: pl.DataFrame = wdts.filter(
+            pl.col('index') == iend - 1
+        )
+        # XXX: probably a gap in the (newly sorted or de-duplicated)
+        # dt-df, so we might need to re-index first..
+        if prev_r.is_empty():
+            await tractor.pause()
+
+        istart: int = prev_r['index'][0]
+        # dt_start_t: float = dt_prev.timestamp()
+
+        # start_t: float = prev_r['time']
+        # assert (
+        #     dt_start_t
+        #     ==
+        #     start_t
+        # )
+
+        # TODO: implement px-col width measure
+        # and ensure at least as many px-cols
+        # shown per rect as configured by user.
+        # gap_w: float = abs((iend - istart))
+        # if gap_w < 6:
+        #     margin: float = 6
+        #     iend += margin
+        #     istart -= margin
+
+        rect_gap: float = BGM*3/8
+        opn: float = row['open'][0]
+        ro: tuple[float, float] = (
+            # dt_end_t,
+            iend + rect_gap + 1,
+            opn,
+        )
+        cls: float = prev_r['close'][0]
+        lc: tuple[float, float] = (
+            # dt_start_t,
+            istart - rect_gap,  # + 1 ,
+            cls,
+        )
+
+        color: str = 'dad_blue'
+        diff: float = cls - opn
+        sgn: float = copysign(1, diff)
+        color: str = {
+            -1: 'buy_green',
+            1: 'sell_red',
+        }[sgn]
+
+        rect_kwargs: dict[str, Any] = dict(
+            fqme=fqme,
+            timeframe=timeframe,
+            start_pos=lc,
+            end_pos=ro,
+            color=color,
+        )
+
+        aid: int = await actl.add_rect(**rect_kwargs)
+        assert aid
+        aids[aid] = rect_kwargs
+
+    # tell chart to redraw all its
+    # graphics view layers Bo
+    await actl.redraw(
+        fqme=fqme,
+        timeframe=timeframe,
+    )
+    return aids
+
+
 @store.command()
 def ldshm(
     fqme: str,
     write_parquet: bool = True,
     reload_parquet_to_shm: bool = True,
-    pdb: bool = False,  # --pdb passed?
 
 ) -> None:
     '''
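In `markup_gaps` each rect is anchored on the previous bar's close (left corner) and the gap bar's open (right corner), padded horizontally by `BGM*3/8` index units, with `copysign` picking the fill color. A worked sketch with toy numbers (`BGM = 16` is an assumed value; the real constant lives in `piker.data._formatters`):

from math import copysign

BGM: int = 16  # assumed bar-graphics constant, not the real piker value

iend: int = 100          # shm index of the bar on the gap's right edge
istart: int = iend - 1   # previous bar on the gap's left edge
opn: float = 101.5       # right bar's open
cls: float = 100.0       # left bar's close

rect_gap: float = BGM*3/8          # 6.0 index-units of padding
ro = (iend + rect_gap + 1, opn)    # right/open corner -> (107.0, 101.5)
lc = (istart - rect_gap, cls)      # left/close corner -> (93.0, 100.0)

# sign of close-minus-open selects the gap-direction color
sgn: float = copysign(1, cls - opn)  # -1.0 here
color: str = {
    -1: 'buy_green',
    1: 'sell_red',
}[sgn]
assert color == 'buy_green'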
@@ -260,7 +377,7 @@ def ldshm(
         open_piker_runtime(
             'polars_boi',
             enable_modules=['piker.data._sharedmem'],
-            debug_mode=pdb,
+            debug_mode=True,
         ),
         open_storage_client() as (
             mod,
@@ -280,19 +397,17 @@ def ldshm(
 
         times: np.ndarray = shm.array['time']
         d1: float = float(times[-1] - times[-2])
-        d2: float = 0
-        # XXX, take a median sample rate if sufficient data
-        if times.size > 2:
-            d2: float = float(times[-2] - times[-3])
-            med: float = np.median(np.diff(times))
-            if (
-                d1 < 1.
-                and d2 < 1.
-                and med < 1.
-            ):
-                raise ValueError(
-                    f'Something is wrong with time period for {shm}:\n{times}'
-                )
+        d2: float = float(times[-2] - times[-3])
+        med: float = np.median(np.diff(times))
+        if (
+            d1 < 1.
+            and d2 < 1.
+            and med < 1.
+        ):
+            raise ValueError(
+                f'Something is wrong with time period for {shm}:\n{times}'
+            )
         period_s: float = float(max(d1, d2, med))
 
         null_segs: tuple = tsp.get_null_segs(
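The rewritten guard estimates the sample period from the last two step deltas plus the median of all successive time diffs, and only raises when every estimate is sub-second. A small sketch of the same arithmetic on synthetic epoch stamps:

import numpy as np

times = np.arange(5) * 60.  # 1-minute bars: [0., 60., 120., 180., 240.]

d1: float = float(times[-1] - times[-2])
d2: float = float(times[-2] - times[-3])
med: float = float(np.median(np.diff(times)))

if (
    d1 < 1.
    and d2 < 1.
    and med < 1.
):
    raise ValueError('sub-second sample period, shm is likely corrupt')

# max() keeps the estimate robust to one short (partial) final bar
period_s: float = float(max(d1, d2, med))
assert period_s == 60.0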
@@ -302,9 +417,7 @@ def ldshm(
 
         # TODO: call null-seg fixer somehow?
         if null_segs:
-            if tractor._state.is_debug_mode():
-                await tractor.pause()
-
+            await tractor.pause()
         # async with (
         #     trio.open_nursery() as tn,
         #     mod.open_history_client(
@@ -328,37 +441,11 @@ def ldshm(
                 wdts,
                 deduped,
                 diff,
-                valid_races,
-                dq_issues,
-            ) = tsp.dedupe_ohlcv_smart(
+            ) = tsp.dedupe(
                 shm_df,
+                period=period_s,
             )
 
-            # Report duplicate analysis
-            if diff > 0:
-                log.info(
-                    f'Removed {diff} duplicate timestamp(s)\n'
-                )
-            if valid_races is not None:
-                identical: int = (
-                    valid_races
-                    .filter(pl.col('identical_bars'))
-                    .height
-                )
-                monotonic: int = valid_races.height - identical
-                log.info(
-                    f'Valid race conditions: {valid_races.height}\n'
-                    f' - Identical bars: {identical}\n'
-                    f' - Volume monotonic: {monotonic}\n'
-                )
-
-            if dq_issues is not None:
-                log.warning(
-                    f'DATA QUALITY ISSUES from provider: '
-                    f'{dq_issues.height} timestamp(s)\n'
-                    f'{dq_issues}\n'
-                )
-
             # detect gaps from in expected (uniform OHLC) sample period
             step_gaps: pl.DataFrame = tsp.detect_time_gaps(
                 deduped,
@@ -373,8 +460,7 @@ def ldshm(
 
                 # TODO: actually pull the exact duration
                 # expected for each venue operational period?
-                # gap_dt_unit='day',
-                gap_dt_unit='day',
+                gap_dt_unit='days',
                 gap_thresh=1,
             )
 
@@ -385,11 +471,8 @@ def ldshm(
             if (
                 not venue_gaps.is_empty()
                 or (
-                    not step_gaps.is_empty()
-                    # XXX, i presume i put this bc i was guarding
-                    # for ib venue gaps?
-                    # and
-                    # period_s < 60
+                    period_s < 60
+                    and not step_gaps.is_empty()
                 )
             ):
                 # write repaired ts to parquet-file?
@@ -438,7 +521,7 @@ def ldshm(
                 do_markup_gaps: bool = True
                 if do_markup_gaps:
                     new_df: pl.DataFrame = tsp.np2pl(new)
-                    aids: dict = await tsp._annotate.markup_gaps(
+                    aids: dict = await markup_gaps(
                         fqme,
                         period_s,
                         actl,
@@ -451,13 +534,8 @@ def ldshm(
                     tf2aids[period_s] = aids
 
             else:
-                # No significant gaps to handle, but may have had
-                # duplicates removed (valid race conditions are ok)
-                if diff > 0 and dq_issues is not None:
-                    log.warning(
-                        'Found duplicates with data quality issues '
-                        'but no significant time gaps!\n'
-                    )
+                # allow interaction even when no ts problems.
+                assert not diff
 
             await tractor.pause()
         log.info('Exiting TSP shm anal-izer!')
File diff suppressed because it is too large
@@ -578,22 +578,11 @@ def detect_time_gaps(
     # NOTE: this flag is to indicate that on this (sampling) time
     # scale we expect to only be filtering against larger venue
     # closures-scale time gaps.
-    #
-    # Map to total_ method since `dt_diff` is a duration type,
-    # not datetime - modern polars requires `total_*` methods
-    # for duration types (e.g. `total_days()` not `day()`)
-    # Ensure plural form for polars API (e.g. 'day' -> 'days')
-    unit_plural: str = (
-        gap_dt_unit
-        if gap_dt_unit.endswith('s')
-        else f'{gap_dt_unit}s'
-    )
-    duration_method: str = f'total_{unit_plural}'
     return step_gaps.filter(
         # Second by an arbitrary dt-unit step size
         getattr(
             pl.col('dt_diff').dt,
-            duration_method,
+            gap_dt_unit,
         )().abs() > gap_thresh
     )
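The dropped mapping existed because on a polars `Duration` column the unit accessors are the `total_*` methods (`total_days()`, `total_hours()`, ...), while the retained `getattr(..., gap_dt_unit)` call assumes an API where the unit name itself (e.g. `days`) is the accessor. A hedged sketch of the same gap filter written against the `total_days()` spelling, assuming a recent polars release:

import polars as pl
from datetime import datetime

df = pl.DataFrame({
    'dt': [
        datetime(2024, 1, 1),
        datetime(2024, 1, 2),
        datetime(2024, 1, 9),  # a 7-day, venue-closure style gap
    ],
})
step_gaps = df.with_columns(
    (pl.col('dt') - pl.col('dt').shift()).alias('dt_diff'),
).filter(
    # recent polars spells the Duration accessor `total_days()`;
    # the plain-unit getattr in the hunk above targets the older
    # `days()` spelling instead.
    pl.col('dt_diff').dt.total_days().abs() > 1
)
print(step_gaps)  # only the 2024-01-09 row survives the filter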
@@ -1,182 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Time-series (remote) annotation APIs.
-
-"""
-from __future__ import annotations
-from math import copysign
-from typing import (
-    Any,
-    TYPE_CHECKING,
-)
-
-import polars as pl
-import tractor
-
-from piker.data._formatters import BGM
-from piker.storage import log
-
-if TYPE_CHECKING:
-    from piker.ui._remote_ctl import AnnotCtl
-
-
-async def markup_gaps(
-    fqme: str,
-    timeframe: float,
-    actl: AnnotCtl,
-    wdts: pl.DataFrame,
-    gaps: pl.DataFrame,
-
-) -> dict[int, dict]:
-    '''
-    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
-    with rectangles.
-
-    '''
-    aids: dict[int] = {}
-    for i in range(gaps.height):
-        row: pl.DataFrame = gaps[i]
-
-        # the gap's RIGHT-most bar's OPEN value
-        # at that time (sample) step.
-        iend: int = row['index'][0]
-        # dt: datetime = row['dt'][0]
-        # dt_prev: datetime = row['dt_prev'][0]
-        # dt_end_t: float = dt.timestamp()
-
-
-        # TODO: can we eventually remove this
-        # once we figure out why the epoch cols
-        # don't match?
-        # TODO: FIX HOW/WHY these aren't matching
-        # and are instead off by 4hours (EST
-        # vs. UTC?!?!)
-        # end_t: float = row['time']
-        # assert (
-        #     dt.timestamp()
-        #     ==
-        #     end_t
-        # )
-
-        # the gap's LEFT-most bar's CLOSE value
-        # at that time (sample) step.
-        prev_r: pl.DataFrame = wdts.filter(
-            pl.col('index') == iend - 1
-        )
-        # XXX: probably a gap in the (newly sorted or de-duplicated)
-        # dt-df, so we might need to re-index first..
-        dt: pl.Series = row['dt']
-        dt_prev: pl.Series = row['dt_prev']
-        if prev_r.is_empty():
-
-            # XXX, filter out any special ignore cases,
-            # - UNIX-epoch stamped datums
-            # - first row
-            if (
-                dt_prev.dt.epoch()[0] == 0
-                or
-                dt.dt.epoch()[0] == 0
-            ):
-                log.warning('Skipping row with UNIX epoch timestamp ??')
-                continue
-
-            if wdts[0]['index'][0] == iend:  # first row
-                log.warning('Skipping first-row (has no previous obvi) !!')
-                continue
-
-            # XXX, if the previous-row by shm-index is missing,
-            # meaning there is a missing sample (set), get the prior
-            # row by df index and attempt to use it?
-            i_wdts: pl.DataFrame = wdts.with_row_index(name='i')
-            i_row: int = i_wdts.filter(pl.col('index') == iend)['i'][0]
-            prev_row_by_i = wdts[i_row]
-            prev_r: pl.DataFrame = prev_row_by_i
-
-            # debug any missing pre-row
-            if tractor._state.is_debug_mode():
-                await tractor.pause()
-
-        istart: int = prev_r['index'][0]
-        # TODO: implement px-col width measure
-        # and ensure at least as many px-cols
-        # shown per rect as configured by user.
-        # gap_w: float = abs((iend - istart))
-        # if gap_w < 6:
-        #     margin: float = 6
-        #     iend += margin
-        #     istart -= margin
-
-        rect_gap: float = BGM*3/8
-        opn: float = row['open'][0]
-        cls: float = prev_r['close'][0]
-        ro: tuple[float, float] = (
-            iend + rect_gap + 1,
-            opn,
-        )
-        lc: tuple[float, float] = (
-            istart - rect_gap,  # + 1 ,
-            cls,
-        )
-
-        diff: float = cls - opn
-        sgn: float = copysign(1, diff)
-
-        color: str = 'dad_blue'
-        # TODO? mks more sense to have up/down coloring?
-        # color: str = {
-        #     -1: 'lilypad_green',  # up-gap
-        #     1: 'wine',  # down-gap
-        # }[sgn]
-
-        rect_kwargs: dict[str, Any] = dict(
-            fqme=fqme,
-            timeframe=timeframe,
-            start_pos=lc,
-            end_pos=ro,
-            color=color,
-        )
-
-        # add up/down rects
-        aid: int = await actl.add_rect(**rect_kwargs)
-        assert aid
-        aids[aid] = rect_kwargs
-        direction: str = (
-            'down' if sgn == 1
-            else 'up'
-        )
-        arrow_kwargs: dict[str, Any] = dict(
-            fqme=fqme,
-            timeframe=timeframe,
-            x=iend,
-            y=cls,
-            color=color,
-            alpha=160,
-            pointing=direction,
-        )
-
-        aid: int = await actl.add_arrow(
-            **arrow_kwargs
-        )
-
-    # tell chart to redraw all its
-    # graphics view layers Bo
-    await actl.redraw(
-        fqme=fqme,
-        timeframe=timeframe,
-    )
-    return aids
@@ -1,206 +0,0 @@
-'''
-Smart OHLCV deduplication with data quality validation.
-
-Handles concurrent write conflicts by keeping the most complete bar
-(highest volume) while detecting data quality anomalies.
-
-'''
-import polars as pl
-
-from ._anal import with_dts
-
-
-def dedupe_ohlcv_smart(
-    src_df: pl.DataFrame,
-    time_col: str = 'time',
-    volume_col: str = 'volume',
-    sort: bool = True,
-
-) -> tuple[
-    pl.DataFrame,  # with dts
-    pl.DataFrame,  # deduped (keeping higher volume bars)
-    int,  # count of dupes removed
-    pl.DataFrame|None,  # valid race conditions
-    pl.DataFrame|None,  # data quality violations
-]:
-    '''
-    Smart OHLCV deduplication keeping most complete bars.
-
-    For duplicate timestamps, keeps bar with highest volume under
-    the assumption that higher volume indicates more complete/final
-    data from backfill vs partial live updates.
-
-    Returns
-    -------
-    Tuple of:
-    - wdts: original dataframe with datetime columns added
-    - deduped: deduplicated frame keeping highest-volume bars
-    - diff: number of duplicate rows removed
-    - valid_races: duplicates meeting expected race condition pattern
-      (volume monotonic, OHLC ranges valid)
-    - data_quality_issues: duplicates violating expected relationships
-      indicating provider data problems
-
-    '''
-    wdts: pl.DataFrame = with_dts(src_df)
-
-    # Find duplicate timestamps
-    dupes: pl.DataFrame = wdts.filter(
-        pl.col(time_col).is_duplicated()
-    )
-
-    if dupes.is_empty():
-        # No duplicates, return as-is
-        return (wdts, wdts, 0, None, None)
-
-    # Analyze duplicate groups for validation
-    dupe_analysis: pl.DataFrame = (
-        dupes
-        .sort([time_col, 'index'])
-        .group_by(time_col, maintain_order=True)
-        .agg([
-            pl.col('index').alias('indices'),
-            pl.col('volume').alias('volumes'),
-            pl.col('high').alias('highs'),
-            pl.col('low').alias('lows'),
-            pl.col('open').alias('opens'),
-            pl.col('close').alias('closes'),
-            pl.col('dt').first().alias('dt'),
-            pl.len().alias('count'),
-        ])
-    )
-
-    # Validate OHLCV monotonicity for each duplicate group
-    def check_ohlcv_validity(row) -> dict[str, bool]:
-        '''
-        Check if duplicate bars follow expected race condition pattern.
-
-        For a valid live-update → backfill race:
-        - volume should be monotonically increasing
-        - high should be monotonically non-decreasing
-        - low should be monotonically non-increasing
-        - open should be identical (fixed at bar start)
-
-        Returns dict of violation flags.
-
-        '''
-        vols: list = row['volumes']
-        highs: list = row['highs']
-        lows: list = row['lows']
-        opens: list = row['opens']
-
-        violations: dict[str, bool] = {
-            'volume_non_monotonic': False,
-            'high_decreased': False,
-            'low_increased': False,
-            'open_mismatch': False,
-            'identical_bars': False,
-        }
-
-        # Check if all bars are identical (pure duplicate)
-        if (
-            len(set(vols)) == 1
-            and len(set(highs)) == 1
-            and len(set(lows)) == 1
-            and len(set(opens)) == 1
-        ):
-            violations['identical_bars'] = True
-            return violations
-
-        # Check volume monotonicity
-        for i in range(1, len(vols)):
-            if vols[i] < vols[i-1]:
-                violations['volume_non_monotonic'] = True
-                break
-
-        # Check high monotonicity (can only increase or stay same)
-        for i in range(1, len(highs)):
-            if highs[i] < highs[i-1]:
-                violations['high_decreased'] = True
-                break
-
-        # Check low monotonicity (can only decrease or stay same)
-        for i in range(1, len(lows)):
-            if lows[i] > lows[i-1]:
-                violations['low_increased'] = True
-                break
-
-        # Check open consistency (should be fixed)
-        if len(set(opens)) > 1:
-            violations['open_mismatch'] = True
-
-        return violations
-
-    # Apply validation
-    dupe_analysis = dupe_analysis.with_columns([
-        pl.struct(['volumes', 'highs', 'lows', 'opens'])
-        .map_elements(
-            check_ohlcv_validity,
-            return_dtype=pl.Struct([
-                pl.Field('volume_non_monotonic', pl.Boolean),
-                pl.Field('high_decreased', pl.Boolean),
-                pl.Field('low_increased', pl.Boolean),
-                pl.Field('open_mismatch', pl.Boolean),
-                pl.Field('identical_bars', pl.Boolean),
-            ])
-        )
-        .alias('validity')
-    ])
-
-    # Unnest validity struct
-    dupe_analysis = dupe_analysis.unnest('validity')
-
-    # Separate valid races from data quality issues
-    valid_races: pl.DataFrame|None = (
-        dupe_analysis
-        .filter(
-            # Valid if no violations OR just identical bars
-            ~pl.col('volume_non_monotonic')
-            & ~pl.col('high_decreased')
-            & ~pl.col('low_increased')
-            & ~pl.col('open_mismatch')
-        )
-    )
-    if valid_races.is_empty():
-        valid_races = None
-
-    data_quality_issues: pl.DataFrame|None = (
-        dupe_analysis
-        .filter(
-            # Issues if any non-identical violation exists
-            (
-                pl.col('volume_non_monotonic')
-                | pl.col('high_decreased')
-                | pl.col('low_increased')
-                | pl.col('open_mismatch')
-            )
-            & ~pl.col('identical_bars')
-        )
-    )
-    if data_quality_issues.is_empty():
-        data_quality_issues = None
-
-    # Deduplicate: keep highest volume bar for each timestamp
-    deduped: pl.DataFrame = (
-        wdts
-        .sort([time_col, volume_col])
-        .unique(
-            subset=[time_col],
-            keep='last',
-            maintain_order=False,
-        )
-    )
-
-    # Re-sort by time or index
-    if sort:
-        deduped = deduped.sort(by=time_col)
-
-    diff: int = wdts.height - deduped.height
-
-    return (
-        wdts,
-        deduped,
-        diff,
-        valid_races,
-        data_quality_issues,
-    )
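The keep-the-most-complete-bar idiom at the heart of the removed module is sort-then-unique: order each duplicate-timestamp group by volume and keep the last (highest-volume) row. A standalone sketch of just that step, with made-up bar values:

import polars as pl

df = pl.DataFrame({
    'time':   [60, 120, 120, 180],  # 120 appears twice: live vs backfill
    'close':  [1.0, 1.1, 1.2, 1.3],
    'volume': [10.0, 3.0, 9.0, 7.0],
})
deduped = (
    df
    .sort(['time', 'volume'])
    .unique(
        subset=['time'],
        keep='last',  # highest volume within each dupe group
        maintain_order=False,
    )
    .sort(by='time')
)
# the higher-volume (presumed backfilled) bar wins the 120 slot
assert deduped.filter(pl.col('time') == 120)['volume'][0] == 9.0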
File diff suppressed because it is too large
@@ -21,7 +21,6 @@ Higher level annotation editors.
 from __future__ import annotations
 from collections import defaultdict
 from typing import (
-    Literal,
     Sequence,
     TYPE_CHECKING,
 )
@@ -67,18 +66,9 @@ log = get_logger(__name__)
 
 
 class ArrowEditor(Struct):
-    '''
-    Annotate a chart-view with arrows most often used for indicating,
-    - order txns/clears,
-    - positions directions,
-    - general points-of-interest like nooz events.
-
-    '''
     godw: GodWidget = None  # type: ignore # noqa
-    _arrows: dict[
-        str,
-        list[pg.ArrowItem]
-    ] = {}
+    _arrows: dict[str, list[pg.ArrowItem]] = {}
 
     def add(
         self,
@@ -86,14 +76,8 @@ class ArrowEditor(Struct):
         uid: str,
         x: float,
         y: float,
-        color: str|None = None,
-        pointing: Literal[
-            'up',
-            'down',
-            None,
-        ] = None,
-        alpha: int = 255,
-        zval: float = 1e9,
+        color: str = 'default',
+        pointing: str | None = None,
 
     ) -> pg.ArrowItem:
         '''
@@ -109,11 +93,6 @@ class ArrowEditor(Struct):
         # scale arrow sizing to dpi-aware font
         size = _font.font.pixelSize() * 0.8
 
-        color = color or 'default'
-        color = QColor(hcolor(color))
-        color.setAlpha(alpha)
-        pen = fn.mkPen(color, width=1)
-        brush = fn.mkBrush(color)
         arrow = pg.ArrowItem(
             angle=angle,
             baseAngle=0,
@@ -121,58 +100,22 @@ class ArrowEditor(Struct):
             headWidth=size/2,
             tailLen=None,
             pxMode=True,
-            # coloring
-            pen=pen,
-            brush=brush,
-        )
-        arrow.setZValue(zval)
-        arrow.setPos(x, y)
-        plot.addItem(arrow)  # render to view
 
-        # register for removal
-        arrow._uid = uid
-        self._arrows.setdefault(
-            uid, []
-        ).append(arrow)
+            # coloring
+            pen=pg.mkPen(hcolor('papas_special')),
+            brush=pg.mkBrush(hcolor(color)),
+        )
+        arrow.setPos(x, y)
+        self._arrows.setdefault(uid, []).append(arrow)
+
+        # render to view
+        plot.addItem(arrow)
+
         return arrow
 
-    def remove(
-        self,
-        arrow: pg.ArrowItem,
-    ) -> None:
-        '''
-        Remove a *single arrow* from all chart views to which it was
-        added.
-
-        '''
-        uid: str = arrow._uid
-        arrows: list[pg.ArrowItem] = self._arrows[uid]
-        log.info(
-            f'Removing arrow from views\n'
-            f'uid: {uid!r}\n'
-            f'{arrow!r}\n'
-        )
+    def remove(self, arrow) -> bool:
         for linked in self.godw.iter_linked():
             linked.chart.plotItem.removeItem(arrow)
-        try:
-            arrows.remove(arrow)
-        except ValueError:
-            log.warning(
-                f'Arrow was already removed?\n'
-                f'uid: {uid!r}\n'
-                f'{arrow!r}\n'
-            )
-
-    def remove_all(self) -> set[pg.ArrowItem]:
-        '''
-        Remove all arrows added by this editor from all
-        chart-views.
-
-        '''
-        for uid, arrows in self._arrows.items():
-            for arrow in arrows:
-                self.remove(arrow)
 
 
 class LineEditor(Struct):
@@ -318,9 +261,6 @@ class LineEditor(Struct):
 
         return lines
 
-    # compat with ArrowEditor
-    remove = remove_line
-
 
 def as_point(
     pair: Sequence[float, float] | QPointF,
@@ -353,7 +293,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
     def __init__(
         self,
         viewbox: ViewBox,
-        color: str|None = None,
+        color: str | None = None,
     ) -> None:
         super().__init__(0, 0, 1, 1)
 
@@ -669,6 +609,3 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         ):
             scen.removeItem(self._label_proxy)
 
-    # compat with ArrowEditor
-    remove = delete
-
@@ -237,8 +237,8 @@ class LevelLabel(YAxisLabel):
 class L1Label(LevelLabel):
 
     text_flags = (
-        QtCore.Qt.TextFlag.TextDontClip
-        | QtCore.Qt.AlignmentFlag.AlignLeft
+        QtCore.Qt.TextDontClip
+        | QtCore.Qt.AlignLeft
     )
 
     def set_label_str(
@@ -27,12 +27,10 @@ from contextlib import (
 from functools import partial
 from pprint import pformat
 from typing import (
+    # Any,
     AsyncContextManager,
-    Literal,
 )
-from uuid import uuid4
 
-import pyqtgraph as pg
 import tractor
 import trio
 from tractor import trionics
@@ -51,13 +49,11 @@ from piker.ui.qt import (
 )
 from ._display import DisplayState
 from ._interaction import ChartView
-from ._editors import (
-    SelectRect,
-    ArrowEditor,
-)
+from ._editors import SelectRect
 from ._chart import ChartPlotWidget
 from ._dataviz import Viz
 
 
 log = get_logger(__name__)
 
 # NOTE: this is UPDATED by the `._display.graphics_update_loop()`
@@ -87,34 +83,8 @@ _ctxs: IpcCtxTable = {}
 # the "annotations server" which actually renders to a Qt canvas).
 # type AnnotsTable = dict[int, QGraphicsItem]
 AnnotsTable = dict[int, QGraphicsItem]
-EditorsTable = dict[int, ArrowEditor]
 
 _annots: AnnotsTable = {}
-_editors: EditorsTable = {}
 
-
-def rm_annot(
-    annot: ArrowEditor|SelectRect
-) -> bool:
-    global _editors
-    match annot:
-        case pg.ArrowItem():
-            editor = _editors[annot._uid]
-            editor.remove(annot)
-            # ^TODO? only remove each arrow or all?
-            # if editor._arrows:
-            #     editor.remove_all()
-            # else:
-            #     log.warning(
-            #         f'Annot already removed!\n'
-            #         f'{annot!r}\n'
-            #     )
-            return True
-
-        case SelectRect():
-            annot.delete()
-            return True
-
-    return False
-
 
 async def serve_rc_annots(
@@ -125,12 +95,6 @@ async def serve_rc_annots(
     annots: AnnotsTable,
 
 ) -> None:
-    '''
-    A small viz(ualization) server for remote ctl of chart
-    annotations.
-
-    '''
-    global _editors
     async for msg in annot_req_stream:
         match msg:
             case {
@@ -140,6 +104,7 @@ async def serve_rc_annots(
                 'meth': str(meth),
                 'kwargs': dict(kwargs),
             }:
 
                 ds: DisplayState = _dss[fqme]
                 chart: ChartPlotWidget = {
                     60: ds.hist_chart,
@@ -171,67 +136,15 @@ async def serve_rc_annots(
                 aids.add(aid)
                 await annot_req_stream.send(aid)
 
-            case {
-                'cmd': 'ArrowEditor',
-                'fqme': fqme,
-                'timeframe': timeframe,
-                'meth': 'add'|'remove' as meth,
-                'kwargs': {
-                    'x': float(x),
-                    'y': float(y),
-                    'pointing': pointing,
-                    'color': color,
-                    'aid': str()|None as aid,
-                    'alpha': int(alpha),
-                },
-                # ?TODO? split based on method fn-sigs?
-                # 'pointing',
-            }:
-                ds: DisplayState = _dss[fqme]
-                chart: ChartPlotWidget = {
-                    60: ds.hist_chart,
-                    1: ds.chart,
-                }[timeframe]
-                cv: ChartView = chart.cv
-                godw = chart.linked.godwidget
-
-                arrows = ArrowEditor(godw=godw)
-                # `.add/.remove()` API
-                if meth != 'add':
-                    # await tractor.pause()
-                    raise ValueError(
-                        f'Invalid arrow-edit request ?\n'
-                        f'{msg!r}\n'
-                    )
-
-                aid: str = str(uuid4())
-                arrow: pg.ArrowItem = arrows.add(
-                    plot=chart.plotItem,
-                    uid=aid,
-                    x=x,
-                    y=y,
-                    pointing=pointing,
-                    color=color,
-                    alpha=alpha,
-                )
-                annots[aid] = arrow
-                _editors[aid] = arrows
-                aids: set[int] = ctxs[ipc_key][1]
-                aids.add(aid)
-                await annot_req_stream.send(aid)
-
-                # TODO, use `pg.TextItem` to put a humaized
-                # time label beside the arrows
-
             case {
                 'cmd': 'remove',
-                'aid': int(aid)|str(aid),
+                'aid': int(aid),
             }:
                 # NOTE: this is normally entered on
                 # a client's annotation de-alloc normally
                 # prior to detach or modify.
                 annot: QGraphicsItem = annots[aid]
-                assert rm_annot(annot)
+                annot.delete()
 
                 # respond to client indicating annot
                 # was indeed deleted.
@@ -275,12 +188,6 @@ async def remote_annotate(
 ) -> None:
 
     global _dss, _ctxs
-    if not _dss:
-        raise RuntimeError(
-            'Race condition on chart-init state ??\n'
-            'Anoter actor is trying to annoate this chart '
-            'before it has fully spawned.\n'
-        )
     assert _dss
 
     _ctxs[ctx.cid] = (ctx, set())
@@ -305,7 +212,7 @@ async def remote_annotate(
             assert _ctx is ctx
             for aid in aids:
                 annot: QGraphicsItem = _annots[aid]
-                assert rm_annot(annot)
+                annot.delete()
 
 
 class AnnotCtl(Struct):
@@ -427,55 +334,20 @@ class AnnotCtl(Struct):
             'timeframe': timeframe,
         })
 
-    async def add_arrow(
-        self,
-        fqme: str,
-        timeframe: float,
-        x: float,
-        y: float,
-        pointing: Literal[
-            'up',
-            'down',
-        ],
-        # TODO: a `Literal['view', 'scene']` for this?
-        # domain: str = 'view',  # or 'scene'
-        color: str = 'dad_blue',
-        alpha: int = 116,
-
-        from_acm: bool = False,
-
-    ) -> int:
-        '''
-        Add a `SelectRect` annotation to the target view, return
-        the instances `id(obj)` from the remote UI actor.
-
-        '''
-        ipc: MsgStream = self._get_ipc(fqme)
-        await ipc.send({
-            'fqme': fqme,
-            'cmd': 'ArrowEditor',
-            'timeframe': timeframe,
-            # 'meth': str(meth),
-            'meth': 'add',
-            'kwargs': {
-                'x': float(x),
-                'y': float(y),
-                'color': color,
-                'pointing': pointing,  # up|down
-                'alpha': alpha,
-                'aid': None,
-            },
-        })
-        aid: int = await ipc.receive()
-        self._ipcs[aid] = ipc
-        if not from_acm:
-            self._annot_stack.push_async_callback(
-                partial(
-                    self.remove,
-                    aid,
-                )
-            )
-        return aid
+    # TODO: do we even need this?
+    # async def modify(
+    #     self,
+    #     aid: int,  # annotation id
+    #     meth: str,  # far end graphics object method to invoke
+    #     params: dict[str, Any],  # far end `meth(**kwargs)`
+    # ) -> bool:
+    #     '''
+    #     Modify an existing (remote) annotation's graphics
+    #     paramters, thus changing it's appearance / state in real
+    #     time.
+    #
+    #     '''
+    #     raise NotImplementedError
 
 
 @acm
@@ -502,9 +374,7 @@ async def open_annot_ctl(
     # TODO: print the current discoverable actor UID set
     # here as well?
     if not maybe_portals:
-        raise RuntimeError(
-            'No chart actors found in service domain?'
-        )
+        raise RuntimeError('No chart UI actors found in service domain?')
 
     for portal in maybe_portals:
         ctx_mngrs.append(
@@ -59,14 +59,8 @@ from piker.data import (
 from piker.types import Struct
 from piker.log import get_logger
 from piker.ui.qt import Qt
-from ._editors import (
-    LineEditor,
-    ArrowEditor,
-)
-from ._lines import (
-    order_line,
-    LevelLine,
-)
+from ._editors import LineEditor, ArrowEditor
+from ._lines import order_line, LevelLine
 from ._position import (
     PositionTracker,
     SettingsPane,
@@ -1,256 +0,0 @@
-#!/usr/bin/env python
-'''
-Programmatic debugging helper for `pdbp` REPL human-like
-interaction but built to allow `claude` to interact with
-crashes and `tractor.pause()` breakpoints along side a human dev.
-
-Originally written by `clauded` during a backfiller inspection
-session with @goodboy trying to resolve duplicate/gappy ohlcv ts
-issues discovered while testing the new `nativedb` tsdb.
-
-Allows `claude` to run `pdb` commands and capture output in an "offline"
-manner but generating similar output as if it was iteracting with
-the debug REPL.
-
-The use of `pexpect` is heavily based on tractor's REPL UX test
-suite(s), namely various `tests/devx/test_debugger.py` patterns.
-
-'''
-import sys
-import os
-import time
-
-import pexpect
-from pexpect.exceptions import (
-    TIMEOUT,
-    EOF,
-)
-
-
-PROMPT: str = r'\(Pdb\+\)'
-
-
-def expect(
-    child: pexpect.spawn,
-    patt: str,
-    **kwargs,
-) -> None:
-    '''
-    Expect wrapper that prints last console data before failing.
-
-    '''
-    try:
-        child.expect(
-            patt,
-            **kwargs,
-        )
-    except TIMEOUT:
-        before: str = (
-            str(child.before.decode())
-            if isinstance(child.before, bytes)
-            else str(child.before)
-        )
-        print(
-            f'TIMEOUT waiting for pattern: {patt}\n'
-            f'Last seen output:\n{before}'
-        )
-        raise
-
-
-def run_pdb_commands(
-    commands: list[str],
-    initial_cmd: str = 'piker store ldshm xmrusdt.usdtm.perp.binance',
-    timeout: int = 30,
-    print_output: bool = True,
-) -> dict[str, str]:
-    '''
-    Spawn piker process, wait for pdb prompt, execute commands.
-
-    Returns dict mapping command -> output.
-
-    '''
-    results: dict[str, str] = {}
-
-    # Disable colored output for easier parsing
-    os.environ['PYTHON_COLORS'] = '0'
-
-    # Spawn the process
-    if print_output:
-        print(f'Spawning: {initial_cmd}')
-
-    child: pexpect.spawn = pexpect.spawn(
-        initial_cmd,
-        timeout=timeout,
-        encoding='utf-8',
-        echo=False,
-    )
-
-    # Wait for pdb prompt
-    try:
-        expect(child, PROMPT, timeout=timeout)
-        if print_output:
-            print('Reached pdb prompt!')
-
-        # Execute each command
-        for cmd in commands:
-            if print_output:
-                print(f'\n>>> {cmd}')
-
-            child.sendline(cmd)
-            time.sleep(0.1)
-
-            # Wait for next prompt
-            expect(child, PROMPT, timeout=timeout)
-
-            # Capture output (everything before the prompt)
-            output: str = (
-                str(child.before.decode())
-                if isinstance(child.before, bytes)
-                else str(child.before)
-            )
-            results[cmd] = output
-
-            if print_output:
-                print(output)
-
-        # Quit debugger gracefully
-        child.sendline('quit')
-        try:
-            child.expect(EOF, timeout=5)
-        except (TIMEOUT, EOF):
-            pass
-
-    except TIMEOUT as e:
-        print(f'Timeout: {e}')
-        if child.before:
-            before: str = (
-                str(child.before.decode())
-                if isinstance(child.before, bytes)
-                else str(child.before)
-            )
-            print(f'Buffer:\n{before}')
-        results['_error'] = str(e)
-
-    finally:
-        if child.isalive():
-            child.close(force=True)
-
-    return results
-
-
-class InteractivePdbSession:
-    '''
-    Interactive pdb session manager for incremental debugging.
-
-    '''
-    def __init__(
-        self,
-        cmd: str = 'piker store ldshm xmrusdt.usdtm.perp.binance',
-        timeout: int = 30,
-    ):
-        self.cmd: str = cmd
-        self.timeout: int = timeout
-        self.child: pexpect.spawn|None = None
-        self.history: list[tuple[str, str]] = []
-
-    def start(self) -> None:
-        '''
-        Start the piker process and wait for first prompt.
-
-        '''
-        os.environ['PYTHON_COLORS'] = '0'
-
-        print(f'Starting: {self.cmd}')
-        self.child = pexpect.spawn(
-            self.cmd,
-            timeout=self.timeout,
-            encoding='utf-8',
-            echo=False,
-        )
-
-        # Wait for initial prompt
-        expect(self.child, PROMPT, timeout=self.timeout)
-        print('Ready at pdb prompt!')
-
-    def run(
-        self,
-        cmd: str,
-        print_output: bool = True,
-    ) -> str:
-        '''
-        Execute a single pdb command and return output.
-
-        '''
-        if not self.child or not self.child.isalive():
-            raise RuntimeError('Session not started or dead')
-
-        if print_output:
-            print(f'\n>>> {cmd}')
-
-        self.child.sendline(cmd)
-        time.sleep(0.1)
-
-        # Wait for next prompt
-        expect(self.child, PROMPT, timeout=self.timeout)
-
-        output: str = (
-            str(self.child.before.decode())
-            if isinstance(self.child.before, bytes)
-            else str(self.child.before)
-        )
-        self.history.append((cmd, output))
-
-        if print_output:
-            print(output)
-
-        return output
-
-    def quit(self) -> None:
-        '''
-        Exit the debugger and cleanup.
-
-        '''
-        if self.child and self.child.isalive():
-            self.child.sendline('quit')
-            try:
-                self.child.expect(EOF, timeout=5)
-            except (TIMEOUT, EOF):
-                pass
-            self.child.close(force=True)
-
-    def __enter__(self):
-        self.start()
-        return self
-
-    def __exit__(self, *args):
-        self.quit()
-
-
-if __name__ == '__main__':
-    # Example inspection commands
-    inspect_cmds: list[str] = [
-        'locals().keys()',
-        'type(deduped)',
-        'deduped.shape',
-        (
-            'step_gaps.shape '
-            'if "step_gaps" in locals() '
-            'else "N/A"'
-        ),
-        (
-            'venue_gaps.shape '
-            'if "venue_gaps" in locals() '
-            'else "N/A"'
-        ),
-    ]
-
-    # Allow commands from CLI args
-    if len(sys.argv) > 1:
-        inspect_cmds = sys.argv[1:]
-
-    # Interactive session example
-    with InteractivePdbSession() as session:
-        for cmd in inspect_cmds:
-            session.run(cmd)
-
-    print('\n=== Session Complete ===')
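The deleted helper drives a `(Pdb+)` prompt through `pexpect`'s expect/sendline loop; the identical pattern works against stock `pdb`. A minimal POSIX-only sketch that runs without piker by debugging a stdlib module instead:

import pexpect

PROMPT: str = r'\(Pdb\)'  # stock pdb; the helper above matched r'\(Pdb\+\)'

# `python -m pdb -m <module>` stops before the first line and prompts.
child = pexpect.spawn(
    'python -m pdb -m calendar',
    encoding='utf-8',
    timeout=10,
    echo=False,
)
child.expect(PROMPT)

for cmd in ('l', 'p __name__'):
    child.sendline(cmd)
    child.expect(PROMPT)
    # everything between sendline and the next prompt is the output
    print(f'>>> {cmd}\n{child.before}')

child.sendline('quit')
child.close(force=True)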