Compare commits


5 Commits

Author SHA1 Message Date
Gud Boi 88353ffef8 Ignore single-zero-sample trace on no runtime.. 2026-01-30 14:53:00 -05:00
Gud Boi ec4e6ec742 ib.feed: drop legacy "quote-with-vlm" polling
Since we now explicitly check each mkt's venue hours, we don't need
this mega hacky "waiting on a quote with real vlm" stuff to determine
whether historical data should be loaded immediately. This approach also
had the added complexity that we needed to handle edge cases for tickers
(like xauusd.cmdty) which never have vlm.. so it's nice to be rid of it
all ;p
2026-01-30 14:47:11 -05:00
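
The venue-hours gate replacing that poll amounts to roughly the sketch below; `venue_is_open()` and `load_history()` are illustrative stand-ins here, not the actual feed-layer API:

from datetime import datetime, timezone

async def gate_history_load(mkt, iter_quotes) -> None:
    # sketch only: decide on immediate history loading from the venue
    # calendar instead of spinning until a quote with non-zero vlm arrives.
    now = datetime.now(timezone.utc)
    if not mkt.venue_is_open(now):
        # venue closed: no live clearing ticks are coming, so kick off
        # the historical backfill right away.
        await load_history(mkt)
        return

    # venue open: the very next quote is enough to mark the feed live,
    # which also drops the special casing for vlm-less tickers like
    # xauusd.cmdty.
    first_quote = await iter_quotes.receive()
    await load_history(mkt, anchor=first_quote)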
Gud Boi 205058de21 Always overwrite tsdb duplicates found during backfill
Enable the previously commented-out dedupe-and-write logic in
`start_backfill()` to ensure tsdb stays clean of duplicate
entries.

(this patch was generated in part by [`claude-code`][claude-code-gh])
[claude-code-gh]: https://github.com/anthropics/claude-code
2026-01-30 14:46:23 -05:00
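
The dedupe-and-write step being re-enabled has roughly this shape, sketched here assuming polars frames keyed on a 'time' column; the actual `dedupe()` helper and `storage.write_ohlcv()` call are piker's own, shown in the diff below:

import polars as pl

def dedupe_ohlcv(df: pl.DataFrame) -> tuple[pl.DataFrame, int]:
    # drop rows that repeat a sample time, keeping the last write,
    # and report how many dupes were found.
    deduped: pl.DataFrame = df.unique(
        subset='time',
        keep='last',
        maintain_order=True,
    ).sort('time')
    diff: int = df.height - deduped.height
    return deduped, diff

# when `diff` is non-zero, `start_backfill()` overwrites the tsdb copy
# via `storage.write_ohlcv(col_sym_key, deduped, timeframe)`.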
Gud Boi f11ab5f0aa For claude, ignore no runtime for offline shm reading 2026-01-29 02:49:25 -05:00
Gud Boi 8718ad4874 .ib._util: ignore attr err on click-hack twm wakeups? 2026-01-29 02:48:41 -05:00
5 changed files with 38 additions and 51 deletions

View File

@@ -333,7 +333,14 @@ def i3ipc_xdotool_manual_click_hack() -> None:
     '''
     focussed, matches = i3ipc_fin_wins_titled()
-    orig_win_id = focussed.window
+    try:
+        orig_win_id = focussed.window
+    except AttributeError:
+        # XXX if .window cucks we prolly aren't intending to
+        # use this and/or just woke up from suspend..
+        log.exception('xdotool invalid usage ya ??\n')
+        return
     try:
         for name, con in matches:
             print(f'Resetting data feed for {name}')

View File

@@ -1246,54 +1246,12 @@ async def stream_quotes(
         tn.start_soon(reset_on_feed)
         async with aclosing(iter_quotes):
-            # if syminfo.get('no_vlm', False):
-            if not init_msg.shm_write_opts['has_vlm']:
-                # generally speaking these feeds don't
-                # include vlm data.
-                atype: str = mkt.dst.atype
-                log.info(
-                    f'No-vlm {mkt.fqme}@{atype}, skipping quote poll'
-                )
-            else:
-                # wait for real volume on feed (trading might be
-                # closed)
-                while True:
-                    ticker = await iter_quotes.receive()
-                    # for a real volume contract we rait for
-                    # the first "real" trade to take place
-                    if (
-                        # not calc_price
-                        # and not ticker.rtTime
-                        False
-                        # not ticker.rtTime
-                    ):
-                        # spin consuming tickers until we
-                        # get a real market datum
-                        log.debug(f"New unsent ticker: {ticker}")
-                        continue
-                    else:
-                        log.debug("Received first volume tick")
-                        # ugh, clear ticks since we've
-                        # consumed them (ahem, ib_insync is
-                        # truly stateful trash)
-                        # ticker.ticks = []
-                        # XXX: this works because we don't use
-                        # ``aclosing()`` above?
-                        break
-                quote = normalize(ticker)
-                log.debug(f"First ticker received {quote}")
             # tell data-layer spawner-caller that live
             # quotes are now active desptie not having
             # necessarily received a first vlm/clearing
             # tick.
             ticker = await iter_quotes.receive()
+            quote = normalize(ticker)
             feed_is_live.set()
             fqme: str = quote['fqme']
             await send_chan.send({fqme: quote})

View File

@@ -520,9 +520,12 @@ def open_shm_array(
     # "unlink" created shm on process teardown by
     # pushing teardown calls onto actor context stack
-    stack = tractor.current_actor().lifetime_stack
-    stack.callback(shmarr.close)
-    stack.callback(shmarr.destroy)
+    stack = tractor.current_actor(
+        err_on_no_runtime=False,
+    ).lifetime_stack
+    if stack:
+        stack.callback(shmarr.close)
+        stack.callback(shmarr.destroy)
     return shmarr
@@ -607,7 +610,10 @@ def attach_shm_array(
     _known_tokens[key] = token
     # "close" attached shm on actor teardown
-    tractor.current_actor().lifetime_stack.callback(sha.close)
+    if (actor := tractor.current_actor(
+        err_on_no_runtime=False,
+    )):
+        actor.lifetime_stack.callback(sha.close)
     return sha

View File

@@ -276,7 +276,15 @@ def get_null_segs(
     absi_zdiff: np.ndarray = np.diff(absi_zeros)
     if zero_t.size < 2:
-        breakpoint()
+        try:
+            breakpoint()
+        except RuntimeError:
+            # XXX, if greenback not active from
+            # piker store ldshm cmd..
+            log.exception(
+                "Can't debug single-sample null!\n"
+            )
         return None
     # scan for all frame-indices where the

View File

@@ -708,8 +708,16 @@ async def start_backfill(
                 deduped,
                 diff,
             ) = dedupe(df)
-            # if diff:
-            #     sort_diff(df)
+            if diff:
+                log.warning(
+                    f'Found {diff} duplicates in tsdb, '
+                    f'overwriting with deduped data\n'
+                )
+                await storage.write_ohlcv(
+                    col_sym_key,
+                    deduped,
+                    timeframe,
+                )
             else:
                 # finally filled gap