ib: add venue-hours checking
Such that we can avoid other (pretty unreliable) "alternative" checks to
determine whether a real-time quote should be waited on or, when the venue is
closed, we should just signal that historical backfilling can commence
immediately. This has been a todo for a very long time and it turned out to be
much easier to accomplish than anticipated.

Deats,
- add a new `is_current_time_in_range()` dt-range checker to predicate whether
  an input range contains `datetime.now(start_dt.tzinfo)`.
- in `.ib.feed.stream_quotes()` add a `venue_is_open: bool` which uses all of
  the new ^^ to determine whether to branch into the
  short-circuit-and-do-history-now case or the standard
  real-time-quotes-should-be-awaited-since-venue-is-open case; drop all the old
  hacks that tried to work around not knowing that venue state.

Other,
- also add a gpt5-composed parser to `._util` for the
  `ib_insync.ContractDetails.tradingHours: str`, written before realizing there
  is a `.tradingSessions` property XD
- in `.ib.feed`,
  * add various EG-collapsings per recent tractor/trio updates.
  * better logging / exc-handling around ticker quote pushes.
  * stop clearing `Ticker.ticks` each quote iteration; not sure if this is
    needed/correct tho?
  * add a masked `Ticker.ticks` poll loop that logs.
- fix some `str.format()` usage in `._util.try_xdo_manual()`.
parent 390a57c96d
commit e19a724037
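In essence the new branching boils down to the sketch below (a minimal
illustration only; it assumes, per the diff, that `ib_insync`'s
`ContractDetails.tradingSessions()` entries expose tz-aware `.start`/`.end`
datetimes):

    from datetime import datetime

    def is_current_time_in_range(
        start_dt: datetime,
        end_dt: datetime,
    ) -> bool:
        # compare "now" rendered in the same tz as the session start
        now: datetime = datetime.now(start_dt.tzinfo)
        return start_dt <= now <= end_dt

    # inside `.ib.feed.stream_quotes()`, after `get_mkt_info()`:
    #
    # venue_is_open: bool = any(
    #     is_current_time_in_range(start_dt=sesh.start, end_dt=sesh.end)
    #     for sesh in details.tradingSessions()
    # )
    # if not venue_is_open:
    #     # "pretend the feed is live" so OHLCV backfill can start
    #     # immediately, then just block; no real-time quotes expected.
    #     feed_is_live.set()
    #     await trio.sleep_forever()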
@@ -20,6 +20,11 @@ runnable script-programs.

 '''
 from __future__ import annotations
+from datetime import (  # noqa
+    datetime,
+    date,
+    tzinfo as TzInfo,
+)
 from functools import partial
 from typing import (
     Literal,
@@ -75,7 +80,7 @@ def try_xdo_manual(
         return True
     except OSError:
         log.exception(
-            no_setup_msg.format(vnc_sockaddr)
+            no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
         )
         return False

@@ -124,7 +129,7 @@ async def data_reset_hack(

    if not vnc_sockaddr:
        log.warning(
-           no_setup_msg.format(vnc_sockaddr)
+           no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
            +
            'REQUIRES A `vnc_addrs: array` ENTRY'
        )
@@ -153,7 +158,7 @@ async def data_reset_hack(
        import i3ipc  # noqa (since a deps dynamic check)
    except ModuleNotFoundError:
        log.warning(
-           no_setup_msg.format(vnc_sockaddr)
+           no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
        )
        return False

@@ -164,7 +169,7 @@ async def data_reset_hack(
    focussed, matches = i3ipc_fin_wins_titled()
    if not matches:
        log.warning(
-           no_setup_msg.format(vnc_sockaddr)
+           no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
        )
        return False
    else:
@@ -337,3 +342,99 @@ def i3ipc_xdotool_manual_click_hack() -> None:
        ])
    except subprocess.TimeoutExpired:
        log.exception('xdotool timed out?')
+
+
+
+def is_current_time_in_range(
+    start_dt: datetime,
+    end_dt: datetime,
+) -> bool:
+    '''
+    Check if current time is within the datetime range.
+
+    Use any/the-same timezone as provided by `start_dt.tzinfo` value
+    in the range.
+
+    '''
+    now: datetime = datetime.now(start_dt.tzinfo)
+    return start_dt <= now <= end_dt
+
+
+# TODO, put this into `._util` and call it from here!
+#
+# NOTE, this was generated by @guille from a gpt5 prompt
+# and was originally thot to be needed before learning about
+# `ib_insync.contract.ContractDetails._parseSessions()` and
+# it's downstream meths..
+#
+# This is still likely useful to keep for now to parse the
+# `.tradingHours: str` value manually if we ever decide
+# to move off `ib_async` and implement our own `trio`/`anyio`
+# based version Bp
+#
+# >attempt to parse the retarted ib "time stampy thing" they
+# >do for "venue hours" with this.. written by
+# >gpt5-"thinking",
+#
+
+
+def parse_trading_hours(
+    spec: str,
+    tz: TzInfo|None = None
+) -> dict[
+    date,
+    tuple[datetime, datetime]
+]|None:
+    '''
+    Parse venue hours like:
+    'YYYYMMDD:HHMM-YYYYMMDD:HHMM;YYYYMMDD:CLOSED;...'
+
+    Returns `dict[date] = (open_dt, close_dt)` or `None` if
+    closed.
+
+    '''
+    if (
+        not isinstance(spec, str)
+        or
+        not spec
+    ):
+        raise ValueError('spec must be a non-empty string')
+
+    out: dict[
+        date,
+        tuple[datetime, datetime]
+    ]|None = {}
+
+    for part in (p.strip() for p in spec.split(';') if p.strip()):
+        if part.endswith(':CLOSED'):
+            day_s, _ = part.split(':', 1)
+            d = datetime.strptime(day_s, '%Y%m%d').date()
+            out[d] = None
+            continue
+
+        try:
+            start_s, end_s = part.split('-', 1)
+            start_dt = datetime.strptime(start_s, '%Y%m%d:%H%M')
+            end_dt = datetime.strptime(end_s, '%Y%m%d:%H%M')
+        except ValueError as exc:
+            raise ValueError(f'invalid segment: {part}') from exc
+
+        if tz is not None:
+            start_dt = start_dt.replace(tzinfo=tz)
+            end_dt = end_dt.replace(tzinfo=tz)
+
+        out[start_dt.date()] = (start_dt, end_dt)
+
+    return out
+
+
+# ORIG desired usage,
+#
+# TODO, for non-drunk tomorrow,
+# - call above fn and check that `output[today] is not None`
+# trading_hrs: dict = parse_trading_hours(
+#     details.tradingHours
+# )
+# liq_hrs: dict = parse_trading_hours(
+#     details.liquidHours
+# )
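For reference, a hypothetical usage sketch of the `parse_trading_hours()`
helper added above; the sample `spec` string and the `America/New_York` zone
are illustrative only, not actual IB output:

    from datetime import date, datetime
    from zoneinfo import ZoneInfo

    ny = ZoneInfo('America/New_York')
    spec = (
        '20250106:0930-20250106:1600;'
        '20250107:CLOSED'
    )
    # maps each session date to its (open_dt, close_dt) pair,
    # or `None` for a closed day
    hrs = parse_trading_hours(spec, tz=ny)

    sesh = hrs.get(date(2025, 1, 6))
    if sesh:
        open_dt, close_dt = sesh
        # same check `is_current_time_in_range()` performs
        print(open_dt <= datetime.now(open_dt.tzinfo) <= close_dt)

    assert hrs[date(2025, 1, 7)] is None  # venue closed that day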
@@ -26,7 +26,6 @@ from dataclasses import asdict
 from datetime import datetime
 from functools import partial
 from pprint import pformat
-from math import isnan
 import time
 from typing import (
     Any,
@@ -69,7 +68,10 @@ from .api import (
     Contract,
     RequestError,
 )
-from ._util import data_reset_hack
+from ._util import (
+    data_reset_hack,
+    is_current_time_in_range,
+)
 from .symbols import get_mkt_info

 if TYPE_CHECKING:
@@ -184,7 +186,8 @@ async def open_history_client(

        if (
            start_dt
-           and start_dt.timestamp() == 0
+           and
+           start_dt.timestamp() == 0
        ):
            await tractor.pause()

@@ -203,7 +206,7 @@ async def open_history_client(
        ):
            count += 1
            mean += latency / count
-           print(
+           log.debug(
                f'HISTORY FRAME QUERY LATENCY: {latency}\n'
                f'mean: {mean}'
            )
@@ -607,7 +610,10 @@ async def get_bars(
    # such that simultaneous symbol queries don't try data resettingn
    # too fast..
    unset_resetter: bool = False
-   async with trio.open_nursery() as nurse:
+   async with (
+       tractor.trionics.collapse_eg(),
+       trio.open_nursery() as nurse
+   ):

        # start history request that we allow
        # to run indefinitely until a result is acquired
@@ -689,10 +695,17 @@ async def _setup_quote_stream(
    async with load_aio_clients(
        disconnect_on_exit=False,
    ) as accts2clients:
+
+       # since asyncio.Task
+       # tractor.pause_from_sync()
+
        caccount_name, client = get_preferred_data_client(accts2clients)
        contract = contract or (await client.find_contract(symbol))
        to_trio.send_nowait(contract)  # cuz why not
-       ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))
+       ticker: Ticker = client.ib.reqMktData(
+           contract,
+           ','.join(opts),
+       )

        # NOTE: it's batch-wise and slow af but I guess could
        # be good for backchecking? Seems to be every 5s maybe?
@@ -716,10 +729,10 @@ async def _setup_quote_stream(
            Push quotes to trio task.

            """
-           # log.debug(t)
+           # log.debug(f'new IB quote: {t}\n')
            try:
                to_trio.send_nowait(t)

            except (
                trio.BrokenResourceError,

@@ -734,21 +747,47 @@ async def _setup_quote_stream(
                # resulting in tracebacks spammed to console..
                # Manually do the dereg ourselves.
                teardown()
-           except trio.WouldBlock:
-               # log.warning(
-               #     f'channel is blocking symbol feed for {symbol}?'
-               #     f'\n{to_trio.statistics}'
-               # )
-               pass

-           # except trio.WouldBlock:
-           #     # for slow debugging purposes to avoid clobbering prompt
-           #     # with log msgs
-           #     pass
+           # for slow debugging purposes to avoid clobbering prompt
+           # with log msgs
+           except trio.WouldBlock:
+               log.exception(
+                   f'Asyncio->Trio `to_trio.send_nowait()` blocked !?\n'
+                   f'\n'
+                   f'{to_trio.statistics()}\n'
+               )
+
+           # ?TODO, handle re-connection attempts?
+           except BaseException as _berr:
+               berr = _berr
+               log.exception(
+                   f'Failed to push ticker quote !?\n'
+                   f'cause: {berr}\n'
+                   f'\n'
+                   f't: {t}\n'
+                   f'{to_trio.statistics}\n'
+               )
+               # raise berr
+

        ticker.updateEvent.connect(push)
        try:
            await asyncio.sleep(float('inf'))
+
+           # XXX, just for debug..
+           # tractor.pause_from_sync()
+           # while True:
+           #     await asyncio.sleep(1.6)
+           #     if ticker.ticks:
+           #         log.debug(
+           #             f'ticker.ticks = \n'
+           #             f'{ticker.ticks}\n'
+           #         )
+           #     else:
+           #         log.warning(
+           #             'UHH no ticker.ticks ??'
+           #         )
+
        finally:
            teardown()

@@ -820,7 +859,7 @@ def normalize(

    tbt = ticker.tickByTicks
    if tbt:
-       print(f'tickbyticks:\n {ticker.tickByTicks}')
+       log.info(f'tickbyticks:\n {ticker.tickByTicks}')

    ticker.ticks = new_ticks

@@ -861,22 +900,28 @@ async def stream_quotes(
    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
-   loglevel: str = None,
+
+   # TODO? we need to hook into the `ib_async` logger like
+   # we can with i3ipc from modden!
+   # loglevel: str|None = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
    '''
-   Stream symbol quotes.
+   Stream `symbols[0]` quotes back via `send_chan`.

-   This is a ``trio`` callable routine meant to be invoked
-   once the brokerd is up.
+   The `feed_is_live: Event` is set to signal the caller that it can
+   begin processing msgs from the mem-chan.

    '''
    # TODO: support multiple subscriptions
-   sym = symbols[0]
-   log.info(f'request for real-time quotes: {sym}')
+   sym: str = symbols[0]
+   log.info(
+       f'request for real-time quotes\n'
+       f'sym: {sym!r}\n'
+   )

    init_msgs: list[FeedInit] = []

@@ -885,21 +930,30 @@ async def stream_quotes(
    details: ibis.ContractDetails
    async with (
        open_data_client() as proxy,
-       # trio.open_nursery() as tn,
    ):
        mkt, details = await get_mkt_info(
            sym,
            proxy=proxy,  # passed to avoid implicit client load
        )

+       # is venue active rn?
+       venue_is_open: bool = any(
+           is_current_time_in_range(
+               start_dt=sesh.start,
+               end_dt=sesh.end,
+           )
+           for sesh in details.tradingSessions()
+       )
+
        init_msg = FeedInit(mkt_info=mkt)

+       # NOTE, tell sampler (via config) to skip vlm summing for dst
+       # assets which provide no vlm data..
        if mkt.dst.atype in {
            'fiat',
            'index',
            'commodity',
        }:
-           # tell sampler config that it shouldn't do vlm summing.
            init_msg.shm_write_opts['sum_tick_vlm'] = False
            init_msg.shm_write_opts['has_vlm'] = False

@@ -907,12 +961,18 @@ async def stream_quotes(

        con: Contract = details.contract
        first_ticker: Ticker|None = None
-       with trio.move_on_after(1):
+
+       with trio.move_on_after(1.6) as quote_cs:
            first_ticker: Ticker = await proxy.get_quote(
                contract=con,
                raise_on_timeout=False,
            )

+       # XXX should never happen with this ep right?
+       # but if so then, more then likely mkt is closed?
+       if quote_cs.cancelled_caught:
+           await tractor.pause()
+
        if first_ticker:
            first_quote: dict = normalize(first_ticker)

@@ -924,28 +984,27 @@ async def stream_quotes(
                f'{pformat(first_quote)}\n'
            )

-           # NOTE: it might be outside regular trading hours for
-           # assets with "standard venue operating hours" so we
-           # only "pretend the feed is live" when the dst asset
-           # type is NOT within the NON-NORMAL-venue set: aka not
-           # commodities, forex or crypto currencies which CAN
-           # always return a NaN on a snap quote request during
-           # normal venue hours. In the case of a closed venue
-           # (equitiies, futes, bonds etc.) we at least try to
-           # grab the OHLC history.
-           if (
-               first_ticker
-               and
-               isnan(first_ticker.last)
-               # SO, if the last quote price value is NaN we ONLY
-               # "pretend to do" `feed_is_live.set()` if it's a known
-               # dst asset venue with a lot of closed operating hours.
-               and mkt.dst.atype not in {
-                   'commodity',
-                   'fiat',
-                   'crypto',
-               }
-           ):
+           # XXX NOTE: whenever we're "outside regular trading hours"
+           # (only relevant for assets coming from the "legacy markets"
+           # space) so we basically (from an API/runtime-operational
+           # perspective) "pretend the feed is live" even if it's
+           # actually closed.
+           #
+           # IOW, we signal to the effective caller (task) that the live
+           # feed is "already up" but really we're just indicating that
+           # the OHLCV history can start being loaded immediately by the
+           # `piker.data`/`.tsp` layers.
+           #
+           # XXX, deats: the "pretend we're live" is just done by
+           # a `feed_is_live.set()` even though nothing is actually live
+           # Bp
+           if not venue_is_open:
+               log.warning(
+                   f'Venue is closed, unable to establish real-time feed.\n'
+                   f'mkt: {mkt!r}\n'
+                   f'\n'
+                   f'first_ticker: {first_ticker}\n'
+               )
                task_status.started((
                    init_msgs,
                    first_quote,
@@ -956,10 +1015,12 @@ async def stream_quotes(
                feed_is_live.set()

                # block and let data history backfill code run.
+               # XXX obvi given the venue is closed, we never expect feed
+               # to come up; a taskc should be the only way to
+               # terminate this task.
                await trio.sleep_forever()
-               return  # we never expect feed to come up?

-           # TODO: we should instead spawn a task that waits on a feed
+           # ?TODO, we could instead spawn a task that waits on a feed
            # to start and let it wait indefinitely..instead of this
            # hard coded stuff.
            # async def wait_for_first_quote():
@@ -985,19 +1046,22 @@ async def stream_quotes(
            startup: bool = True
            while (
                startup
-               or cs.cancel_called
+               or
+               cs.cancel_called
            ):
                with trio.CancelScope() as cs:
                    async with (
+                       tractor.trionics.collapse_eg(),
                        trio.open_nursery() as nurse,
                        open_aio_quote_stream(
                            symbol=sym,
                            contract=con,
                        ) as stream,
                    ):
+                       # ?TODO? can we rm this - particularly for `ib_async`?
                        # ugh, clear ticks since we've consumed them
                        # (ahem, ib_insync is stateful trash)
-                       first_ticker.ticks = []
+                       # first_ticker.ticks = []

                        # only on first entry at feed boot up
                        if startup:
@@ -1011,8 +1075,8 @@ async def stream_quotes(
                            # data feed event.
                            async def reset_on_feed():

-                               # TODO: this seems to be surpressed from the
-                               # traceback in ``tractor``?
+                               # ??TODO? this seems to be surpressed from the
+                               # traceback in `tractor`?
                                # assert 0

                                rt_ev = proxy.status_event(
@@ -1056,7 +1120,7 @@ async def stream_quotes(
                                # ugh, clear ticks since we've
                                # consumed them (ahem, ib_insync is
                                # truly stateful trash)
-                               ticker.ticks = []
+                               # ticker.ticks = []

                                # XXX: this works because we don't use
                                # ``aclosing()`` above?
@@ -1073,8 +1137,12 @@ async def stream_quotes(
                        async for ticker in stream:
                            quote = normalize(ticker)
                            fqme = quote['fqme']
+                           log.debug(
+                               f'Sending quote\n'
+                               f'{quote}'
+                           )
                            await send_chan.send({fqme: quote})

                            # ugh, clear ticks since we've consumed them
-                           ticker.ticks = []
+                           # ticker.ticks = []
                            # last = time.time()