Compare commits

No commits in common. "ce1f038b53c5d6cc850bbe0318f7239c65d5a1a8" and "77518f0758fbf6d4f983a17e7a34ae0b279b38ef" have entirely different histories.

ce1f038b53 ... 77518f0758
@@ -326,6 +326,7 @@ def i3ipc_fin_wins_titled(
     )


+
 def i3ipc_xdotool_manual_click_hack() -> None:
     '''
     Do the data reset hack but expecting a local X-window using `xdotool`.
@@ -387,3 +388,99 @@ def i3ipc_xdotool_manual_click_hack() -> None:
         ])
     except subprocess.TimeoutExpired:
         log.exception('xdotool timed out?')
+
+
+def is_current_time_in_range(
+    start_dt: datetime,
+    end_dt: datetime,
+) -> bool:
+    '''
+    Check if current time is within the datetime range.
+
+    Use any/the-same timezone as provided by `start_dt.tzinfo` value
+    in the range.
+
+    '''
+    now: datetime = datetime.now(start_dt.tzinfo)
+    return start_dt <= now <= end_dt
+
+
+# TODO, put this into `._util` and call it from here!
+#
+# NOTE, this was generated by @guille from a gpt5 prompt
+# and was originally thot to be needed before learning about
+# `ib_insync.contract.ContractDetails._parseSessions()` and
+# it's downstream meths..
+#
+# This is still likely useful to keep for now to parse the
+# `.tradingHours: str` value manually if we ever decide
+# to move off `ib_async` and implement our own `trio`/`anyio`
+# based version Bp
+#
+# >attempt to parse the retarted ib "time stampy thing" they
+# >do for "venue hours" with this.. written by
+# >gpt5-"thinking",
+#
+
+
+def parse_trading_hours(
+    spec: str,
+    tz: TzInfo|None = None
+) -> dict[
+    date,
+    tuple[datetime, datetime]
+]|None:
+    '''
+    Parse venue hours like:
+    'YYYYMMDD:HHMM-YYYYMMDD:HHMM;YYYYMMDD:CLOSED;...'
+
+    Returns `dict[date] = (open_dt, close_dt)` or `None` if
+    closed.
+
+    '''
+    if (
+        not isinstance(spec, str)
+        or
+        not spec
+    ):
+        raise ValueError('spec must be a non-empty string')
+
+    out: dict[
+        date,
+        tuple[datetime, datetime]
+    ]|None = {}
+
+    for part in (p.strip() for p in spec.split(';') if p.strip()):
+        if part.endswith(':CLOSED'):
+            day_s, _ = part.split(':', 1)
+            d = datetime.strptime(day_s, '%Y%m%d').date()
+            out[d] = None
+            continue
+
+        try:
+            start_s, end_s = part.split('-', 1)
+            start_dt = datetime.strptime(start_s, '%Y%m%d:%H%M')
+            end_dt = datetime.strptime(end_s, '%Y%m%d:%H%M')
+        except ValueError as exc:
+            raise ValueError(f'invalid segment: {part}') from exc
+
+        if tz is not None:
+            start_dt = start_dt.replace(tzinfo=tz)
+            end_dt = end_dt.replace(tzinfo=tz)
+
+        out[start_dt.date()] = (start_dt, end_dt)
+
+    return out
+
+
+# ORIG desired usage,
+#
+# TODO, for non-drunk tomorrow,
+# - call above fn and check that `output[today] is not None`
+# trading_hrs: dict = parse_trading_hours(
+#     details.tradingHours
+# )
+# liq_hrs: dict = parse_trading_hours(
+#     details.liquidHours
+# )
+
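A quick sanity-check of the two helpers added above (a minimal sketch: it assumes `parse_trading_hours` and `is_current_time_in_range` are in scope, and uses a stdlib `zoneinfo` timezone in place of the `TzInfo` annotation):

    from datetime import datetime
    from zoneinfo import ZoneInfo

    nyse = ZoneInfo('America/New_York')
    spec = '20240102:0930-20240102:1600;20240101:CLOSED'

    # maps session-date -> (open_dt, close_dt), or None when closed
    hours = parse_trading_hours(spec, tz=nyse)
    assert hours[datetime(2024, 1, 1).date()] is None

    sesh = hours[datetime(2024, 1, 2).date()]
    if sesh and is_current_time_in_range(*sesh):
        print('venue is open rn')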
@@ -50,11 +50,10 @@ import tractor
 from tractor import to_asyncio
 from tractor import trionics
 from pendulum import (
+    from_timestamp,
     DateTime,
     Duration,
     duration as mk_duration,
-    from_timestamp,
-    Interval,
 )
 from eventkit import Event
 from ib_insync import (
@@ -261,16 +260,6 @@ def remove_handler_on_err(
         event.disconnect(handler)


-# (originally?) i thot that,
-# > "EST in ISO 8601 format is required.."
-#
-# XXX, but see `ib_async`'s impl,
-# - `ib_async.ib.IB.reqHistoricalDataAsync()`
-# - `ib_async.util.formatIBDatetime()`
-# below is EPOCH.
-_iso8601_epoch_in_est: str = "1970-01-01T00:00:00.000000-05:00"
-
-
 class Client:
     '''
     IB wrapped for our broker backend API.
@@ -344,11 +333,9 @@ class Client:
         self,
         fqme: str,

-        # EST in ISO 8601 format is required..
-        # XXX, see `ib_async.ib.IB.reqHistoricalDataAsync()`
-        # below is EPOCH.
-        start_dt: datetime|None = None,  # _iso8601_epoch_in_est,
-        end_dt: datetime|None = None,
+        # EST in ISO 8601 format is required... below is EPOCH
+        start_dt: datetime|str = "1970-01-01T00:00:00.000000-05:00",
+        end_dt: datetime|str = "",

         # ohlc sample period in seconds
         sample_period_s: int = 1,
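Note on the new defaults above: per the retained comment, IB's query API wants an EST-pinned ISO 8601 stamp, so the `start_dt` default is the unix epoch written in EST wall-clock terms, while the empty `end_dt` means "up to now". A small sketch of what that epoch string actually denotes:

    from datetime import datetime

    epoch_est = '1970-01-01T00:00:00.000000-05:00'
    dt = datetime.fromisoformat(epoch_est)

    # midnight EST is 05:00 UTC, so this stamp sits 5h after the
    # true unix epoch, i.e. "the beginning of time" in venue-local
    # wall-clock terms
    assert dt.timestamp() == 5 * 3600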
@@ -359,17 +346,9 @@ class Client:

         **kwargs,

-    ) -> tuple[
-        BarDataList,
-        np.ndarray,
-        Duration,
-    ]:
+    ) -> tuple[BarDataList, np.ndarray, Duration]:
         '''
-        Retreive the `fqme`'s OHLCV-bars for the time-range "until `end_dt`".
+        Retreive OHLCV bars for a fqme over a range to the present.

-        Notes:
-        - IB's api doesn't support a `start_dt` (which is why default
-          is null) so we only use it for bar-frame duration checking.
-
         '''
         # See API docs here:
@@ -384,19 +363,13 @@ class Client:

         dt_duration: Duration = (
             duration
-            or
-            default_dt_duration
+            or default_dt_duration
         )

         # TODO: maybe remove all this?
         global _enters
-        if end_dt is None:
-            end_dt: str = ''
-
-        else:
-            est_end_dt = end_dt.in_tz('EST')
-            if est_end_dt != end_dt:
-                breakpoint()
+        if not end_dt:
+            end_dt = ''

         _enters += 1
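The `if not end_dt:` simplification above leans on truthiness so that both `None` and the new empty-string default fall into the same "latest datum" branch; a tiny demonstration with hypothetical sentinel values:

    for sentinel in (None, ''):
        end_dt = sentinel
        # either falsy sentinel means "no explicit end", which the
        # IB query encodes as the empty string
        if not end_dt:
            end_dt = ''
        assert end_dt == ''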
@@ -465,127 +438,58 @@ class Client:
                 + query_info
             )

-            # TODO: we could maybe raise `NoData` instead if we
+            # TODO: we could maybe raise ``NoData`` instead if we
             # rewrite the method in the first case?
             # right now there's no way to detect a timeout..
             return [], np.empty(0), dt_duration

         log.info(query_info)

-        # ------ GAP-DETECTION ------
         # NOTE XXX: ensure minimum duration in bars?
         # => recursively call this method until we get at least as
         # many bars such that they sum in aggregate to the the
         # desired total time (duration) at most.
         # - if you query over a gap and get no data
         #   that may short circuit the history
-        if end_dt:
+        if (
+            # XXX XXX XXX
+            # => WHY DID WE EVEN NEED THIS ORIGINALLY!? <=
+            # XXX XXX XXX
+            False
+            and end_dt
+        ):
             nparr: np.ndarray = bars_to_np(bars)
             times: np.ndarray = nparr['time']
             first: float = times[0]
-            last: float = times[-1]
-            # frame_dur: float = times[-1] - first
-
-            first_dt: DateTime = from_timestamp(first)
-            last_dt: DateTime = from_timestamp(last)
-            tdiff: int = (
-                last_dt
-                -
-                first_dt
-            ).in_seconds() + sample_period_s
-
-            # XXX, do gap detections.
-            if (
-                last_dt.add(seconds=sample_period_s)
-                <
-                end_dt
-            ):
-                details: ContractDetails = (
-                    await self.ib.reqContractDetailsAsync(contract)
-                )[0]
-                from .venues import (
-                    is_venue_open,
-                    has_weekend,
-                    sesh_times,
-                    is_venue_closure,
-                )
-                _open_now: bool = is_venue_open(
-                    con_deats=details,
-                )
-                open_time, close_time = sesh_times(details)
-                # XXX, always calc gap in mkt-venue-local timezone
-                tz: str = details.timeZoneId
-                gap: Interval = (
-                    end_dt.in_tz(tz)
-                    -
-                    last_dt.in_tz(tz)
-                )
-
-                if (
-                    not has_weekend(gap)
-                    and
-                    # XXX NOT outside venue closures.
-                    # !TODO, replace with,
-                    # `not is_venue_closure()`
-                    # per below assert on inverse case!
-                    gap.end.time() != open_time
-                    and
-                    gap.start.time() != close_time
-                ):
-                    breakpoint()
-                    log.warning(
-                        f'Invalid non-closure gap for {fqme!r} ?!?\n'
-                        f'is-open-now: {_open_now}\n'
-                        f'\n'
-                        f'{gap}\n'
-                    )
-                else:
-                    assert is_venue_closure(
-                        gap=gap,
-                        con_deats=details,
-                    )
-                    log.debug(
-                        f'Detected venue closure gap (weekend),\n'
-                        f'{gap}\n'
-                    )
+            tdiff: float = times[-1] - first

             if (
-                start_dt is None
-                and
-                tdiff
-                <
-                dt_duration.in_seconds()
-                # and
                 # len(bars) * sample_period_s) < dt_duration.in_seconds()
+                tdiff < dt_duration.in_seconds()
+                # and False
             ):
                 end_dt: DateTime = from_timestamp(first)
-                log.error(
+                log.warning(
                     f'Frame result was shorter then {dt_duration}!?\n'
+                    'Recursing for more bars:\n'
                     f'end_dt: {end_dt}\n'
                     f'dt_duration: {dt_duration}\n'
-                    # f'\n'
-                    # f'Recursing for more bars:\n'
                 )
-                breakpoint()
-                # XXX ? TODO? recursively try to re-request?
-                # => i think *NO* right?
-                #
-                # (
-                #     r_bars,
-                #     r_arr,
-                #     r_duration,
-                # ) = await self.bars(
-                #     fqme,
-                #     start_dt=start_dt,
-                #     end_dt=end_dt,
-                #     sample_period_s=sample_period_s,
-
-                # # TODO: make a table for Duration to
-                # # the ib str values in order to use this?
-                # # duration=duration,
-                # )
-                # r_bars.extend(bars)
-                # bars = r_bars
+                (
+                    r_bars,
+                    r_arr,
+                    r_duration,
+                ) = await self.bars(
+                    fqme,
+                    start_dt=start_dt,
+                    end_dt=end_dt,
+                    sample_period_s=sample_period_s,
+
+                    # TODO: make a table for Duration to
+                    # the ib str values in order to use this?
+                    # duration=duration,
+                )
+                r_bars.extend(bars)
+                bars = r_bars

         nparr: np.ndarray = bars_to_np(bars)
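The recursion re-enabled above can be read as the following standalone loop — a rough sketch only: `bars_fn` is a hypothetical stand-in for `Client.bars()`, and bar objects are assumed to expose a float `.time` timestamp in oldest-first order:

    async def fetch_at_least(
        bars_fn,
        fqme: str,
        need_s: float,
        end_dt='',
    ):
        # grab the newest frame, then keep stepping `end_dt` back to
        # the first received bar until the merged frame spans `need_s`
        frame = await bars_fn(fqme, end_dt=end_dt)
        while frame and (frame[-1].time - frame[0].time) < need_s:
            earlier = await bars_fn(fqme, end_dt=frame[0].time)
            if not earlier:
                break  # provider has no deeper history
            # (a real impl would de-dup the boundary bar here)
            frame = earlier + frame
        return frame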
@@ -880,16 +784,9 @@ class Client:
         # crypto$
         elif exch == 'PAXOS':  # btc.paxos
             con = Crypto(
-                symbol=symbol.upper(),
-                currency='USD',
-                exchange='PAXOS',
+                symbol=symbol,
+                currency=currency,
             )
-            # XXX, on `ib_insync` when first tried this,
-            # > Error 10299, reqId 141: Expected what to show is
-            # > AGGTRADES, please use that instead of TRADES.,
-            # > contract: Crypto(conId=479624278, symbol='BTC',
-            # > exchange='PAXOS', currency='USD',
-            # > localSymbol='BTC.USD', tradingClass='BTC')

         # stonks
         else:
@@ -69,9 +69,9 @@ from .api import (
     Contract,
     RequestError,
 )
-from .venues import is_venue_open
 from ._util import (
     data_reset_hack,
+    is_current_time_in_range,
 )
 from .symbols import get_mkt_info
@@ -203,8 +203,7 @@ async def open_history_client(
             latency = time.time() - query_start
             if (
                 not timedout
-                # and
-                # latency <= max_timeout
+                # and latency <= max_timeout
             ):
                 count += 1
                 mean += latency / count
@@ -220,10 +219,8 @@ async def open_history_client(
                 )
                 if (
                     end_dt
-                    and
-                    head_dt
-                    and
-                    end_dt <= head_dt
+                    and head_dt
+                    and end_dt <= head_dt
                 ):
                     raise DataUnavailable(
                         f'First timestamp is {head_dt}\n'
@@ -281,7 +278,7 @@ async def open_history_client(
                     start_dt
                 ):
                     # TODO! rm this once we're more confident it never hits!
-                    # breakpoint()
+                    breakpoint()
                     raise RuntimeError(
                         f'OHLC-bars array start is gt `start_dt` limit !!\n'
                         f'start_dt: {start_dt}\n'
@@ -301,7 +298,7 @@ async def open_history_client(
         # TODO: it seems like we can do async queries for ohlc
         # but getting the order right still isn't working and I'm not
         # quite sure why.. needs some tinkering and probably
-        # a lookthrough of the `ib_insync` machinery, for eg. maybe
+        # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
         # we have to do the batch queries on the `asyncio` side?
         yield (
             get_hist,
@@ -424,13 +421,14 @@ _failed_resets: int = 0


 async def get_bars(

     proxy: MethodProxy,
     fqme: str,
     timeframe: int,

     # blank to start which tells ib to look up the latest datum
-    end_dt: datetime|None = None,
-    start_dt: datetime|None = None,
+    end_dt: str = '',
+    start_dt: str|None = '',
+
     # TODO: make this more dynamic based on measured frame rx latency?
     # how long before we trigger a feed reset (seconds)
@@ -484,8 +482,7 @@ async def get_bars(
             dt_duration,
         ) = await proxy.bars(
             fqme=fqme,
-            # XXX TODO! LOL we're not using this and IB dun
-            # support it anyway..
+            # XXX TODO! lol we're not using this..
             # start_dt=start_dt,
             end_dt=end_dt,
             sample_period_s=timeframe,
@@ -737,7 +734,7 @@ async def _setup_quote_stream(
         # '294',  # Trade rate / minute
         # '295',  # Vlm rate / minute
     ),
-    contract: Contract|None = None,
+    contract: Contract | None = None,

 ) -> trio.abc.ReceiveChannel:
     '''
@@ -759,12 +756,7 @@ async def _setup_quote_stream(
     # XXX since this is an `asyncio.Task`, we must use
     # tractor.pause_from_sync()

-    (
-        _account_name,
-        client,
-    ) = get_preferred_data_client(
-        accts2clients,
-    )
+    _account_name, client = get_preferred_data_client(accts2clients)
     contract = (
         contract
         or
@@ -1099,9 +1091,14 @@ async def stream_quotes(
         )

         # is venue active rn?
-        venue_is_open: bool = is_venue_open(
-            con_deats=details,
+        venue_is_open: bool = any(
+            is_current_time_in_range(
+                start_dt=sesh.start,
+                end_dt=sesh.end,
+            )
+            for sesh in details.tradingSessions()
         )

         init_msg = FeedInit(mkt_info=mkt)

         # NOTE, tell sampler (via config) to skip vlm summing for dst
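The replacement `any()` expression above folds the venue-hours check into one pass over IB's pre-parsed sessions; roughly equivalent as a standalone helper (assuming `details` is a `ContractDetails` whose `.tradingSessions()` yields objects with timezone-aware `.start`/`.end` attrs, as `ib_insync`/`ib_async` provide):

    from datetime import datetime

    def venue_open_now(details) -> bool:
        # open iff "now" falls inside any advertised trading session
        return any(
            sesh.start <= datetime.now(sesh.start.tzinfo) <= sesh.end
            for sesh in details.tradingSessions()
        )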
@@ -149,83 +149,3 @@ def is_venue_closure(
             return True

     return False
-
-
-# TODO, put this into `._util` and call it from here!
-#
-# NOTE, this was generated by @guille from a gpt5 prompt
-# and was originally thot to be needed before learning about
-# `ib_insync.contract.ContractDetails._parseSessions()` and
-# it's downstream meths..
-#
-# This is still likely useful to keep for now to parse the
-# `.tradingHours: str` value manually if we ever decide
-# to move off `ib_async` and implement our own `trio`/`anyio`
-# based version Bp
-#
-# >attempt to parse the retarted ib "time stampy thing" they
-# >do for "venue hours" with this.. written by
-# >gpt5-"thinking",
-#
-
-
-def parse_trading_hours(
-    spec: str,
-    tz: TzInfo|None = None
-) -> dict[
-    date,
-    tuple[datetime, datetime]
-]|None:
-    '''
-    Parse venue hours like:
-    'YYYYMMDD:HHMM-YYYYMMDD:HHMM;YYYYMMDD:CLOSED;...'
-
-    Returns `dict[date] = (open_dt, close_dt)` or `None` if
-    closed.
-
-    '''
-    if (
-        not isinstance(spec, str)
-        or
-        not spec
-    ):
-        raise ValueError('spec must be a non-empty string')
-
-    out: dict[
-        date,
-        tuple[datetime, datetime]
-    ]|None = {}
-
-    for part in (p.strip() for p in spec.split(';') if p.strip()):
-        if part.endswith(':CLOSED'):
-            day_s, _ = part.split(':', 1)
-            d = datetime.strptime(day_s, '%Y%m%d').date()
-            out[d] = None
-            continue
-
-        try:
-            start_s, end_s = part.split('-', 1)
-            start_dt = datetime.strptime(start_s, '%Y%m%d:%H%M')
-            end_dt = datetime.strptime(end_s, '%Y%m%d:%H%M')
-        except ValueError as exc:
-            raise ValueError(f'invalid segment: {part}') from exc
-
-        if tz is not None:
-            start_dt = start_dt.replace(tzinfo=tz)
-            end_dt = end_dt.replace(tzinfo=tz)
-
-        out[start_dt.date()] = (start_dt, end_dt)
-
-    return out
-
-
-# ORIG desired usage,
-#
-# TODO, for non-drunk tomorrow,
-# - call above fn and check that `output[today] is not None`
-# trading_hrs: dict = parse_trading_hours(
-#     details.tradingHours
-# )
-# liq_hrs: dict = parse_trading_hours(
-#     details.liquidHours
-# )
@@ -49,7 +49,6 @@ from pendulum import (
     Duration,
     duration as mk_duration,
     from_timestamp,
-    timezone,
 )
 import numpy as np
 import polars as pl
@@ -58,7 +57,9 @@ from piker.brokers import NoData
 from piker.accounting import (
     MktPair,
 )
-from piker.log import get_logger
+from piker.data._util import (
+    log,
+)
 from ..data._sharedmem import (
     maybe_open_shm_array,
     ShmArray,
@@ -96,9 +97,6 @@ if TYPE_CHECKING:
     # from .feed import _FeedsBus


-log = get_logger()
-
-
 # `ShmArray` buffer sizing configuration:
 _mins_in_day = int(60 * 24)
 # how much is probably dependent on lifestyle
@@ -403,9 +401,7 @@ async def start_backfill(

     # based on the sample step size, maybe load a certain amount history
     update_start_on_prepend: bool = False
-    if (
-        _until_was_none := (backfill_until_dt is None)
-    ):
+    if backfill_until_dt is None:

         # TODO: per-provider default history-durations?
         # -[ ] inside the `open_history_client()` config allow
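The dropped walrus above bound `_until_was_none` without it ever being read again, so the plain comparison is behavior-identical:

    x = None

    # before: walrus-bound name that nothing else ever read
    if (_until_was_none := (x is None)):
        pass

    # after: same branch taken, no stray binding left behind
    if x is None:
        pass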
@@ -439,8 +435,6 @@ async def start_backfill(
     last_start_dt: datetime = backfill_from_dt
     next_prepend_index: int = backfill_from_shm_index

-    est = timezone('EST')
-
     while last_start_dt > backfill_until_dt:
         log.info(
             f'Requesting {timeframe}s frame:\n'
|
||||||
next_end_dt,
|
next_end_dt,
|
||||||
) = await get_hist(
|
) = await get_hist(
|
||||||
timeframe,
|
timeframe,
|
||||||
end_dt=(end_dt_param := last_start_dt),
|
end_dt=last_start_dt,
|
||||||
)
|
)
|
||||||
except NoData as nodata:
|
except NoData as _daterr:
|
||||||
_nodata = nodata
|
|
||||||
orig_last_start_dt: datetime = last_start_dt
|
orig_last_start_dt: datetime = last_start_dt
|
||||||
gap_report: str = (
|
gap_report: str = (
|
||||||
f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n'
|
f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n'
|
||||||
|
|
@ -525,32 +518,8 @@ async def start_backfill(
|
||||||
==
|
==
|
||||||
next_start_dt.timestamp()
|
next_start_dt.timestamp()
|
||||||
)
|
)
|
||||||
assert (
|
|
||||||
(last_time := time[-1])
|
|
||||||
==
|
|
||||||
next_end_dt.timestamp()
|
|
||||||
)
|
|
||||||
|
|
||||||
frame_last_dt = from_timestamp(last_time)
|
assert time[-1] == next_end_dt.timestamp()
|
||||||
if (
|
|
||||||
frame_last_dt.add(seconds=timeframe)
|
|
||||||
<
|
|
||||||
end_dt_param
|
|
||||||
):
|
|
||||||
est_frame_last_dt = est.convert(frame_last_dt)
|
|
||||||
est_end_dt_param = est.convert(end_dt_param)
|
|
||||||
log.warning(
|
|
||||||
f'Provider frame ending BEFORE requested end_dt={end_dt_param} ??\n'
|
|
||||||
f'frame_last_dt (EST): {est_frame_last_dt!r}\n'
|
|
||||||
f'end_dt_param (EST): {est_end_dt_param!r}\n'
|
|
||||||
f'\n'
|
|
||||||
f'Likely contains,\n'
|
|
||||||
f'- a venue closure.\n'
|
|
||||||
f'- (maybe?) missing data ?\n'
|
|
||||||
)
|
|
||||||
# ?TODO, check against venue closure hours
|
|
||||||
# if/when provided by backend?
|
|
||||||
await tractor.pause()
|
|
||||||
|
|
||||||
expected_dur: Interval = (
|
expected_dur: Interval = (
|
||||||
last_start_dt.subtract(
|
last_start_dt.subtract(
|
||||||
|
|
@ -612,11 +581,10 @@ async def start_backfill(
|
||||||
'0 BARS TO PUSH after diff!?\n'
|
'0 BARS TO PUSH after diff!?\n'
|
||||||
f'{next_start_dt} -> {last_start_dt}'
|
f'{next_start_dt} -> {last_start_dt}'
|
||||||
)
|
)
|
||||||
await tractor.pause()
|
|
||||||
|
|
||||||
# Check if we're about to exceed buffer capacity BEFORE
|
# Check if we're about to exceed buffer capacity BEFORE
|
||||||
# attempting the push
|
# attempting the push
|
||||||
if (next_prepend_index - ln) < 0:
|
if next_prepend_index - ln < 0:
|
||||||
log.warning(
|
log.warning(
|
||||||
f'Backfill would exceed buffer capacity!\n'
|
f'Backfill would exceed buffer capacity!\n'
|
||||||
f'next_prepend_index: {next_prepend_index}\n'
|
f'next_prepend_index: {next_prepend_index}\n'
|
||||||
|
|
@ -687,7 +655,7 @@ async def start_backfill(
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
# XXX, can't push the entire frame? so
|
# can't push the entire frame? so
|
||||||
# push only the amount that can fit..
|
# push only the amount that can fit..
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
@@ -747,8 +715,8 @@ async def start_backfill(
         ) = dedupe(df)
         if diff:
             log.warning(
-                f'Found {diff!r} duplicates in tsdb! '
-                f'=> Overwriting with `deduped` data !! <=\n'
+                f'Found {diff} duplicates in tsdb, '
+                f'overwriting with deduped data\n'
             )
             await storage.write_ohlcv(
                 col_sym_key,
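Since `dedupe()` here is piker-internal, a rough `polars` stand-in for what the duplicate check amounts to (hypothetical and simplified, keyed on a `time` column; the real helper returns more than two values):

    import polars as pl

    def dedupe(df: pl.DataFrame) -> tuple[pl.DataFrame, int]:
        # drop rows sharing a timestamp, keeping the first occurrence
        # in order, and report how many rows were removed
        deduped = df.unique(subset='time', keep='first', maintain_order=True)
        return deduped, df.height - deduped.height

    df = pl.DataFrame({'time': [0, 1, 1, 2], 'close': [1.0, 1.1, 1.1, 1.2]})
    deduped, diff = dedupe(df)
    assert diff == 1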