Add search pause configs to backends

symbol_search
Tyler Goodlet 2021-05-18 08:35:39 -04:00
parent b2ff09f193
commit bbd5883e52
2 changed files with 32 additions and 14 deletions

View File

@@ -89,7 +89,15 @@ _time_frames = {
     'Y': 'OneYear',
 }

-_show_wap_in_history = False
+_show_wap_in_history: bool = False
+
+# optional search config the backend can register for
+# its symbol search handling (in this case we avoid
+# accepting patterns before the kb has settled more than
+# a quarter second).
+_search_conf = {
+    'pause_period': 6/16,
+}

 # overrides to sidestep pretty questionable design decisions in
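For context on the new knob: 6/16 is 0.375s, so the IB backend asks the search consumer to wait a bit over a third of a second of keyboard quiet before firing another lookup. Below is a minimal sketch of how a consumer might honor a backend's pause_period; the channel wiring and do_search callback are hypothetical, not piker's actual UI code.

    import time

    import trio


    async def relay_searches(
        patterns: trio.MemoryReceiveChannel,
        do_search,  # hypothetical async callable that runs the backend search
        pause_period: float = 6/16,
    ) -> None:
        # drop patterns that arrive before the keyboard has settled for at
        # least `pause_period` seconds since the last one we saw
        last = time.time()
        async for pattern in patterns:
            now = time.time()
            if (now - last) < pause_period:
                last = now
                continue
            last = now
            await do_search(pattern)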
@@ -156,7 +164,7 @@ _adhoc_futes_set = {
     'mes.globex',
 }

 # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924

 _enters = 0
@@ -875,7 +883,16 @@ async def backfill_bars(
     i = 0
     while i < count:

-        bars, bars_array, next_dt = await get_bars(sym, end_dt=next_dt)
+        out = await get_bars(sym, end_dt=next_dt)
+
+        if out is None:
+            # could be trying to retrieve bars over a weekend
+            # TODO: add logic here to handle tradable hours and only grab
+            # valid bars in the range
+            log.error(f"Can't grab bars starting at {next_dt}!?!?")
+            continue
+
+        bars, bars_array, next_dt = out

         shm.push(bars_array, prepend=True)
         i += 1
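The bare continue above retries the same next_dt until get_bars finally returns data, which is fine for a short weekend gap but could spin indefinitely if the backend keeps coming back empty. A hedged sketch of one way to bound that retry (not part of this commit; get_bars and shm are passed in rather than taken from the module, and max_misses is an invented parameter):

    import logging

    log = logging.getLogger(__name__)


    async def backfill_bounded(
        get_bars,   # async callable: (sym, end_dt) -> (bars, bars_array, next_dt) or None
        shm,        # shared-mem array exposing .push(array, prepend=True)
        sym: str,
        count: int = 100,
        max_misses: int = 10,
    ) -> None:
        i = 0
        misses = 0
        next_dt = None
        while i < count:
            out = await get_bars(sym, end_dt=next_dt)
            if out is None:
                misses += 1
                if misses > max_misses:
                    log.error(f'giving up on {sym} after {misses} empty replies')
                    break
                continue
            misses = 0
            bars, bars_array, next_dt = out
            shm.push(bars_array, prepend=True)
            i += 1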
@@ -1255,6 +1272,10 @@ async def open_symbol_search(
                except trio.WouldBlock:
                    pass

+            if not pattern:
+                log.warning(f'empty pattern received, skipping..')
+                continue
+
            log.debug(f'searching for {pattern}')
            # await tractor.breakpoint()
            last = time.time()
@@ -1264,14 +1285,6 @@ async def open_symbol_search(
                upto=5,
            )
            log.debug(f'got results {results.keys()}')

-            # results = await client.search_stocks(
-            #     pattern=pattern, upto=5)
-
-            # if cs.cancelled_caught:
-            #     print(f'timed out search for {pattern} !?')
-            #     # await tractor.breakpoint()
-            #     await stream.send({})
-            #     continue

            log.debug("fuzzy matching")
            matches = fuzzy.extractBests(
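The fuzzy.extractBests() call closing this hunk ranks the backend's raw results against the typed pattern. Assuming fuzzy is the usual fuzzywuzzy.process alias (the import isn't shown in this hunk), a tiny usage sketch with made-up symbols:

    from fuzzywuzzy import process as fuzzy

    candidates = ['MES.GLOBEX', 'MNQ.GLOBEX', 'ES.GLOBEX', 'SPY']  # hypothetical
    matches = fuzzy.extractBests(
        'mes',            # the user's typed pattern
        candidates,
        score_cutoff=50,  # drop weak matches
        limit=5,
    )
    # -> a list of (candidate, score) pairs, best first; exact scores will vary
    print(matches)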

View File

@@ -57,6 +57,11 @@ log = get_logger(__name__)

 _url = 'https://api.kraken.com/0'

+_search_conf = {
+    'pause_period': 0.0616
+}
+
 # Broker specific ohlc schema which includes a vwap field
 _ohlc_dtype = [
     ('index', int),
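Kraken gets a much shorter pause_period (0.0616s vs IB's 0.375s), presumably because its symbol search is cheap enough to re-fire on nearly every keystroke. Since the comment in the IB hunk calls this config optional, a consumer also needs a fallback when a backend doesn't define it; a hedged sketch of that lookup (the module path and default here are illustrative, not piker's actual API):

    import importlib


    def get_pause_period(broker: str, default: float = 0.125) -> float:
        # look up the broker backend module and read its optional `_search_conf`
        mod = importlib.import_module(f'piker.brokers.{broker}')
        conf = getattr(mod, '_search_conf', {})
        return conf.get('pause_period', default)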
@@ -231,6 +236,7 @@ class Client:
         if since is None:
             since = arrow.utcnow().floor('minute').shift(
                 minutes=-count).timestamp()
+
         # UTC 2017-07-02 12:53:20 is oldest seconds value
         since = str(max(1499000000, since))
         json = await self._public(
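For reference, the since logic in this hunk (which the commit appears to touch only for spacing) floors 'now' to the minute, steps back count minutes, then clamps to the oldest epoch the endpoint serves. A small standalone sketch, assuming arrow >= 1.0 where .timestamp() is a method rather than a property:

    import arrow

    count = 720  # hypothetical: request the last 720 one-minute bars
    since = arrow.utcnow().floor('minute').shift(minutes=-count).timestamp()

    # 1499000000 == 2017-07-02 12:53:20 UTC, the oldest seconds value per the
    # comment above, so anything older gets clamped to it
    since = str(max(1499000000, since))
    print(since)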
@@ -488,12 +494,12 @@ async def open_autorecon_ws(url):

 async def backfill_bars(
     sym: str,
     shm: ShmArray,  # type: ignore # noqa
     count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 ) -> None:
     """Fill historical bars into shared mem / storage afap.
     """
@@ -639,7 +645,6 @@ async def stream_quotes(
                await send_chan.send({topic: quote})


 @tractor.context
 async def open_symbol_search(
     ctx: tractor.Context,