Include symbol details in feed init message from ib

Async spawn a details getter task whenever we load a symbol data feed.
Pass these symbol details in the first message delivered by the feed at
open. Move stream loop into a new func.
basic_orders
Tyler Goodlet 2021-01-22 17:11:53 -05:00
parent 5327d7be5e
commit 10e47e349c
1 changed files with 173 additions and 128 deletions

View File

@ -426,12 +426,15 @@ class Client:
""" """
contract = await self.find_contract(symbol) contract = await self.find_contract(symbol)
details_fute = self.ib.reqContractDetailsAsync(contract)
ticker: Ticker = self.ib.reqMktData( ticker: Ticker = self.ib.reqMktData(
contract, contract,
snapshot=True, snapshot=True,
) )
ticker = await ticker.updateEvent ticker = await ticker.updateEvent
return contract, ticker details = (await details_fute)[0]
return contract, ticker, details
# async to be consistent for the client proxy, and cuz why not. # async to be consistent for the client proxy, and cuz why not.
async def submit_limit( async def submit_limit(
@ -440,7 +443,7 @@ class Client:
symbol: str, symbol: str,
price: float, price: float,
action: str, action: str,
size: int = 100, size: int,
) -> int: ) -> int:
"""Place an order and return integer request id provided by client. """Place an order and return integer request id provided by client.
@ -870,6 +873,7 @@ async def stream_quotes(
symbols: List[str], symbols: List[str],
shm_token: Tuple[str, str, List[tuple]], shm_token: Tuple[str, str, List[tuple]],
loglevel: str = None, loglevel: str = None,
# compat for @tractor.msg.pub # compat for @tractor.msg.pub
topics: Any = None, topics: Any = None,
get_topics: Callable = None, get_topics: Callable = None,
@ -885,7 +889,8 @@ async def stream_quotes(
# TODO: support multiple subscriptions # TODO: support multiple subscriptions
sym = symbols[0] sym = symbols[0]
contract, first_ticker = await _trio_run_client_method( async with trio.open_nursery() as n:
contract, first_ticker, details = await _trio_run_client_method(
method='get_quote', method='get_quote',
symbol=sym, symbol=sym,
) )
@ -896,8 +901,8 @@ async def stream_quotes(
symbol=sym, symbol=sym,
) )
async with aclosing(stream): shm = None
async with trio.open_nursery() as ln:
# check if a writer already is alive in a streaming task, # check if a writer already is alive in a streaming task,
# otherwise start one and mark it as now existing # otherwise start one and mark it as now existing
@ -908,7 +913,6 @@ async def stream_quotes(
# maybe load historical ohlcv in to shared mem # maybe load historical ohlcv in to shared mem
# check if shm has already been created by previous # check if shm has already been created by previous
# feed initialization # feed initialization
async with trio.open_nursery() as ln:
if not writer_already_exists: if not writer_already_exists:
_local_buffer_writers[key] = True _local_buffer_writers[key] = True
@ -945,9 +949,23 @@ async def stream_quotes(
delay_s = times[-1] - times[times != times[-1]][-1] delay_s = times[-1] - times[times != times[-1]][-1]
subscribe_ohlc_for_increment(shm, delay_s) subscribe_ohlc_for_increment(shm, delay_s)
# pass back some symbol info like min_tick, trading_hours, etc.
# con = asdict(contract)
# syminfo = contract
symdeats = asdict(details)
symdeats.update(symdeats['contract'])
# TODO: for loop through all symbols passed in
init_msgs = {
# pass back token, and bool, signalling if we're the writer # pass back token, and bool, signalling if we're the writer
# and that history has been written # and that history has been written
await ctx.send_yield((shm_token, not writer_already_exists)) sym: {
'is_shm_writer': not writer_already_exists,
'shm_token': shm_token,
'symbol_info': symdeats,
}
}
await ctx.send_yield(init_msgs)
# check for special contract types # check for special contract types
if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex): if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
@ -988,6 +1006,7 @@ async def stream_quotes(
# with trio.move_on_after(10) as cs: # with trio.move_on_after(10) as cs:
# wait for real volume on feed (trading might be closed) # wait for real volume on feed (trading might be closed)
async with aclosing(stream):
async for ticker in stream: async for ticker in stream:
# for a real volume contract we wait for the first # for a real volume contract we wait for the first
@ -1009,9 +1028,34 @@ async def stream_quotes(
# ``aclosing()`` above? # ``aclosing()`` above?
break break
# if cs.cancelled_caught: # enter stream loop
# await tractor.breakpoint() try:
await stream_and_write(
stream=stream,
calc_price=calc_price,
topic=topic,
writer_already_exists=writer_already_exists,
shm=shm,
suffix=suffix,
ctx=ctx,
)
finally:
if not writer_already_exists:
_local_buffer_writers[key] = False
async def stream_and_write(
stream,
calc_price: bool,
topic: str,
writer_already_exists: bool,
suffix: str,
ctx: tractor.Context,
shm: Optional['SharedArray'], # noqa
) -> None:
"""Core quote streaming and shm writing loop; optimize for speed!
"""
# real-time stream # real-time stream
async for ticker in stream: async for ticker in stream:
@ -1073,12 +1117,16 @@ async def stream_quotes(
ticker.ticks = [] ticker.ticks = []
@tractor.msg.pub @tractor.msg.pub(
send_on_connect={'local_trades': 'start'}
)
async def stream_trades( async def stream_trades(
loglevel: str = None, loglevel: str = None,
get_topics: Callable = None, get_topics: Callable = None,
) -> AsyncIterator[Dict[str, Any]]: ) -> AsyncIterator[Dict[str, Any]]:
global _trades_stream_is_live
# XXX: required to propagate ``tractor`` loglevel to piker logging # XXX: required to propagate ``tractor`` loglevel to piker logging
get_console_log(loglevel or tractor.current_actor().loglevel) get_console_log(loglevel or tractor.current_actor().loglevel)
@ -1086,9 +1134,6 @@ async def stream_trades(
method='recv_trade_updates', method='recv_trade_updates',
) )
# startup msg
yield {'local_trades': 'start'}
async for event_name, item in stream: async for event_name, item in stream:
# XXX: begin normalization of nonsense ib_insync internal # XXX: begin normalization of nonsense ib_insync internal