Add better shared mem writer task checking

If you have a common broker feed daemon then you likely don't want to
create superfluous shared mem buffers for the same symbol. This adds an
ad hoc little context manager which tracks, as a bool, whether a buffer
writer task is currently running in this process. Before, we were
checking the shared array token cache and **not** clearing it when the
writer task exited, resulting in incorrect writer/loader logic on the
next entry.

Really, we need a better set of SC semantics around the shared mem
stuff, presuming there's only ever one writer per shared buffer at a
given time. Hopefully that will come soon!
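
For illustration, a minimal standalone sketch of the guard's semantics:
the registry and context manager below mirror the activate_writer added
in the ib backend diff further down, and the 'ib.ES' key is just a
made-up example of a shm buffer name:

    from contextlib import contextmanager

    # process-local registry of shm buffers that currently have a writer task
    _local_buffer_writers = {}


    @contextmanager
    def activate_writer(key: str):
        try:
            writer_already_exists = _local_buffer_writers.get(key, False)
            if not writer_already_exists:
                _local_buffer_writers[key] = True

            yield writer_already_exists
        finally:
            # always clear on exit so the next entrant sees no live writer;
            # the old shm token cache check was never cleared like this
            _local_buffer_writers.pop(key, None)


    # first entrant for a given buffer becomes the writer
    with activate_writer('ib.ES') as writer_already_exists:
        assert writer_already_exists is False

        # a second (nested/concurrent) entrant only reads
        with activate_writer('ib.ES') as writer_already_exists:
            assert writer_already_exists is True

    # state is cleared on exit, so a restarted feed task writes again
    with activate_writer('ib.ES') as writer_already_exists:
        assert writer_already_exists is False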
bar_select
Tyler Goodlet 2020-10-15 15:02:42 -04:00
parent e0613675c7
commit 454b445b4b
2 changed files with 116 additions and 94 deletions

View File

@@ -5,7 +5,7 @@ Note the client runs under an ``asyncio`` loop (since ``ib_insync`` is
 built on it) and thus actor aware API calls must be spawned with
 ``infected_aio==True``.
 """
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager, contextmanager
 from dataclasses import asdict
 from functools import partial
 from typing import List, Dict, Any, Tuple, Optional, AsyncIterator, Callable
@@ -292,7 +292,7 @@ class Client:
         ticker: Ticker = self.ib.reqMktData(contract, ','.join(opts))

         def push(t):
-            log.debug(t)
+            # log.debug(t)
             try:
                 to_trio.send_nowait(t)
             except trio.BrokenResourceError:
@@ -497,6 +497,21 @@ def normalize(
     return data


+_local_buffer_writers = {}
+
+
+@contextmanager
+def activate_writer(key: str):
+    try:
+        writer_already_exists = _local_buffer_writers.get(key, False)
+        if not writer_already_exists:
+            _local_buffer_writers[key] = True
+
+        yield writer_already_exists
+    finally:
+        _local_buffer_writers.pop(key, None)
+
+
 # TODO: figure out how to share quote feeds sanely despite
 # the wacky ``ib_insync`` api.
 # @tractor.msg.pub
@@ -528,15 +543,19 @@ async def stream_quotes(
     async with aclosing(stream):

+        # check if a writer already is alive in a streaming task,
+        # otherwise start one and mark it as now existing
+        with activate_writer(shm_token['shm_name']) as writer_already_exists:

             # maybe load historical ohlcv in to shared mem
             # check if shm has already been created by previous
             # feed initialization
-            writer_exists = get_shm_token(shm_token['shm_name'])
-            if not writer_exists:
+            if not writer_already_exists:
                 shm = attach_shm_array(
                     token=shm_token,
-                    # we are writer
+                    # we are the buffer writer
                     readonly=False,
                 )
                 bars = await _trio_run_client_method(
@@ -544,6 +563,7 @@ async def stream_quotes(
                     symbol=sym,
                 )

+                # write historical data to buffer
                 shm.push(bars)
                 shm_token = shm.token
@@ -552,7 +572,7 @@ async def stream_quotes(
                 subscribe_ohlc_for_increment(shm, delay_s)

             # pass back token, and bool, signalling if we're the writer
-            await ctx.send_yield((shm_token, not writer_exists))
+            await ctx.send_yield((shm_token, not writer_already_exists))

             # first quote can be ignored as a 2nd with newer data is sent?
             first_ticker = await stream.__anext__()
@@ -612,7 +632,7 @@ async def stream_quotes(
                 # if we are the lone tick writer start writing
                 # the buffer with appropriate trade data
-                if not writer_exists:
+                if not writer_already_exists:
                     for tick in iterticks(quote, type='trade'):
                         last = tick['price']
                         # print(f'broker last: {tick}')

View File

@@ -167,6 +167,7 @@ async def open_feed(
     # Attempt to allocate (or attach to) shm array for this broker/symbol
     shm, opened = maybe_open_shm_array(
         key=sym_to_shm_key(name, symbols[0]),
+
         # use any broker defined ohlc dtype:
         dtype=getattr(mod, '_ohlc_dtype', base_ohlc_dtype),
@@ -193,12 +194,13 @@ async def open_feed(
         # tests are in?
         shm_token, is_writer = await stream.receive()

-        if opened:
-            assert is_writer
-            log.info("Started shared mem bar writer")
-
         shm_token['dtype_descr'] = list(shm_token['dtype_descr'])
         assert shm_token == shm.token  # sanity

+        if is_writer:
+            log.info("Started shared mem bar writer")
+
         yield Feed(
             name=name,
             stream=stream,
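
As an aside on the "only ever one writer per shared buffer" model the
message above presumes, here's a standalone toy (pure stdlib
multiprocessing.shared_memory plus numpy, not piker code; all names are
made up) showing the intended layout: one side allocates the segment
and fills it, everyone else attaches by name and, by convention, only
reads:

    import numpy as np
    from multiprocessing import shared_memory

    # a tiny ohlc-style record layout, analogous to the broker-defined
    # ohlc dtypes used above
    ohlc_dtype = np.dtype([
        ('time', 'f8'), ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
    ])
    size = 1024

    # "writer" side: allocate the segment and back a structured array with it
    shm = shared_memory.SharedMemory(create=True, size=ohlc_dtype.itemsize * size)
    bars = np.ndarray((size,), dtype=ohlc_dtype, buffer=shm.buf)
    bars[0] = (1602777600.0, 100.0, 101.0, 99.5, 100.5)

    # "reader" side (normally another process): attach by name and only read
    shm_ro = shared_memory.SharedMemory(name=shm.name)
    view = np.ndarray((size,), dtype=ohlc_dtype, buffer=shm_ro.buf)
    assert view[0]['close'] == 100.5

    # drop the numpy views before closing the underlying mappings
    del bars, view
    shm_ro.close()
    shm.close()
    shm.unlink()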