Merge pull request #241 from pikers/fsp_hotfixes

Fsp hotfixes
simpler_quote_throttle_logic
goodboy 2021-11-05 15:44:08 -04:00 committed by GitHub
commit 837c34e24b
2 changed files with 23 additions and 27 deletions


@@ -79,7 +79,7 @@ async def filter_quotes_by_sym(

 async def fsp_compute(

-    stream: tractor.MsgStream,
+    ctx: tractor.Context,
     symbol: str,
     feed: Feed,
     quote_stream: trio.abc.ReceiveChannel,
@@ -90,7 +90,7 @@ async def fsp_compute(
     func_name: str,
     func: Callable,

-    attach_stream: bool = False,
+    attach_stream: bool = True,
     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,

 ) -> None:
@@ -147,6 +147,7 @@ async def fsp_compute(
     # setup a respawn handle
     with trio.CancelScope() as cs:
         tracker = TaskTracker(trio.Event(), cs)
+        await ctx.started(index)
         task_status.started((tracker, index))

         profiler(f'{func_name} yield last index')
@@ -155,23 +156,24 @@ async def fsp_compute(

         try:
             # rt stream
-            async for processed in out_stream:
+            async with ctx.open_stream() as stream:
+                async for processed in out_stream:

-                log.debug(f"{func_name}: {processed}")
-                index = src.index
-                dst.array[-1][func_name] = processed
+                    log.debug(f"{func_name}: {processed}")
+                    index = src.index
+                    dst.array[-1][func_name] = processed

-                # NOTE: for now we aren't streaming this to the consumer
-                # stream latest array index entry which basically just acts
-                # as trigger msg to tell the consumer to read from shm
-                if attach_stream:
-                    await stream.send(index)
+                    # NOTE: for now we aren't streaming this to the consumer
+                    # stream latest array index entry which basically just acts
+                    # as trigger msg to tell the consumer to read from shm
+                    if attach_stream:
+                        await stream.send(index)

-                # period = time.time() - last
-                # hz = 1/period if period else float('nan')
-                # if hz > 60:
-                #     log.info(f'FSP quote too fast: {hz}')
-                # last = time.time()
+                    # period = time.time() - last
+                    # hz = 1/period if period else float('nan')
+                    # if hz > 60:
+                    #     log.info(f'FSP quote too fast: {hz}')
+                    # last = time.time()

         finally:
             tracker.complete.set()
@@ -229,14 +231,13 @@ async def cascade(
     # last_len = new_len = len(src.array)

     async with (
-        ctx.open_stream() as stream,
         trio.open_nursery() as n,
     ):

         fsp_target = partial(

             fsp_compute,
-            stream=stream,
+            ctx=ctx,
             symbol=symbol,
             feed=feed,
             quote_stream=quote_stream,
@@ -255,7 +256,6 @@ async def cascade(
         last = dst.array[-1:]
         zeroed = np.zeros(last.shape, dtype=last.dtype)

-        await ctx.started(index)
         profiler(f'{func_name}: fsp up')

     async def resync(tracker: TaskTracker) -> tuple[TaskTracker, int]:
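
Taken together, the hunks above switch fsp_compute() from receiving a pre-opened tractor.MsgStream to receiving the tractor.Context itself: it now signals the first index back to the caller via ctx.started() and only then opens the msg stream for real-time output. A minimal sketch of that callee-side pattern, assuming tractor's context/streaming API as used in the diff (the name compute_task and the literal values are hypothetical, not piker code):

    import tractor
    import trio


    @tractor.context  # mark as a context entrypoint callable via open_context()
    async def compute_task(
        ctx: tractor.Context,  # delivered by the runtime instead of a raw stream
    ) -> None:
        # backfill/compute history first, then tell the caller we're up;
        # this value arrives as the 2nd item unpacked from open_context()
        index = 0
        await ctx.started(index)

        # open the bidirectional msg stream only for the real-time phase
        async with ctx.open_stream() as stream:
            for _ in range(3):  # stand-in for the fsp output loop
                index += 1
                # send the latest index as a "read from shm" trigger msg
                await stream.send(index)
                await trio.sleep(0.1)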


@@ -427,7 +427,7 @@ async def run_fsp(
     )

     async with (
-        portal.open_stream_from(
+        portal.open_context(

             # chaining entrypoint
             fsp.cascade,
@@ -437,21 +437,17 @@ async def run_fsp(
             src_shm_token=src_shm.token,
             dst_shm_token=conf['shm'].token,
             symbol=sym,
-            fsp_func_name=fsp_func_name,
+            func_name=fsp_func_name,
             loglevel=loglevel,

-        ) as stream,
+        ) as (ctx, last_index),
+        ctx.open_stream() as stream,

         open_sidepane(
             linkedsplits,
             display_name,
         ) as sidepane,
     ):
-
-        # receive last index for processed historical
-        # data-array as first msg
-        _ = await stream.receive()
-
         shm = conf['shm']

         if conf.get('overlay'):
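
On the caller side, run_fsp() above trades portal.open_stream_from() for portal.open_context(), which hands back the context plus whatever the callee passed to ctx.started(), so the explicit "receive last index as first msg" step is dropped and the stream is opened explicitly. A rough sketch of that consumer pattern under the same assumptions (compute_task and the imports from the earlier sketch; the name consume and an already-acquired tractor.Portal are hypothetical):

    async def consume(portal: tractor.Portal) -> None:
        async with (
            # yields (ctx, first) where ``first`` is the ctx.started() value,
            # replacing the old first-msg handshake over the stream
            portal.open_context(compute_task) as (ctx, last_index),
            ctx.open_stream() as stream,
        ):
            print(f'history processed up to index {last_index}')

            # each msg is just a trigger to re-read shared memory; here we
            # simply print the forwarded index
            async for index in stream:
                print(f'new fsp output at index {index}')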