Breakout fsp rt loop as non-closure for readability
parent f6f4a0cd8d
commit efd93d058a
@@ -17,6 +17,7 @@
 """
 Financial signal processing for the peeps.
 """
+from functools import partial
 from typing import AsyncIterator, Callable, Tuple
 
 import trio
@@ -29,6 +30,8 @@ from .. import data
 from ._momo import _rsi, _wma
 from ._volume import _tina_vwap
 from ..data import attach_shm_array
+from ..data.feed import Feed
+from ..data._sharedmem import ShmArray
 
 log = get_logger(__name__)
 
@@ -62,32 +65,20 @@ async def latency(
             yield value
 
 
-@tractor.stream
-async def cascade(
+async def fsp_compute(
     ctx: tractor.Context,
-    brokername: str,
-    src_shm_token: dict,
-    dst_shm_token: Tuple[str, np.dtype],
     symbol: str,
+    feed: Feed,
+
+    src: ShmArray,
+    dst: ShmArray,
+
     fsp_func_name: str,
-) -> AsyncIterator[dict]:
-    """Chain streaming signal processors and deliver output to
-    destination mem buf.
+    func: Callable,
 
-    """
-    src = attach_shm_array(token=src_shm_token)
-    dst = attach_shm_array(readonly=False, token=dst_shm_token)
-
-    func: Callable = _fsps[fsp_func_name]
-
-    # open a data feed stream with requested broker
-    async with data.open_feed(brokername, [symbol]) as feed:
-
-        assert src.token == feed.shm.token
-
-        async def fsp_compute(
-            task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,
-        ) -> None:
+    task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,
+) -> None:
 
     # TODO: load appropriate fsp with input args
@@ -95,6 +86,12 @@ async def cascade(
         sym: str,
         stream,
     ):
 
+        # TODO: make this the actualy first quote from feed
+        # XXX: this allows for a single iteration to run for history
+        # processing without waiting on the real-time feed for a new quote
+        yield {}
+
+        # task cancellation won't kill the channel
         with stream.shield():
             async for quotes in stream:
@@ -158,11 +155,48 @@ async def cascade(
             # stream latest shm array index entry
             await ctx.send_yield(index)
 
 
+@tractor.stream
+async def cascade(
+    ctx: tractor.Context,
+    brokername: str,
+    src_shm_token: dict,
+    dst_shm_token: Tuple[str, np.dtype],
+    symbol: str,
+    fsp_func_name: str,
+) -> AsyncIterator[dict]:
+    """Chain streaming signal processors and deliver output to
+    destination mem buf.
+
+    """
+    src = attach_shm_array(token=src_shm_token)
+    dst = attach_shm_array(readonly=False, token=dst_shm_token)
+
+    func: Callable = _fsps[fsp_func_name]
+
+    # open a data feed stream with requested broker
+    async with data.open_feed(brokername, [symbol]) as feed:
+
+        assert src.token == feed.shm.token
+
+        last_len = new_len = len(src.array)
+
+        fsp_target = partial(
+            fsp_compute,
+            ctx=ctx,
+            symbol=symbol,
+            feed=feed,
+
+            src=src,
+            dst=dst,
+
+            fsp_func_name=fsp_func_name,
+            func=func
+        )
+
         async with trio.open_nursery() as n:
 
-            cs = await n.start(fsp_compute)
+            cs = await n.start(fsp_target)
 
             # Increment the underlying shared memory buffer on every
             # "increment" msg received from the underlying data feed.
@@ -176,7 +210,7 @@ async def cascade(
                     # respawn the signal compute task if the source
                     # signal has been updated
                     cs.cancel()
-                    cs = await n.start(fsp_compute)
+                    cs = await n.start(fsp_target)
 
                     # TODO: adopt an incremental update engine/approach
                     # where possible here eventually!
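
Net effect of the diff: the real-time compute loop that previously lived as a closure inside cascade() is now the module-level coroutine fsp_compute(), so cascade() has to bind its arguments explicitly with functools.partial before handing the target to trio's nursery.start(), which only forwards the task_status keyword. Presumably fsp_compute hands a cancel handle back via task_status.started() so the parent can cancel and respawn it. Below is a minimal, self-contained sketch of that same pattern, not piker code; compute_loop, src and dst are illustrative stand-ins.

from functools import partial

import trio


async def compute_loop(
    src: list,                      # stand-in for the source ShmArray
    dst: list,                      # stand-in for the destination ShmArray
    task_status=trio.TASK_STATUS_IGNORED,
) -> None:
    # run inside our own cancel scope and hand it back to the starter,
    # mirroring `cs = await n.start(fsp_target)` in the diff above
    with trio.CancelScope() as cs:
        task_status.started(cs)
        while True:
            dst.append(sum(src))    # trivial stand-in for the fsp func
            await trio.sleep(0.1)


async def main() -> None:
    src, dst = [1, 2, 3], []

    async with trio.open_nursery() as n:
        # nursery.start() only forwards `task_status`, so all other
        # arguments are bound up front with functools.partial
        target = partial(compute_loop, src=src, dst=dst)
        cs = await n.start(target)

        await trio.sleep(0.3)

        # respawn pattern from the final hunk: cancel the running task
        # and start a fresh instance of the same bound target
        cs.cancel()
        cs = await n.start(target)

        await trio.sleep(0.3)
        n.cancel_scope.cancel()     # shut the example down


trio.run(main)

The cancel-scope handoff through task_status.started() is what lets the parent loop respawn the task when the source buffer is re-allocated, mirroring the cs.cancel() / n.start(fsp_target) lines in the last hunk.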