Merge pull request #260 from pikers/dark_vlm

Dark vlm
py3.10_support
goodboy 2022-01-30 14:10:19 -05:00 committed by GitHub
commit a2698c73b5
11 changed files with 366 additions and 177 deletions


@@ -38,6 +38,7 @@ import contextlib
 import trio
 import tractor
+from tractor.experimental import msgpub
 from async_generator import asynccontextmanager

 from ..log import get_logger, get_console_log
@@ -98,7 +99,7 @@ class BrokerFeed:
 )


-@tractor.msg.pub(tasks=['stock', 'option'])
+@msgpub(tasks=['stock', 'option'])
 async def stream_poll_requests(
     get_topics: Callable,
     get_quotes: Coroutine,
@@ -293,7 +294,7 @@ async def start_quote_stream(
     await stream_poll_requests(

-        # ``msg.pub`` required kwargs
+        # ``trionics.msgpub`` required kwargs
         task_name=feed_type,
         ctx=ctx,
         topics=symbols,


@@ -1032,7 +1032,11 @@ async def get_client(
 # https://interactivebrokers.github.io/tws-api/tick_types.html
 tick_types = {
     77: 'trade',
-    48: 'utrade',
+
+    # a "utrade" aka an off exchange "unreportable" (dark) vlm:
+    # https://interactivebrokers.github.io/tws-api/tick_types.html#rt_volume
+    48: 'dark_trade',
+
     0: 'bsize',
     1: 'bid',
     2: 'ask',
@@ -1046,13 +1050,17 @@ tick_types = {
 def normalize(
     ticker: Ticker,
     calc_price: bool = False

 ) -> dict:
     # convert named tuples to dicts so we send usable keys
     new_ticks = []
     for tick in ticker.ticks:
         if tick and not isinstance(tick, dict):
             td = tick._asdict()
-            td['type'] = tick_types.get(td['tickType'], 'n/a')
+            td['type'] = tick_types.get(
+                td['tickType'],
+                'n/a',
+            )

             new_ticks.append(td)
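
To illustrate the remapping, here's a self-contained sketch of the same normalization (the ``TickData`` stand-in and sample values are fabricated, not from this commit):

from collections import namedtuple

# minimal stand-in for the ib_insync tick named tuples
TickData = namedtuple('TickData', ['tickType', 'price', 'size'])

tick_types = {
    77: 'trade',
    48: 'dark_trade',  # off exchange "unreportable" (dark) vlm
    0: 'bsize',
    1: 'bid',
    2: 'ask',
}

def normalize_ticks(ticks) -> list[dict]:
    # convert named tuples to dicts and tag with a readable type
    new_ticks = []
    for tick in ticks:
        if tick and not isinstance(tick, dict):
            td = tick._asdict()
            td['type'] = tick_types.get(td['tickType'], 'n/a')
            new_ticks.append(td)
    return new_ticks

print(normalize_ticks([TickData(48, 101.5, 300)]))
# -> [{'tickType': 48, 'price': 101.5, 'size': 300, 'type': 'dark_trade'}]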


@@ -553,8 +553,6 @@ async def stream_quotes(
             quote = ohlc
             topic = quote['symbol'].lower()
-            # XXX: format required by ``tractor.msg.pub``
-            # requires a ``Dict[topic: str, quote: dict]``
             await send_chan.send({topic: quote})
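
The deleted comment referenced the ``Dict[topic: str, quote: dict]`` shape that ``tractor.msg.pub`` used to require; the send side still emits exactly that topic-keyed framing. A toy reproduction over a plain ``trio`` memory channel (symbol and fields fabricated):

import trio

async def demo() -> None:
    send_chan, recv_chan = trio.open_memory_channel(8)

    async with send_chan, recv_chan:
        quote = {'symbol': 'XBTUSD', 'last': 43000.5, 'ticks': []}
        topic = quote['symbol'].lower()

        # publisher side: one dict keyed by the lowercased symbol
        await send_chan.send({topic: quote})

        # consumer side: unpack quotes per topic
        msg = await recv_chan.receive()
        for topic, quote in msg.items():
            print(topic, quote['last'])

trio.run(demo)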


@@ -25,14 +25,18 @@ import numpy as np
 def iterticks(
     quote: dict,
-    types: Tuple[str] = ('trade', 'utrade'),
+    types: Tuple[str] = ('trade', 'dark_trade'),

 ) -> AsyncIterator:
-    """Iterate through ticks delivered per quote cycle.
-    """
+    '''
+    Iterate through ticks delivered per quote cycle.

+    '''
     # print(f"{quote}\n\n")
     ticks = quote.get('ticks', ())
     if ticks:
         for tick in ticks:
             # print(f"{quote['symbol']}: {tick}")
-            if tick.get('type') in types:
+            ttype = tick.get('type')
+            if ttype in types:
                 yield tick
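
Usage of the updated filter, with a fabricated quote payload:

quote = {
    'symbol': 'msft',
    'ticks': [
        {'type': 'trade', 'price': 290.1, 'size': 100},
        {'type': 'dark_trade', 'price': 290.0, 'size': 5000},
        {'type': 'bid', 'price': 290.0, 'size': 200},
    ],
}

# lit and dark trades pass the default filter; the bid tick does not
for tick in iterticks(quote):
    assert tick['type'] in ('trade', 'dark_trade')

# or narrow to dark prints only
for tick in iterticks(quote, types=('dark_trade',)):
    print(tick['size'])  # -> 5000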


@@ -176,12 +176,11 @@ async def sample_and_broadcast(
     # TODO: ``numba`` this!
     for sym, quote in quotes.items():
-        # TODO: in theory you can send the IPC msg *before*
-        # writing to the sharedmem array to decrease latency,
-        # however, that will require `tractor.msg.pub` support
-        # here or at least some way to prevent task switching
-        # at the yield such that the array write isn't delayed
-        # while another consumer is serviced..
+        # TODO: in theory you can send the IPC msg *before* writing
+        # to the sharedmem array to decrease latency, however, that
+        # will require at least some way to prevent task switching
+        # at the yield such that the array write isn't delayed while
+        # another consumer is serviced..

         # start writing the shm buffer with appropriate
         # trade data

piker/fsp/_api.py (new file, mode 100644, +163 lines)

@@ -0,0 +1,163 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship of pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
FSP (financial signal processing) apis.

'''
# TODO: things to figure the heck out:
# - how to handle non-plottable values (pyqtgraph has facility for this
#   now in `arrayToQPath()`)
# - composition of fsps / implicit chaining syntax (we need an issue)
from __future__ import annotations
from functools import partial
from typing import (
    Any,
    Callable,
    Awaitable,
    Optional,
)

import numpy as np
import tractor
from tractor.msg import NamespacePath

from ..data._sharedmem import (
    ShmArray,
    maybe_open_shm_array,
)
from ..log import get_logger

log = get_logger(__name__)

# global fsp registry filled out by @fsp decorator below
_fsp_registry = {}


def _load_builtins() -> dict[tuple, Callable]:
    # import to implicitly trigger registration via ``@fsp``
    from . import _momo  # noqa
    from . import _volume  # noqa

    return _fsp_registry


class Fsp:
    '''
    "Financial signal processor" decorator wrapped async function.

    '''
    # TODO: checkout the advanced features from ``wrapt``:
    # - dynamic enable toggling,
    #   https://wrapt.readthedocs.io/en/latest/decorators.html#dynamically-disabling-decorators
    # - custom object proxies, might be useful for implementing n-compose
    #   https://wrapt.readthedocs.io/en/latest/wrappers.html#custom-object-proxies
    # - custom function wrappers,
    #   https://wrapt.readthedocs.io/en/latest/wrappers.html#custom-function-wrappers

    def __init__(
        self,
        func: Callable[..., Awaitable],
        *,
        outputs: tuple[str] = (),
        display_name: Optional[str] = None,
        **config,

    ) -> None:
        # TODO (maybe):
        # - type introspection?
        # - should we make this a wrapt object proxy?
        self.func = func
        self.__name__ = func.__name__  # XXX: must have func-object name
        self.ns_path: tuple[str, str] = NamespacePath.from_ref(func)
        self.outputs = outputs
        self.config: dict[str, Any] = config

        # register with declared set.
        _fsp_registry[self.ns_path] = func

    @property
    def name(self) -> str:
        return self.__name__

    def __call__(
        self,

        # TODO: when we settle on py3.10 we should probably use the new
        # type annots from pep 612:
        # https://www.python.org/dev/peps/pep-0612/
        # instance,
        *args,
        **kwargs
    ):
        return self.func(*args, **kwargs)


def fsp(
    wrapped=None,
    *,
    outputs: tuple[str] = (),
    display_name: Optional[str] = None,
    **config,

) -> Fsp:

    if wrapped is None:
        return partial(
            Fsp,
            outputs=outputs,
            display_name=display_name,
            **config,
        )

    return Fsp(wrapped, outputs=(wrapped.__name__,))


def maybe_mk_fsp_shm(
    sym: str,
    target: Fsp,
    readonly: bool = True,

) -> (ShmArray, bool):
    '''
    Allocate a single row shm array for a symbol-fsp pair if none
    exists, otherwise load the shm already existing for that token.

    '''
    uid = tractor.current_actor().uid

    # TODO: load output types from `Fsp`
    # - should `index` be a required internal field?
    fsp_dtype = np.dtype(
        [('index', int)] +
        [(field_name, float) for field_name in target.outputs]
    )

    key = f'{sym}.fsp.{target.name}.{".".join(uid)}'

    shm, opened = maybe_open_shm_array(
        key,

        # TODO: create entry for each time frame
        dtype=fsp_dtype,
        readonly=readonly,
    )
    return shm, opened
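
To make the decorator flow concrete, a hypothetical user-defined fsp (not part of this commit) would register itself like so:

from piker.fsp._api import fsp, _fsp_registry

@fsp(
    outputs=('spread',),
    ohlc=False,
)
async def spread(source, ohlcv):
    # first yield delivers history; later yields stream
    # (key, value) updates per quote
    yield 'spread', 0.0

# decoration wrapped the coroutine in an ``Fsp`` and registered it
# under its ``module.path:func_name`` namespace path
assert spread.ns_path in _fsp_registry
assert spread.name == 'spread'

# ``maybe_mk_fsp_shm()`` would then allocate a struct array with
# dtype ``[('index', int), ('spread', float)]`` for its output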


@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -27,29 +27,18 @@ import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus
 import tractor
+from tractor.msg import NamespacePath

 from ..log import get_logger, get_console_log
 from .. import data
 from ..data import attach_shm_array
 from ..data.feed import Feed
 from ..data._sharedmem import ShmArray
-from ._momo import _rsi, _wma
-from ._volume import _tina_vwap, dolla_vlm
+from ._api import Fsp
+from ._api import _load_builtins

 log = get_logger(__name__)

-_fsp_builtins = {
-    'rsi': _rsi,
-    'wma': _wma,
-    'vwap': _tina_vwap,
-    'dolla_vlm': dolla_vlm,
-}
-
-# TODO: things to figure the heck out:
-# - how to handle non-plottable values (pyqtgraph has facility for this
-#   now in `arrayToQPath()`)
-# - composition of fsps / implicit chaining syntax (we need an issue)

 @dataclass
 class TaskTracker:
@@ -88,7 +77,6 @@ async def fsp_compute(
     src: ShmArray,
     dst: ShmArray,
-    func_name: str,
     func: Callable,

     attach_stream: bool = False,
@@ -115,14 +103,26 @@ async def fsp_compute(
     # and get historical output
     history_output = await out_stream.__anext__()

+    func_name = func.__name__
     profiler(f'{func_name} generated history')

-    # build a struct array which includes an 'index' field to push
-    # as history
-    history = np.array(
-        np.arange(len(history_output)),
+    # build struct array with an 'index' field to push as history
+    history = np.zeros(
+        len(history_output),
         dtype=dst.array.dtype
     )
-    history[func_name] = history_output

+    # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no?
+    # if the output array is multi-field then push
+    # each respective field.
+    fields = getattr(history.dtype, 'fields', None)
+    if fields:
+        for key in fields.keys():
+            if key in history.dtype.fields:
+                history[func_name] = history_output
+
+    # single-key output stream
+    else:
+        history[func_name] = history_output

     # TODO: XXX:
@@ -164,8 +164,9 @@ async def fsp_compute(
     async for processed in out_stream:
         log.debug(f"{func_name}: {processed}")

+        key, output = processed
         index = src.index
-        dst.array[-1][func_name] = processed
+        dst.array[-1][key] = output

         # NOTE: for now we aren't streaming this to the consumer
         # stream latest array index entry which basically just acts
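
Both the history push above and the real-time ``dst.array[-1][key] = output`` write rely on numpy structured arrays where each fsp output is a named field alongside ``index``. A standalone sketch (values fabricated):

import numpy as np

dtype = np.dtype([
    ('index', int),
    ('dolla_vlm', float),
    ('dark_vlm', float),
])

# zeroed struct array sized to the history, filled field-by-field
history = np.zeros(3, dtype=dtype)
history['dolla_vlm'] = np.array([1e6, 2e6, 3e6])

# and a keyed single-row update as in the streaming loop
history[-1]['dark_vlm'] = 5e5

print(history['dolla_vlm'])  # -> [1000000. 2000000. 3000000.]
print(history[-1])           # -> (0, 3000000., 500000.)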
@@ -194,7 +195,7 @@ async def cascade(
     src_shm_token: dict,
     dst_shm_token: tuple[str, np.dtype],

-    func_name: str,
+    ns_path: NamespacePath,

     zero_on_step: bool = False,
     loglevel: Optional[str] = None,
@@ -213,10 +214,18 @@ async def cascade(
     src = attach_shm_array(token=src_shm_token)
     dst = attach_shm_array(readonly=False, token=dst_shm_token)

-    func: Callable = _fsp_builtins.get(func_name)
+    reg = _load_builtins()
+    lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg])
+    log.info(
+        f'Registered FSP set:\n{lines}'
+    )
+    func: Fsp = reg.get(
+        NamespacePath(ns_path)
+    )
+
     if not func:
         # TODO: assume it's a func target path
-        raise ValueError('Unknown fsp target: {func_name}')
+        raise ValueError(f'Unknown fsp target: {ns_path}')

     # open a data feed stream with requested broker
     async with data.feed.maybe_open_feed(
@@ -231,11 +240,12 @@ async def cascade(
     ) as (feed, quote_stream):

-        profiler(f'{func_name}: feed up')
+        profiler(f'{func}: feed up')

         assert src.token == feed.shm.token
         # last_len = new_len = len(src.array)

+        func_name = func.__name__
         async with (
             trio.open_nursery() as n,
         ):
@@ -252,7 +262,7 @@ async def cascade(
                 src=src,
                 dst=dst,
-                func_name=func_name,
+                # func_name=func_name,
                 func=func
             )
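
A ``NamespacePath`` is a colon-delimited 'module.path:func_name' string (hence the ``key.rpartition(':')`` in the registry logging above), so the engine can ship a plain string over IPC and resolve it back to the registered fsp. The lookup idea reduced to stdlib Python (a hand-rolled sketch, not tractor's actual implementation):

from importlib import import_module

def load_ref(ns_path: str):
    # 'pkg.module:object' -> the live object
    modpath, _, refname = ns_path.partition(':')
    return getattr(import_module(modpath), refname)

# e.g. resolving a stdlib ref the same way
assert load_ref('math:sqrt')(9) == 3.0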


@@ -23,6 +23,7 @@ from typing import AsyncIterator, Optional
 import numpy as np
 from numba import jit, float64, optional, int64

+from ._api import fsp
 from ..data._normalize import iterticks
 from ..data._sharedmem import ShmArray
@@ -106,7 +107,7 @@ def ema(
 #     nopython=True,
 #     nogil=True
 # )
-def rsi(
+def _rsi(

     # TODO: use https://github.com/ramonhagenaars/nptyping
     signal: 'np.ndarray[float64]',
@@ -146,7 +147,7 @@ def rsi(
     return rsi, up_ema[-1], down_ema[-1]


-def wma(
+def _wma(

     signal: np.ndarray,
     length: int,
@@ -169,10 +170,8 @@ def wma(
     return np.convolve(signal, weights, 'valid')


-# @piker.fsp.emit(
-#     timeframes=['1s', '5s', '15s', '1m', '5m', '1H'],
-# )
-async def _rsi(
+@fsp
+async def rsi(

     source: 'QuoteStream[Dict[str, Any]]',  # noqa
     ohlcv: ShmArray,
@@ -188,11 +187,11 @@ async def _rsi(
     sig = ohlcv.array['close']

     # wilder says to seed the RSI EMAs with the SMA for the "period"
-    seed = wma(ohlcv.last(period)['close'], period)[0]
+    seed = _wma(ohlcv.last(period)['close'], period)[0]

     # TODO: the emas here should be seeded with a period SMA as per
     # wilder's original formula..
-    rsi_h, last_up_ema_close, last_down_ema_close = rsi(
+    rsi_h, last_up_ema_close, last_down_ema_close = _rsi(
         sig, period, seed, seed)
     up_ema_last = last_up_ema_close
     down_ema_last = last_down_ema_close
@@ -218,7 +217,7 @@ async def _rsi(
             last_down_ema_close = down_ema_last
             index = ohlcv.index

-            rsi_out, up_ema_last, down_ema_last = rsi(
+            rsi_out, up_ema_last, down_ema_last = _rsi(
                 sig,
                 period=period,
                 up_ema_last=last_up_ema_close,
@@ -227,7 +226,8 @@ async def _rsi(
             yield rsi_out[-1:]


-async def _wma(
+@fsp
+async def wma(

     source,  #: AsyncStream[np.ndarray],
     length: int,
@@ -243,10 +243,10 @@ async def _wma(
     '''
     # deliver historical output as "first yield"
-    yield wma(ohlcv.array['close'], length)
+    yield _wma(ohlcv.array['close'], length)

     # begin real-time section
     async for quote in source:
         for tick in iterticks(quote, type='trade'):
-            yield wma(ohlcv.last(length))
+            yield _wma(ohlcv.last(length))
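
The rename makes the kernel/wrapper split explicit: ``_rsi``/``_wma`` are the synchronous numpy kernels while the ``@fsp``-decorated ``rsi``/``wma`` are the async streaming fsps. Only ``_wma``'s convolution return line shows in these hunks, so the following is a rough reconstruction; the uniform default weighting is an assumption based on the 'seed the RSI EMAs with the SMA' comment above:

import numpy as np

def _wma(signal: np.ndarray, length: int, weights=None) -> np.ndarray:
    if weights is None:
        # uniform kernel == plain SMA, which the rsi seeding relies on
        weights = np.full(length, 1 / length)
    return np.convolve(signal, weights, 'valid')

close = np.array([10., 11., 12., 13., 14.])
print(_wma(close, 3))  # -> [11. 12. 13.]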


@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -19,6 +19,7 @@ from typing import AsyncIterator, Optional, Union
 import numpy as np
 from tractor.trionics._broadcast import AsyncReceiver

+from ._api import fsp
 from ..data._normalize import iterticks
 from ..data._sharedmem import ShmArray
@@ -50,7 +51,8 @@ def wap(
     )


-async def _tina_vwap(
+@fsp
+async def tina_vwap(

     source: AsyncReceiver[dict],
     ohlcv: ShmArray,  # OHLC sampled history
@@ -62,7 +64,8 @@ async def _tina_vwap(
     AsyncIterator[np.ndarray],
     float
 ]:
-    '''Streaming volume weighted moving average.
+    '''
+    Streaming volume weighted moving average.

     Calling this "tina" for now since we're using HLC3 instead of tick.
@@ -100,26 +103,25 @@ async def _tina_vwap(
             w_tot += price * size

             # yield ((((o + h + l) / 3) * v) weights_tot) / v_tot
-            yield w_tot / v_tot
+            yield 'tina_vwap', w_tot / v_tot


-# @fsp.config(
-#     name='dolla_vlm',
-#     ohlc=False,
-#     style='step',
-# )
+@fsp(
+    outputs=('dolla_vlm', 'dark_vlm'),
+    ohlc=False,
+    curve_style='step',
+)
 async def dolla_vlm(

     source: AsyncReceiver[dict],
     ohlcv: ShmArray,  # OHLC sampled history

-) -> Union[
-    AsyncIterator[np.ndarray],
-    float
-]:
+) -> AsyncIterator[
+    tuple[str, Union[np.ndarray, float]],
+]:
     '''
     "Dollar Volume", aka the volume in asset-currency-units (usually
     a fiat) computed from some price function for the sample step
-    *times* the asset unit volume.
+    *multiplied* (*) by the asset unit volume.

     Useful for comparing cross asset "money flow" in #s that are
     asset-currency-independent.
@@ -129,11 +131,12 @@ async def dolla_vlm(
     chl3 = (a['close'] + a['high'] + a['low']) / 3
     v = a['volume']

-    # history
+    # on first iteration yield history
     yield chl3 * v

     i = ohlcv.index
-    lvlm = 0
+    output = vlm = 0
+    dvlm = 0

     async for quote in source:
         for tick in iterticks(quote):
@@ -145,14 +148,30 @@ async def dolla_vlm(
             li = ohlcv.index
             if li > i:
                 i = li
-                lvlm = 0
+                vlm = 0
+                dvlm = 0

-            c, h, l, v = ohlcv.last()[
-                ['close', 'high', 'low', 'volume']
-            ][0]
-            lvlm += price * size
-            tina_lvlm = c+h+l/3 * v
+            # TODO: for marginned instruments (futes, etfs?) we need to
+            # show the margin $vlm by multiplying by whatever multiplier
+            # is reported in the sym info.
+            ttype = tick.get('type')
+            if ttype == 'dark_trade':
+                print(f'dark_trade: {tick}')
+                key = 'dark_vlm'
+                dvlm += price * size
+                output = dvlm
+            else:
+                key = 'dolla_vlm'
+                vlm += price * size
+                output = vlm

+            # TODO: plot both to compare?
+            # c, h, l, v = ohlcv.last()[
+            #     ['close', 'high', 'low', 'volume']
+            # ][0]
+            # tina_lvlm = c+h+l/3 * v
             # print(f' tinal vlm: {tina_lvlm}')

-            yield lvlm
+            yield key, output
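
Stripped of the shm and feed plumbing, the new per-tick routing is a simple keyed accumulation, e.g. with fabricated ticks:

ticks = [
    {'type': 'trade', 'price': 100.0, 'size': 10},
    {'type': 'dark_trade', 'price': 99.9, 'size': 500},
    {'type': 'trade', 'price': 100.1, 'size': 20},
]

vlm = dvlm = 0.0
for tick in ticks:
    price, size = tick['price'], tick['size']
    if tick.get('type') == 'dark_trade':
        dvlm += price * size       # -> routed to the 'dark_vlm' output
        print(('dark_vlm', dvlm))
    else:
        vlm += price * size        # -> routed to the 'dolla_vlm' output
        print(('dolla_vlm', vlm))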


@@ -28,7 +28,6 @@ from typing import Optional, AsyncGenerator, Any
 import numpy as np
 from pydantic import create_model
 import tractor
-# from tractor.trionics import gather_contexts
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus
@@ -38,57 +37,25 @@ from .._cacheables import maybe_open_context
 from ..calc import humanize
 from ..data._sharedmem import (
     ShmArray,
-    maybe_open_shm_array,
     try_read,
 )
 from ._chart import (
     ChartPlotWidget,
     LinkedSplits,
 )
-from .. import fsp
 from ._forms import (
     FieldsForm,
     mk_form,
     open_form_input_handling,
 )
+from ..fsp._api import maybe_mk_fsp_shm, Fsp
+from ..fsp import cascade
+from ..fsp._volume import tina_vwap, dolla_vlm
 from ..log import get_logger

 log = get_logger(__name__)

-def maybe_mk_fsp_shm(
-    sym: str,
-    field_name: str,
-    display_name: Optional[str] = None,
-    readonly: bool = True,
-
-) -> (ShmArray, bool):
-    '''
-    Allocate a single row shm array for a symbol-fsp pair if none
-    exists, otherwise load the shm already existing for that token.
-
-    '''
-    uid = tractor.current_actor().uid
-
-    if not display_name:
-        display_name = field_name
-
-    # TODO: load function here and introspect
-    # return stream type(s)
-
-    # TODO: should `index` be a required internal field?
-    fsp_dtype = np.dtype([('index', int), (field_name, float)])
-
-    key = f'{sym}.fsp.{display_name}.{".".join(uid)}'
-
-    shm, opened = maybe_open_shm_array(
-        key,
-
-        # TODO: create entry for each time frame
-        dtype=fsp_dtype,
-        readonly=True,
-    )
-    return shm, opened


 def has_vlm(ohlcv: ShmArray) -> bool:
     # make sure that the instrument supports volume history
     # (sometimes this is not the case for some commodities and
@@ -148,11 +115,11 @@ async def open_fsp_sidepane(
     assert len(conf) == 1  # for now

     # add (single) selection widget
-    for display_name, config in conf.items():
-        schema[display_name] = {
+    for name, config in conf.items():
+        schema[name] = {
             'label': '**fsp**:',
             'type': 'select',
-            'default_value': [display_name],
+            'default_value': [name],
         }

         # add parameters for selection "options"
@@ -180,7 +147,7 @@ async def open_fsp_sidepane(
     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
     FspConfig = create_model(
         'FspConfig',
-        name=display_name,
+        name=name,
         **params,
     )
     sidepane.model = FspConfig()
@@ -228,8 +195,7 @@ async def run_fsp_ui(
     linkedsplits: LinkedSplits,
     shm: ShmArray,
     started: trio.Event,
-    func_name: str,
-    display_name: str,
+    target: Fsp,
     conf: dict[str, dict],
     loglevel: str,
     # profiler: pg.debug.Profiler,
@@ -244,17 +210,18 @@ async def run_fsp_ui(
     config.

     '''
-    # profiler(f'started UI task for fsp: {func_name}')
+    name = target.name
+    # profiler(f'started UI task for fsp: {name}')

     async with (
         # side UI for parameters/controls
         open_fsp_sidepane(
             linkedsplits,
-            {display_name: conf},
+            {name: conf},
         ) as sidepane,
     ):
         await started.wait()
-        # profiler(f'fsp:{func_name} attached to fsp ctx-stream')
+        # profiler(f'fsp:{name} attached to fsp ctx-stream')

         overlay_with = conf.get('overlay', False)
         if overlay_with:
@@ -264,24 +231,24 @@ async def run_fsp_ui(
             chart = linkedsplits.subplots[overlay_with]

             chart.draw_curve(
-                name=display_name,
+                name=name,
                 data=shm.array,
                 overlay=True,
                 color='default_light',
-                array_key=func_name,
+                array_key=name,
                 separate_axes=conf.get('separate_axes', False),
                 **conf.get('chart_kwargs', {})
             )
             # specially store ref to shm for lookup in display loop
-            chart._overlays[display_name] = shm
+            chart._overlays[name] = shm

         else:
             # create a new sub-chart widget for this fsp
             chart = linkedsplits.add_plot(
-                name=display_name,
+                name=name,
                 array=shm.array,
-                array_key=func_name,
+                array_key=name,
                 sidepane=sidepane,

                 # curve by default
@@ -299,15 +266,15 @@ async def run_fsp_ui(
             # should **not** be the same sub-chart widget
             assert chart.name != linkedsplits.chart.name

-        array_key = func_name
+        array_key = name

-        # profiler(f'fsp:{func_name} chart created')
+        # profiler(f'fsp:{name} chart created')

         # first UI update, usually from shm pushed history
         update_fsp_chart(
             chart,
             shm,
-            display_name,
+            name,
             array_key=array_key,
         )
@@ -320,7 +287,7 @@ async def run_fsp_ui(
     # logic inside ``.paint()`` for ``self.opts['fillLevel']`` which
     # might be the best solution?

-    # graphics = chart.update_from_array(chart.name, array[func_name])
+    # graphics = chart.update_from_array(chart.name, array[name])
     # graphics.curve.setBrush(50, 50, 200, 100)
     # graphics.curve.setFillLevel(50)
@@ -410,7 +377,7 @@ class FspAdmin:
         started: trio.Event,
         dst_shm: ShmArray,
         conf: dict,
-        func_name: str,
+        target: Fsp,
         loglevel: str,

     ) -> None:
@@ -420,11 +387,12 @@ class FspAdmin:
         '''
         brokername, sym = self.linked.symbol.front_feed()

+        ns_path = str(target.ns_path)
         async with (
             portal.open_context(

                 # chaining entrypoint
-                fsp.cascade,
+                cascade,

                 # data feed key
                 brokername=brokername,
@@ -435,7 +403,7 @@ class FspAdmin:
                 dst_shm_token=dst_shm.token,

                 # target
-                func_name=func_name,
+                ns_path=ns_path,

                 loglevel=loglevel,
                 zero_on_step=conf.get('zero_on_step', False),
@@ -444,8 +412,13 @@ class FspAdmin:
             ctx.open_stream() as stream,
         ):
             # register output data
-            self._registry[(brokername, sym, func_name)] = (
-                stream, dst_shm, complete)
+            self._registry[
+                (brokername, sym, ns_path)
+            ] = (
+                stream,
+                dst_shm,
+                complete
+            )

             started.set()
@@ -455,39 +428,38 @@ class FspAdmin:
     async def start_engine_task(
         self,

-        display_name: str,
+        target: Fsp,
         conf: dict[str, dict[str, Any]],
         worker_name: Optional[str] = None,
-        loglevel: str = 'error',
+        loglevel: str = 'info',

     ) -> (ShmArray, trio.Event):

-        # unpack FSP details from config dict
-        func_name = conf['func_name']
+        fqsn = self.linked.symbol.front_feed()

         # allocate an output shm array
         dst_shm, opened = maybe_mk_fsp_shm(
-            self.linked.symbol.front_feed(),
-            field_name=func_name,
-            display_name=display_name,
+            fqsn,
+            target=target,
             readonly=True,
         )
-        if not opened:
-            raise RuntimeError(f'Already started FSP {func_name}')
+        # if not opened:
+        #     raise RuntimeError(
+        #         f'Already started FSP `{fqsn}:{func_name}`'
+        #     )

         portal = self.cluster.get(worker_name) or self.rr_next_portal()
         complete = trio.Event()
         started = trio.Event()

         self.tn.start_soon(
             self.open_chain,
             portal,
             complete,
             started,
             dst_shm,
             conf,
-            func_name,
+            target,
             loglevel,
         )
@@ -495,16 +467,16 @@ class FspAdmin:
     async def open_fsp_chart(
         self,
-        display_name: str,

+        target: Fsp,
         conf: dict,  # yeah probably dumb..
         loglevel: str = 'error',

     ) -> (trio.Event, ChartPlotWidget):

-        func_name = conf['func_name']

         shm, started = await self.start_engine_task(
-            display_name,
+            target,
             conf,
             loglevel,
         )
@@ -517,8 +489,7 @@ class FspAdmin:
                 self.linked,
                 shm,
                 started,
-                func_name,
-                display_name,
+                target,

                 conf=conf,
                 loglevel=loglevel,
@@ -621,14 +592,22 @@ async def open_vlm_displays(
     )

     # force 0 to always be in view
-    def maxmin(name) -> tuple[float, float]:
-        mxmn = chart.maxmin(name=name)
-        if mxmn:
-            return 0, mxmn[1]
-        return 0, 0
+    def maxmin(
+        names: list[str],
+    ) -> tuple[float, float]:
+        mx = 0
+        for name in names:
+            mxmn = chart.maxmin(name=name)
+            if mxmn:
+                mx = max(mxmn[1], mx)
+        # if mx:
+        #     return 0, mxmn[1]
+        return 0, mx

-    chart.view._maxmin = partial(maxmin, name='volume')
+    chart.view._maxmin = partial(maxmin, names=['volume'])

     # TODO: fix the x-axis label issue where if you put
     # the axis on the left it's totally not lined up...
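
The generalized ``maxmin`` is a max-of-maxes across named curves so autoranging keeps every overlaid series in view. In isolation, with a dict standing in for the ``chart.maxmin()`` lookups:

from functools import partial

series_hilos = {
    'dolla_vlm': (0.0, 1.25e6),
    'dark_vlm': (0.0, 3.4e6),
}

def maxmin(names: list[str]) -> tuple[float, float]:
    # pin the range floor at 0 and take the largest high across curves
    mx = 0.0
    for name in names:
        mxmn = series_hilos.get(name)
        if mxmn:
            mx = max(mxmn[1], mx)
    return 0, mx

vlm_maxmin = partial(maxmin, names=['dolla_vlm', 'dark_vlm'])
print(vlm_maxmin())  # -> (0, 3400000.0)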
@@ -671,8 +650,8 @@ async def open_vlm_displays(
     # spawn and overlay $ vlm on the same subchart
     shm, started = await admin.start_engine_task(
-        'dolla_vlm',
-        # linked.symbol.front_feed(),  # data-feed symbol key
+        dolla_vlm,

         {  # fsp engine conf
-            'func_name': 'dolla_vlm',
             'zero_on_step': True,
@@ -704,18 +683,17 @@ async def open_vlm_displays(
     )

     # add custom auto range handler
-    pi.vb._maxmin = partial(maxmin, name='dolla_vlm')
+    pi.vb._maxmin = partial(
+        maxmin,
+        # keep both regular and dark vlm in view
+        names=['dolla_vlm', 'dark_vlm'],
+    )

     curve, _ = chart.draw_curve(
         name='dolla_vlm',
         data=shm.array,
         array_key='dolla_vlm',
         overlay=pi,
-        # color='bracket',
-        # TODO: this color or dark volume
-        # color='charcoal',
         step_mode=True,
         # **conf.get('chart_kwargs', {})
     )
@@ -732,6 +710,17 @@ async def open_vlm_displays(
     # ``.draw_curve()``.
     chart._overlays['dolla_vlm'] = shm

+    curve, _ = chart.draw_curve(
+        name='dark_vlm',
+        data=shm.array,
+        array_key='dark_vlm',
+        overlay=pi,
+        color='charcoal',  # darker theme hue
+        step_mode=True,
+        # **conf.get('chart_kwargs', {})
+    )
+    chart._overlays['dark_vlm'] = shm
+
     # XXX: old dict-style config before it was moved into the
     # helper task
     # 'dolla_vlm': {
@@ -759,15 +748,14 @@ async def open_vlm_displays(
         axis.size_to_values()

     # built-in vlm fsps
-    for display_name, conf in {
-        'vwap': {
-            'func_name': 'vwap',
+    for target, conf in {
+        tina_vwap: {
             'overlay': 'ohlc',  # overlays with OHLCV (main) chart
             'anchor': 'session',
         },
     }.items():
         started = await admin.open_fsp_chart(
-            display_name,
+            target,
             conf,
         )
@@ -815,27 +803,26 @@ async def start_fsp_displays(
         disabled=False
     )

-    # async with gather_contexts((
     async with (
         # NOTE: this admin internally opens an actor cluster
         open_fsp_admin(linked, ohlcv) as admin,
     ):
         statuses = []
-        for display_name, conf in fsp_conf.items():
+        for target, conf in fsp_conf.items():
             started = await admin.open_fsp_chart(
-                display_name,
+                target,
                 conf,
             )
             done = linked.window().status_bar.open_status(
-                f'loading fsp, {display_name}..',
+                f'loading fsp, {target}..',
                 group_key=group_status_key,
             )
             statuses.append((started, done))

         for fsp_loaded, status_cb in statuses:
             await fsp_loaded.wait()
-            profiler(f'attached to fsp portal: {display_name}')
+            profiler(f'attached to fsp portal: {target}')
             status_cb()

     # blocks on nursery until all fsp actors complete


@@ -1,7 +1,7 @@
 # we require a pinned dev branch to get some edge features that
 # are often untested in tractor's CI and/or being tested by us
 # first before committing as core features in tractor's base.
--e git+git://github.com/goodboy/tractor.git@piker_pin#egg=tractor
+-e git+git://github.com/goodboy/tractor.git@master#egg=tractor

 # `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 # pin this to a dev branch that we have more control over especially