Merge pull request #169 from pikers/tractor_open_stream_from
Port to new tractor stream api (binance_backend)

commit 9de02321d8

@@ -20,7 +20,6 @@ Real-time data feed machinery
import time
from functools import partial
from dataclasses import dataclass, field
from itertools import cycle
import socket
import json
from types import ModuleType

@@ -31,7 +30,6 @@ from typing import (
    Sequence
)
import contextlib
from operator import itemgetter

import trio
import tractor

@@ -182,6 +180,8 @@ async def symbol_data(broker: str, tickers: List[str]):

_feeds_cache = {}


# TODO: use the version of this from .api ?
@asynccontextmanager
async def get_cached_feed(
    brokername: str,

@@ -326,6 +326,7 @@ class DataFeed:
        self.quote_gen = None
        self._symbol_data_cache: Dict[str, Any] = {}

    @asynccontextmanager
    async def open_stream(
        self,
        symbols: Sequence[str],

@@ -351,31 +352,22 @@ class DataFeed:
        # subscribe for tickers (this performs a possible filtering
        # where invalid symbols are discarded)
        sd = await self.portal.run(
            "piker.brokers.data",
            'symbol_data',
            symbol_data,
            broker=self.brokermod.name,
            tickers=symbols
        )
        self._symbol_data_cache.update(sd)

        if test:
            # stream from a local test file
            quote_gen = await self.portal.run(
                "piker.brokers.data",
                'stream_from_file',
                filename=test,
            )
        else:
            log.info(f"Starting new stream for {symbols}")

            # start live streaming from broker daemon
            quote_gen = await self.portal.run(
                "piker.brokers.data",
                'start_quote_stream',
            async with self.portal.open_stream_from(
                start_quote_stream,
                broker=self.brokermod.name,
                symbols=symbols,
                feed_type=feed_type,
                rate=rate,
            )
            ) as quote_gen:

            # get first quotes response
            log.debug(f"Waiting on first quote for {symbols}...")

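The hunk above is the core of the port: the old ``Portal.run()`` call addressed the remote streaming function by module-path and name strings and handed back a bare async generator, while the new ``Portal.open_stream_from()`` takes the function object directly and wraps the stream in an async context manager so it gets closed when the block exits. A minimal sketch of the two styles (the ``stream_quotes`` entrypoint and the ``portal`` wiring here are illustrative assumptions, not part of the diff):

from typing import AsyncIterator

import trio
import tractor


async def stream_quotes(symbols: list) -> AsyncIterator[dict]:
    # hypothetical streaming entrypoint a broker daemon might expose;
    # NOT part of the diff above, purely for illustration
    while True:
        yield {sym: {'last': 0.0} for sym in symbols}
        await trio.sleep(1)


async def consume_quotes(portal: tractor._portal.Portal) -> None:
    # old api (pre-PR): address the remote function by module-path and
    # name strings and get back a bare async generator:
    #
    #   quote_gen = await portal.run(
    #       'piker.brokers.data', 'stream_quotes', symbols=['XYZ'],
    #   )
    #
    # new api: pass the function object and let the context manager
    # tear the stream down when the block exits (or errors)
    async with portal.open_stream_from(
        stream_quotes,
        symbols=['XYZ'],
    ) as quote_gen:
        async for quotes in quote_gen:
            print(quotes)
            break  # exiting the block closes the remote stream
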
@@ -384,7 +376,8 @@ class DataFeed:

            self.quote_gen = quote_gen
            self.first_quotes = quotes
            return quote_gen, quotes
            yield quote_gen, quotes

        except Exception:
            if self.quote_gen:
                await self.quote_gen.aclose()

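Because ``DataFeed.open_stream()`` is now decorated with ``@asynccontextmanager``, the ``return`` of the ``(quote_gen, quotes)`` pair becomes a ``yield`` while the error handling stays wrapped around it. A generic sketch of that conversion, under the assumption of a made-up ``make_stream`` factory (names are illustrative only):

from contextlib import asynccontextmanager
from typing import AsyncGenerator, Callable


@asynccontextmanager
async def open_stream_like(
    make_stream: Callable[[], AsyncGenerator[dict, None]],
):
    # yield instead of return so callers must ``async with`` the
    # stream and teardown always runs, mirroring the change above
    stream = make_stream()
    try:
        first = await stream.__anext__()  # prime with the first msg
        yield stream, first
    finally:
        await stream.aclose()
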
@@ -406,8 +399,7 @@ class DataFeed:
        """Call a broker ``Client`` method using RPC and return result.
        """
        return await self.portal.run(
            'piker.brokers.data',
            'call_client',
            call_client,
            broker=self.brokermod.name,
            methname=method,
            **kwargs

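``Portal.run()`` itself also changes calling convention in this PR: instead of a ``('module.path', 'func_name')`` string pair it now takes the function object. A small hedged sketch (the ``get_quote`` endpoint is made up; only the calling convention is taken from the diff):

import tractor


async def get_quote(symbol: str) -> dict:
    # hypothetical remote function a broker daemon might expose;
    # stands in for the real ``call_client`` endpoint
    return {'symbol': symbol, 'last': 0.0}


async def call_remote(portal: tractor._portal.Portal) -> dict:
    # old: await portal.run('piker.brokers.data', 'get_quote', symbol='XYZ')
    # new: pass the function reference, kwargs stay the same
    return await portal.run(get_quote, symbol='XYZ')
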
@@ -425,9 +417,11 @@ async def stream_to_file(
    """Record client side received quotes to file ``filename``.
    """
    # an async generator instance
    agen = await portal.run(
        "piker.brokers.data", 'start_quote_stream',
        broker=brokermod.name, symbols=tickers)
    async with portal.open_stream_from(
        start_quote_stream,
        broker=brokermod.name,
        symbols=tickers
    ) as agen:

        fname = filename or f'{watchlist_name}.jsonstream'
        with open(fname, 'a') as f:

@@ -438,14 +432,14 @@ async def stream_to_file(
    return fname


async def stream_from_file(
    filename: str,
):
    with open(filename, 'r') as quotes_file:
        content = quotes_file.read()
# async def stream_from_file(
#     filename: str,
# ):
#     with open(filename, 'r') as quotes_file:
#         content = quotes_file.read()

    pkts = content.split('--')[:-1]  # simulate 2 separate quote packets
    payloads = [json.loads(pkt) for pkt in pkts]
    for payload in cycle(payloads):
        yield payload
        await trio.sleep(0.3)
#     pkts = content.split('--')[:-1]  # simulate 2 separate quote packets
#     payloads = [json.loads(pkt) for pkt in pkts]
#     for payload in cycle(payloads):
#         yield payload
#         await trio.sleep(0.3)

@@ -246,13 +246,14 @@ async def open_ems(

    async with maybe_open_emsd(broker) as portal:

        trades_stream = await portal.run(
        async with portal.open_stream_from(

            _emsd_main,
            client_actor_name=actor.name,
            broker=broker,
            symbol=symbol.key,

        )
        ) as trades_stream:
            with trio.fail_after(10):
                await book._ready_to_receive.wait()

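With the EMS trade stream entered as a context manager, the startup synchronization (waiting on the book's ready event) now happens inside the block, still bounded by ``trio.fail_after()``. A rough sketch of that shape, where ``trades_entrypoint`` and the ``ready`` event are stand-ins for the real EMS plumbing:

import trio
import tractor


async def trades_entrypoint(client_actor_name: str):
    # stand-in for the real ``_emsd_main`` streaming entrypoint
    while True:
        yield {'client': client_actor_name, 'status': 'filled'}
        await trio.sleep(1)


async def open_trades(
    portal: tractor._portal.Portal,
    ready: trio.Event,
) -> None:
    async with portal.open_stream_from(
        trades_entrypoint,
        client_actor_name='chart',
    ) as trades_stream:
        # bound the startup handshake so a hung daemon fails fast
        with trio.fail_after(10):
            await ready.wait()

        async for msg in trades_stream:
            print(msg)
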
@@ -339,9 +339,9 @@ async def process_broker_trades(
    """
    broker = feed.mod.name

    with trio.fail_after(5):
        # TODO: make this a context
        # in the paper engine case this is just a mem receive channel
        trades_stream = await feed.recv_trades_data()
    async with feed.receive_trades_data() as trades_stream:
        first = await trades_stream.__anext__()

        # startup msg expected as first from broker backend

@@ -426,7 +426,8 @@ async def process_broker_trades(
    # - PendingSubmit
    # - PendingCancel
    # - PreSubmitted (simulated orders)
    # - ApiCancelled (cancelled by client before submission to routing)
    # - ApiCancelled (cancelled by client before submission
    #   to routing)
    # - Cancelled
    # - Filled
    # - Inactive (reject or cancelled but not by trader)

@@ -675,10 +676,10 @@ async def _emsd_main(
    # acting as an EMS client and will submit orders) to
    # receive requests pushed over a tractor stream
    # using (for now) an async generator.
    order_stream = await portal.run(
    async with portal.open_stream_from(
        send_order_cmds,
        symbol_key=symbol,
    )
    ) as order_stream:

        # start inbound order request processing
        await process_order_cmds(

@@ -51,6 +51,7 @@ from ._sampling import (
    iter_ohlc_periods,
    sample_and_broadcast,
)
from .ingest import get_ingestormod


log = get_logger(__name__)

@@ -302,6 +303,7 @@ class Feed:
    async def receive(self) -> dict:
        return await self.stream.__anext__()

    @asynccontextmanager
    async def index_stream(
        self,
        delay_s: Optional[int] = None

@@ -312,14 +314,16 @@ class Feed:
            # XXX: this should be singleton on a host,
            # a lone broker-daemon per provider should be
            # created for all practical purposes
            self._index_stream = await self._brokerd_portal.run(
            async with self._brokerd_portal.open_stream_from(
                iter_ohlc_periods,
                delay_s=delay_s or self._max_sample_rate,
            )
            ) as self._index_stream:
                yield self._index_stream
        else:
            yield self._index_stream

        return self._index_stream

    async def recv_trades_data(self) -> AsyncIterator[dict]:
    @asynccontextmanager
    async def receive_trades_data(self) -> AsyncIterator[dict]:

        if not getattr(self.mod, 'stream_trades', False):
            log.warning(

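``Feed.index_stream()`` keeps its lazily-cached behaviour but is now an ``@asynccontextmanager``: the first caller opens the remote stream and yields it, later callers get the cached handle. A simplified sketch of that caching shape with generic names (``tick_every`` stands in for ``iter_ohlc_periods``; this is not the piker class itself):

from contextlib import asynccontextmanager
from typing import Optional

import trio
import tractor


async def tick_every(delay_s: int):
    # stand-in for the remote ``iter_ohlc_periods`` entrypoint
    i = 0
    while True:
        yield i
        i += 1
        await trio.sleep(delay_s)


class CachedFeed:
    def __init__(self, portal: tractor._portal.Portal) -> None:
        self._portal = portal
        self._index_stream = None

    @asynccontextmanager
    async def index_stream(self, delay_s: Optional[int] = None):
        if self._index_stream is None:
            # first entry: open the remote stream and cache the handle
            async with self._portal.open_stream_from(
                tick_every,
                delay_s=delay_s or 1,
            ) as stream:
                self._index_stream = stream
                yield stream
        else:
            # later entries hand back the already-open stream
            yield self._index_stream

Note the first caller owns the stream's lifetime: when its block exits the underlying stream closes for everyone, which mirrors the diff's ``) as self._index_stream:`` construction.
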
@@ -333,7 +337,7 @@ class Feed:
        # using the ``_.set_fake_trades_stream()`` method
        if self._trade_stream is None:

            self._trade_stream = await self._brokerd_portal.run(
            async with self._brokerd_portal.open_stream_from(

                self.mod.stream_trades,

@@ -342,9 +346,10 @@ class Feed:
                # in messages, though we could probably use
                # more then one?
                topics=['local_trades'],
            )

            return self._trade_stream
            ) as self._trade_stream:
                yield self._trade_stream
        else:
            yield self._trade_stream


def sym_to_shm_key(

@@ -373,17 +378,17 @@ async def open_feed(
    # TODO: do all!
    sym = symbols[0]

    async with maybe_spawn_brokerd(
        brokername,
        loglevel=loglevel,
    ) as portal:
    # TODO: compress these to one line with py3.9+
    async with maybe_spawn_brokerd(brokername, loglevel=loglevel) as portal:

        async with portal.open_stream_from(

            stream = await portal.run(
            attach_feed_bus,
            brokername=brokername,
            symbol=sym,
            loglevel=loglevel,
        )
            loglevel=loglevel

        ) as stream:

            # TODO: can we make this work better with the proposed
            # context based bidirectional streaming style api proposed in:

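``open_feed()`` now stacks two context managers: the first spawns (or finds) the broker daemon and hands back a portal, the second opens the quote stream from an entrypoint in that daemon. A generic sketch of that nesting using only ``tractor`` primitives; ``bus_entrypoint`` and the actor name are assumptions standing in for ``attach_feed_bus`` and ``maybe_spawn_brokerd``:

import trio
import tractor


async def bus_entrypoint(symbol: str):
    # stand-in for ``attach_feed_bus``: stream quote msgs for ``symbol``
    while True:
        yield {'symbol': symbol, 'last': 0.0}
        await trio.sleep(1)


async def open_feed_like(sym: str) -> None:
    async with tractor.open_nursery() as an:
        # spawn a daemon actor (piker instead reuses a cached brokerd)
        portal = await an.start_actor(
            'brokerd_sketch',
            enable_modules=[__name__],
        )
        # then open the stream from an entrypoint living in that actor
        async with portal.open_stream_from(
            bus_entrypoint,
            symbol=sym,
        ) as stream:
            async for quote in stream:
                print(quote)
                break

        await portal.cancel_actor()
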
@@ -167,7 +167,8 @@ async def cascade(
    # Increment the underlying shared memory buffer on every
    # "increment" msg received from the underlying data feed.

    async for msg in await feed.index_stream():
    async with feed.index_stream() as stream:
        async for msg in stream:

            new_len = len(src.array)

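The consumer-side change is mechanical: ``async for msg in await feed.index_stream():`` becomes an ``async with`` block wrapping the ``async for``, since the stream is no longer returned directly but managed. In sketch form, where ``feed`` is any object exposing the new context-manager style method:

async def consume_index(feed) -> None:
    # old: the method returned a bare async generator
    #
    #   async for msg in await feed.index_stream():
    #       ...
    #
    # new: enter the stream, then iterate; exiting the block closes it
    async with feed.index_stream() as stream:
        async for msg in stream:
            if msg is None:  # illustrative stop condition
                break
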
@@ -19,6 +19,7 @@ High level Qt chart widgets.

"""
from typing import Tuple, Dict, Any, Optional, Callable
from types import ModuleType
from functools import partial

from PyQt5 import QtCore, QtGui

@@ -26,6 +27,7 @@ import numpy as np
import pyqtgraph as pg
import tractor
import trio
from trio_typing import TaskStatus

from ._axes import (
    DynamicDateAxis,

@@ -53,6 +55,7 @@ from ._style import (
    _bars_to_left_in_follow_mode,
)
from ..data._source import Symbol
from ..data._sharedmem import ShmArray
from .. import brokers
from .. import data
from ..data import maybe_open_shm_array

@@ -128,7 +131,8 @@ class ChartSpace(QtGui.QWidget):
        # self.toolbar_layout.addWidget(self.strategy_box)
    def load_symbol(
        self,
        symbol: Symbol,
        brokername: str,
        symbol_key: str,
        data: np.ndarray,
        ohlc: bool = True,
    ) -> None:

@@ -136,12 +140,6 @@ class ChartSpace(QtGui.QWidget):

        Expects a ``numpy`` structured array containing all the ohlcv fields.
        """
        # XXX: let's see if this causes mem problems
        self.window.setWindowTitle(
            f'{symbol.key}@{symbol.brokers} '
            f'tick:{symbol.tick_size}'
        )

        # TODO: symbol search
        # # of course this doesn't work :eyeroll:
        # h = _font.boundingRect('Ag').height()

@@ -151,19 +149,18 @@ class ChartSpace(QtGui.QWidget):
        # self.symbol_label.setText(f'/`{symbol}`')

        linkedcharts = self._chart_cache.setdefault(
            symbol.key,
            LinkedSplitCharts(symbol)
            symbol_key,
            LinkedSplitCharts(self)
        )
        self.linkedcharts = linkedcharts

        # remove any existing plots
        if not self.v_layout.isEmpty():
            self.v_layout.removeWidget(linkedcharts)

        main_chart = linkedcharts.plot_ohlc_main(symbol, data)

        self.v_layout.addWidget(linkedcharts)

        return linkedcharts, main_chart
        return linkedcharts

    # TODO: add signalling painter system
    # def add_signals(self):

@@ -187,13 +184,14 @@ class LinkedSplitCharts(QtGui.QWidget):

    def __init__(
        self,
        symbol: Symbol,
        chart_space: ChartSpace,
    ) -> None:
        super().__init__()
        self.signals_visible: bool = False
        self._cursor: Cursor = None  # crosshair graphics
        self.chart: ChartPlotWidget = None  # main (ohlc) chart
        self.subplots: Dict[Tuple[str, ...], ChartPlotWidget] = {}
        self.chart_space = chart_space

        self.xaxis = DynamicDateAxis(
            orientation='bottom',

@@ -215,7 +213,7 @@ class LinkedSplitCharts(QtGui.QWidget):
        self.layout.addWidget(self.splitter)

        # state tracker?
        self._symbol: Symbol = symbol
        self._symbol: Symbol = None

    @property
    def symbol(self) -> Symbol:

@@ -939,135 +937,6 @@ async def test_bed(
    # rlabel.setPos(vb_right - 2*w, d_coords.y())


async def _async_main(
    # implicit required argument provided by ``qtractor_run()``
    widgets: Dict[str, Any],

    sym: str,
    brokername: str,
    loglevel: str,

) -> None:
    """Main Qt-trio routine invoked by the Qt loop with
    the widgets ``dict``.
    """
    chart_app = widgets['main']

    # attempt to configure DPI aware font size
    _font.configure_to_dpi(current_screen())

    # chart_app.init_search()

    # historical data fetch
    brokermod = brokers.get_brokermod(brokername)

    async with data.open_feed(
        brokername,
        [sym],
        loglevel=loglevel,
    ) as feed:

        ohlcv = feed.shm
        bars = ohlcv.array
        symbol = feed.symbols[sym]

        # load in symbol's ohlc data
        linked_charts, chart = chart_app.load_symbol(symbol, bars)

        # plot historical vwap if available
        wap_in_history = False

        if brokermod._show_wap_in_history:

            if 'bar_wap' in bars.dtype.fields:
                wap_in_history = True
                chart.draw_curve(
                    name='bar_wap',
                    data=bars,
                    add_label=False,
                )

        # size view to data once at outset
        chart._set_yrange()

        # TODO: a data view api that makes this less shit
        chart._shm = ohlcv

        # TODO: eventually we'll support some kind of n-compose syntax
        fsp_conf = {
            'rsi': {
                'period': 14,
                'chart_kwargs': {
                    'static_yrange': (0, 100),
                },
            },

        }

        # make sure that the instrument supports volume history
        # (sometimes this is not the case for some commodities and
        # derivatives)
        volm = ohlcv.array['volume']
        if (
            np.all(np.isin(volm, -1)) or
            np.all(np.isnan(volm))
        ):
            log.warning(
                f"{sym} does not seem to have volume info,"
                " dropping volume signals")
        else:
            fsp_conf.update({
                'vwap': {
                    'overlay': True,
                    'anchor': 'session',
                },
            })

        async with trio.open_nursery() as n:

            # load initial fsp chain (otherwise known as "indicators")
            n.start_soon(
                spawn_fsps,
                linked_charts,
                fsp_conf,
                sym,
                ohlcv,
                brokermod,
                loglevel,
            )

            # start graphics update loop(s)after receiving first live quote
            n.start_soon(
                chart_from_quotes,
                chart,
                feed.stream,
                ohlcv,
                wap_in_history,
            )

            # wait for a first quote before we start any update tasks
            quote = await feed.receive()

            log.info(f'Received first quote {quote}')

            n.start_soon(
                check_for_new_bars,
                feed,
                # delay,
                ohlcv,
                linked_charts
            )

            # interactive testing
            # n.start_soon(
            #     test_bed,
            #     ohlcv,
            #     chart,
            #     linked_charts,
            # )
            await start_order_mode(chart, symbol, brokername)


async def chart_from_quotes(
    chart: ChartPlotWidget,
    stream,

@@ -1245,7 +1114,7 @@ async def spawn_fsps(

    """
    # spawns sub-processes which execute cpu bound FSP code
    async with tractor.open_nursery() as n:
    async with tractor.open_nursery(loglevel=loglevel) as n:

        # spawns local task that consume and chart data streams from
        # sub-procs

@@ -1280,66 +1149,36 @@ async def spawn_fsps(

            conf['shm'] = shm

            # spawn closure, can probably define elsewhere
            async def spawn_fsp_daemon(
                fsp_name: str,
                display_name: str,
                conf: dict,
            ):
                """Start an fsp subactor async.

                """
                # print(f'FSP NAME: {fsp_name}')
                portal = await n.run_in_actor(

                    # subactor entrypoint
                    fsp.cascade,

                    # name as title of sub-chart
            portal = await n.start_actor(
                enable_modules=['piker.fsp'],
                name=display_name,
                    brokername=brokermod.name,
                    src_shm_token=src_shm.token,
                    dst_shm_token=conf['shm'].token,
                    symbol=sym,
                    fsp_func_name=fsp_name,

                    # tractor config
                    loglevel=loglevel,
                )

                stream = await portal.result()

                # receive last index for processed historical
                # data-array as first msg
                _ = await stream.receive()

                conf['stream'] = stream
                conf['portal'] = portal

            # new local task
            # init async
            ln.start_soon(
                spawn_fsp_daemon,
                run_fsp,
                portal,
                linked_charts,
                brokermod,
                sym,
                src_shm,
                fsp_func_name,
                display_name,
                conf,
            )

        # blocks here until all daemons up

        # start and block on update loops
        async with trio.open_nursery() as ln:
            for fsp_func_name, conf in fsps.items():
                ln.start_soon(
                    update_signals,
                    linked_charts,
                    fsp_func_name,
                    conf,
                )
        # blocks here until all fsp actors complete


async def update_signals(
async def run_fsp(

    portal: tractor._portal.Portal,
    linked_charts: LinkedSplitCharts,
    brokermod: ModuleType,
    sym: str,
    src_shm: ShmArray,
    fsp_func_name: str,
    display_name: str,
    conf: Dict[str, Any],

) -> None:

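The FSP spawning logic splits what was one ``run_in_actor()`` call (spawn, run the streaming function, then ``await portal.result()`` for the generator) into two steps: ``start_actor()`` with the fsp module enabled, and a later ``open_stream_from()`` made by the consumer task. A hedged sketch of that two-step shape, where ``cascade_like`` is only a stand-in for ``fsp.cascade``:

import trio
import tractor


async def cascade_like(fsp_func_name: str):
    # stand-in for ``fsp.cascade``: emit processed-index updates
    index = 0
    while True:
        yield {'fsp': fsp_func_name, 'index': index}
        index += 1
        await trio.sleep(1)


async def spawn_then_stream() -> None:
    async with tractor.open_nursery() as an:
        # step 1: just spawn the daemon; nothing runs in it yet
        portal = await an.start_actor(
            'rsi',
            enable_modules=[__name__],
        )

        # step 2: the consumer opens the stream when it is ready
        async with portal.open_stream_from(
            cascade_like,
            fsp_func_name='rsi',
        ) as stream:
            # first msg: last processed index for historical data
            first = await stream.receive()
            print('history processed up to', first)

        await portal.cancel_actor()
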
@@ -1348,6 +1187,27 @@ async def update_signals(
    This is called once for each entry in the fsp
    config map.
    """
    async with portal.open_stream_from(

        # subactor entrypoint
        fsp.cascade,

        # name as title of sub-chart
        brokername=brokermod.name,
        src_shm_token=src_shm.token,
        dst_shm_token=conf['shm'].token,
        symbol=sym,
        fsp_func_name=fsp_func_name,

    ) as stream:

        # receive last index for processed historical
        # data-array as first msg
        _ = await stream.receive()

        conf['stream'] = stream
        conf['portal'] = portal

        shm = conf['shm']

        if conf.get('overlay'):

@@ -1453,7 +1313,8 @@ async def check_for_new_bars(feed, ohlcv, linked_charts):
    price_chart = linked_charts.chart
    price_chart.default_view()

    async for index in await feed.index_stream():
    async with feed.index_stream() as stream:
        async for index in stream:

            # update chart historical bars graphics by incrementing
            # a time step and drawing the history and new bar

@@ -1494,6 +1355,186 @@ async def check_for_new_bars(feed, ohlcv, linked_charts):
            price_chart.increment_view()


async def chart_symbol(
    chart_app: ChartSpace,
    brokername: str,
    sym: str,
    loglevel: str,
    task_status: TaskStatus[Symbol] = trio.TASK_STATUS_IGNORED,
) -> None:
    """Spawn a real-time chart widget for this symbol and app session.

    These widgets can remain up but hidden so that multiple symbols
    can be viewed and switched between extremely fast.

    """
    # historical data fetch
    brokermod = brokers.get_brokermod(brokername)

    async with data.open_feed(
        brokername,
        [sym],
        loglevel=loglevel,
    ) as feed:

        ohlcv: ShmArray = feed.shm
        bars = ohlcv.array
        symbol = feed.symbols[sym]

        task_status.started(symbol)

        # load in symbol's ohlc data
        chart_app.window.setWindowTitle(
            f'{symbol.key}@{symbol.brokers} '
            f'tick:{symbol.tick_size}'
        )

        # await tractor.breakpoint()
        linked_charts = chart_app.linkedcharts
        linked_charts._symbol = symbol
        chart = linked_charts.plot_ohlc_main(symbol, bars)

        chart.setFocus()

        # plot historical vwap if available
        wap_in_history = False

        if brokermod._show_wap_in_history:

            if 'bar_wap' in bars.dtype.fields:
                wap_in_history = True
                chart.draw_curve(
                    name='bar_wap',
                    data=bars,
                    add_label=False,
                )

        # size view to data once at outset
        chart._set_yrange()

        # TODO: a data view api that makes this less shit
        chart._shm = ohlcv

        # TODO: eventually we'll support some kind of n-compose syntax
        fsp_conf = {
            'rsi': {
                'period': 14,
                'chart_kwargs': {
                    'static_yrange': (0, 100),
                },
            },

        }

        # make sure that the instrument supports volume history
        # (sometimes this is not the case for some commodities and
        # derivatives)
        volm = ohlcv.array['volume']
        if (
            np.all(np.isin(volm, -1)) or
            np.all(np.isnan(volm))
        ):
            log.warning(
                f"{sym} does not seem to have volume info,"
                " dropping volume signals")
        else:
            fsp_conf.update({
                'vwap': {
                    'overlay': True,
                    'anchor': 'session',
                },
            })

        async with trio.open_nursery() as n:

            # load initial fsp chain (otherwise known as "indicators")
            n.start_soon(
                spawn_fsps,
                linked_charts,
                fsp_conf,
                sym,
                ohlcv,
                brokermod,
                loglevel,
            )

            # start graphics update loop(s)after receiving first live quote
            n.start_soon(
                chart_from_quotes,
                chart,
                feed.stream,
                ohlcv,
                wap_in_history,
            )

            # wait for a first quote before we start any update tasks
            quote = await feed.receive()

            log.info(f'Received first quote {quote}')

            n.start_soon(
                check_for_new_bars,
                feed,
                # delay,
                ohlcv,
                linked_charts
            )

            # interactive testing
            # n.start_soon(
            #     test_bed,
            #     ohlcv,
            #     chart,
            #     linked_charts,
            # )

            await start_order_mode(chart, symbol, brokername)


async def _async_main(
    # implicit required argument provided by ``qtractor_run()``
    widgets: Dict[str, Any],

    symbol_key: str,
    brokername: str,
    loglevel: str,

) -> None:
    """
    Main Qt-trio routine invoked by the Qt loop with the widgets ``dict``.

    Provision the "main" widget with initial symbol data and root nursery.

    """
    chart_app = widgets['main']

    # attempt to configure DPI aware font size
    _font.configure_to_dpi(current_screen())

    async with trio.open_nursery() as root_n:

        # set root nursery for spawning other charts/feeds
        # that run cached in the bg
        chart_app._root_n = root_n

        chart_app.load_symbol(brokername, symbol_key, loglevel)

        symbol = await root_n.start(
            chart_symbol,
            chart_app,
            brokername,
            symbol_key,
            loglevel,
        )

        chart_app.window.setWindowTitle(
            f'{symbol.key}@{symbol.brokers} '
            f'tick:{symbol.tick_size}'
        )

        await trio.sleep_forever()


def _main(
    sym: str,
    brokername: str,

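The new ``_async_main`` delegates per-symbol setup to ``chart_symbol()`` via ``trio``'s ``nursery.start()`` protocol: the child signals readiness, and hands back the loaded ``Symbol``, with ``task_status.started()``, so the parent only sets the window title once the feed is actually up. A minimal standalone sketch of that handoff (generic names, not the piker widgets):

import trio
from trio_typing import TaskStatus


async def load_symbol(
    key: str,
    task_status: TaskStatus[str] = trio.TASK_STATUS_IGNORED,
) -> None:
    await trio.sleep(0.1)             # pretend to open a data feed
    task_status.started(key.upper())  # hand the "symbol" back to the caller
    await trio.sleep_forever()        # keep streaming in the background


async def main() -> None:
    async with trio.open_nursery() as root_n:
        # blocks until ``task_status.started()`` fires in the child
        symbol = await root_n.start(load_symbol, 'xyz')
        print(f'feed ready for {symbol}')
        root_n.cancel_scope.cancel()


trio.run(main)
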
@@ -179,12 +179,12 @@ async def _async_main(
    This is started with cli cmd `piker monitor`.
    '''
    feed = DataFeed(portal, brokermod)
    quote_gen, first_quotes = await feed.open_stream(
    async with feed.open_stream(
        symbols,
        'stock',
        rate=rate,
        test=test,
    )
    ) as (quote_gen, first_quotes):
        first_quotes_list = list(first_quotes.copy().values())
        quotes = list(first_quotes.copy().values())