commit 9c821c8cfd

@@ -19,6 +19,8 @@ piker: trading gear for hackers.
 """
 import msgpack # noqa
 
+# TODO: remove this now right?
 import msgpack_numpy
 
 # patch msgpack for numpy arrays
@@ -19,8 +19,9 @@ Structured, daemon tree service management.
 
 """
 from functools import partial
-from typing import Optional, Union
-from contextlib import asynccontextmanager
+from typing import Optional, Union, Callable, Any
+from contextlib import asynccontextmanager, AsyncExitStack
+from collections import defaultdict
 
 from pydantic import BaseModel
 import trio

@@ -33,6 +34,10 @@ from .brokers import get_brokermod
 log = get_logger(__name__)
 
 _root_dname = 'pikerd'
+_tractor_kwargs: dict[str, Any] = {
+    # use a different registry addr then tractor's default
+    'arbiter_addr': ('127.0.0.1', 6116),
+}
 _root_modules = [
     __name__,
     'piker.clearing._ems',
@@ -44,10 +49,34 @@ class Services(BaseModel):
     actor_n: tractor._trionics.ActorNursery
     service_n: trio.Nursery
     debug_mode: bool # tractor sub-actor debug mode flag
+    ctx_stack: AsyncExitStack
 
     class Config:
         arbitrary_types_allowed = True
 
+    async def open_remote_ctx(
+        self,
+        portal: tractor.Portal,
+        target: Callable,
+        **kwargs,
+
+    ) -> tractor.Context:
+        '''
+        Open a context in a service sub-actor, add to a stack
+        that gets unwound at ``pikerd`` teardown.
+
+        This allows for allocating long-running sub-services in our main
+        daemon and explicitly controlling their lifetimes.
+
+        '''
+        ctx, first = await self.ctx_stack.enter_async_context(
+            portal.open_context(
+                target,
+                **kwargs,
+            )
+        )
+        return ctx
+
+
 _services: Optional[Services] = None
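Review note: the new `ctx_stack` field is what lets `Services` hold remote contexts open past any single `async with` block. A minimal sketch of the underlying `contextlib.AsyncExitStack` mechanics (the `remote_ctx` manager here is an illustrative stand-in for `portal.open_context(...)`, not piker's API):

```python
import trio
from contextlib import AsyncExitStack, asynccontextmanager

@asynccontextmanager
async def remote_ctx(name: str):
    # stand-in for ``portal.open_context(...)``
    print(f'open {name}')
    try:
        yield name
    finally:
        print(f'close {name}')

async def main():
    async with AsyncExitStack() as stack:
        # enter_async_context() keeps each manager open *past* this
        # call; everything unwinds LIFO only when the stack itself exits
        for name in ('brokerd.kraken', 'emsd'):
            await stack.enter_async_context(remote_ctx(name))
        print('daemon running...')
    # <- both contexts are closed here, i.e. at daemon teardown

trio.run(main)
```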
@@ -62,20 +91,21 @@ async def open_pikerd(
     debug_mode: bool = False,
 
 ) -> Optional[tractor._portal.Portal]:
-    """
+    '''
     Start a root piker daemon who's lifetime extends indefinitely
     until cancelled.
 
     A root actor nursery is created which can be used to create and keep
     alive underling services (see below).
 
-    """
+    '''
     global _services
     assert _services is None
 
     # XXX: this may open a root actor as well
     async with tractor.open_root_actor(
         # passed through to ``open_root_actor``
+        arbiter_addr=_tractor_kwargs['arbiter_addr'],
         name=_root_dname,
         loglevel=loglevel,
         debug_mode=debug_mode,

@@ -90,11 +120,15 @@ async def open_pikerd(
     ) as _, tractor.open_nursery() as actor_nursery:
         async with trio.open_nursery() as service_nursery:
 
+            # setup service mngr singleton instance
+            async with AsyncExitStack() as stack:
+
             # assign globally for future daemon/task creation
             _services = Services(
                 actor_n=actor_nursery,
                 service_n=service_nursery,
                 debug_mode=debug_mode,
+                ctx_stack=stack,
             )
 
             yield _services
@@ -140,6 +174,7 @@ async def maybe_open_pikerd(
         # presume pikerd role
         async with open_pikerd(
             loglevel=loglevel,
+            debug_mode=kwargs.get('debug_mode', False),
         ) as _:
             # in the case where we're starting up the
             # tractor-piker runtime stack in **this** process
@@ -193,17 +228,19 @@ async def spawn_brokerd(
     # call with and then have the ability to unwind the call whenevs.
 
     # non-blocking setup of brokerd service nursery
-    _services.service_n.start_soon(
-        partial(
-            portal.run,
+    await _services.open_remote_ctx(
+        portal,
         _setup_persistent_brokerd,
         brokername=brokername,
     )
-    )
 
     return dname
 
 
+class Brokerd:
+    locks = defaultdict(trio.Lock)
+
+
 @asynccontextmanager
 async def maybe_spawn_brokerd(
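The practical difference in `spawn_brokerd`: the old `service_n.start_soon(partial(portal.run, ...))` fired the remote call off with no handle for unwinding it, while `open_remote_ctx()` pushes the `portal.open_context(...)` manager onto the daemon's exit stack, so the brokerd-side service is torn down deterministically, and in order, when `pikerd` exits.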
@@ -222,9 +259,15 @@ async def maybe_spawn_brokerd(
 
     dname = f'brokerd.{brokername}'
 
+    # serialize access to this section to avoid
+    # 2 or more tasks racing to create a daemon
+    lock = Brokerd.locks[brokername]
+    await lock.acquire()
+
     # attach to existing brokerd if possible
     async with tractor.find_actor(dname) as portal:
         if portal is not None:
+            lock.release()
             yield portal
             return

@@ -249,6 +292,7 @@ async def maybe_spawn_brokerd(
     )
 
     async with tractor.wait_for_actor(dname) as portal:
+        lock.release()
        yield portal
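The `defaultdict(trio.Lock)` pattern works because `trio.Lock()` takes no arguments, so the factory can mint a fresh per-name lock on first access. A runnable sketch of the race it prevents (names are illustrative):

```python
import trio
from collections import defaultdict

# one lock per service name; defaultdict creates it on first use
locks = defaultdict(trio.Lock)
running = set()

async def maybe_spawn(name: str) -> None:
    # serialize this section so two tasks can't both spawn `name`
    async with locks[name]:
        if name in running:
            print(f'{name}: attaching to existing daemon')
            return
        await trio.sleep(0.1)  # simulate spawn latency
        running.add(name)
        print(f'{name}: spawned')

async def main():
    async with trio.open_nursery() as n:
        for _ in range(3):
            n.start_soon(maybe_spawn, 'brokerd.kraken')

trio.run(main)
```

Note the diff releases the lock manually on each exit path; wrapping the section in `async with lock:` (as above) or a `try/finally` would guarantee release even on error.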
@@ -341,6 +341,10 @@ def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
 class AutoReconWs:
     """Make ``trio_websocketw` sockets stay up no matter the bs.
 
+    TODO:
+    apply any more insights from this:
+    https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
+
     """
     recon_errors = (
         ConnectionClosed,
@@ -179,6 +179,11 @@ async def execute_triggers(
                 # majority of iterations will be non-matches
                 continue
 
+            action = cmd['action']
+
+            if action != 'alert':
+                # executable order submission
+
             # submit_price = price + price*percent_away
             submit_price = price + abs_diff_away
 

@@ -205,6 +210,10 @@ async def execute_triggers(
             # register broker request id to ems id
             book._broker2ems_ids[reqid] = oid
 
+            else:
+                # alerts have no broker request id
+                reqid = ''
+
             resp = {
                 'resp': 'dark_executed',
                 'time_ns': time.time_ns(),
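The guard added here splits the trigger-fire path: only real orders go out to the broker, while alerts are resolved locally with an empty request id. A condensed, hypothetical rendering of that branch (the `fire_trigger` helper and its stand-in ids are made up for illustration):

```python
import time

def fire_trigger(cmd: dict, price: float, abs_diff_away: float) -> dict:
    if cmd['action'] != 'alert':
        # executable order submission would happen here
        submit_price = price + abs_diff_away
        reqid = f'req-{id(cmd)}'  # stand-in for the broker's returned id
    else:
        # alerts have no broker request id
        reqid = ''
    return {
        'resp': 'dark_executed',
        'reqid': reqid,
        'time_ns': time.time_ns(),
    }

print(fire_trigger({'action': 'alert'}, 100.0, 0.5))
print(fire_trigger({'action': 'buy'}, 100.0, 0.5))
```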
@@ -233,19 +242,20 @@ async def execute_triggers(
 
 
 async def exec_loop(
 
     ctx: tractor.Context,
     feed: 'Feed', # noqa
     broker: str,
     symbol: str,
     _exec_mode: str,
     task_status: TaskStatus[dict] = trio.TASK_STATUS_IGNORED,
 
 ) -> AsyncIterator[dict]:
     """Main scan loop for order execution conditions and submission
     to brokers.
 
     """
-    # TODO: get initial price quote from target broker
+    # XXX: this should be initial price quote from target provider
     first_quote = await feed.receive()
 
     book = get_dark_book(broker)
@@ -342,6 +352,7 @@ async def process_broker_trades(
     # TODO: make this a context
     # in the paper engine case this is just a mem receive channel
     async with feed.receive_trades_data() as trades_stream:
+
         first = await trades_stream.__anext__()
 
         # startup msg expected as first from broker backend
@@ -642,9 +653,6 @@ async def _emsd_main(
 
     dark_book = get_dark_book(broker)
 
-    # get a portal back to the client
-    async with tractor.wait_for_actor(client_actor_name) as portal:
-
     # spawn one task per broker feed
     async with trio.open_nursery() as n:
 

@@ -655,6 +663,18 @@ async def _emsd_main(
             loglevel='info',
         ) as feed:
 
+            # get a portal back to the client
+            async with tractor.wait_for_actor(client_actor_name) as portal:
+
+                # connect back to the calling actor (the one that is
+                # acting as an EMS client and will submit orders) to
+                # receive requests pushed over a tractor stream
+                # using (for now) an async generator.
+                async with portal.open_stream_from(
+                    send_order_cmds,
+                    symbol_key=symbol,
+                ) as order_stream:
+
                     # start the condition scan loop
                     quote, feed, client = await n.start(
                         exec_loop,
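Moving the client portal and order stream *inside* the feed block changes the unwind order: nested async context managers exit LIFO, so the order stream now closes before the feed it depends on. A tiny runnable demonstration of that ordering (the `scope` manager is illustrative):

```python
import trio
from contextlib import asynccontextmanager

@asynccontextmanager
async def scope(name: str):
    print(f'enter {name}')
    try:
        yield name
    finally:
        print(f'exit {name}')

async def main():
    # nested `async with` unwinds LIFO: the innermost (the order
    # stream here) closes before the outer feed on teardown
    async with scope('feed') as feed, scope('order_stream') as stream:
        print(f'running with {feed} + {stream}')

trio.run(main)
# -> enter feed, enter order_stream, running..., exit order_stream, exit feed
```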
@@ -672,15 +692,6 @@ async def _emsd_main(
                         dark_book,
                     )
 
-            # connect back to the calling actor (the one that is
-            # acting as an EMS client and will submit orders) to
-            # receive requests pushed over a tractor stream
-            # using (for now) an async generator.
-            async with portal.open_stream_from(
-                send_order_cmds,
-                symbol_key=symbol,
-            ) as order_stream:
-
                     # start inbound order request processing
                     await process_order_cmds(
                         ctx,
@@ -37,7 +37,7 @@ _context_defaults = dict(
 def pikerd(loglevel, host, tl, pdb):
     """Spawn the piker broker-daemon.
     """
-    from .._daemon import _data_mods, open_pikerd
+    from .._daemon import open_pikerd
     log = get_console_log(loglevel)
 
     if pdb:

@@ -112,11 +112,11 @@ def services(config, tl, names):
 
 
 def _load_clis() -> None:
-    from ..data import marketstore as _
-    from ..data import cli as _
-    from ..brokers import cli as _  # noqa
-    from ..ui import cli as _  # noqa
-    from ..watchlists import cli as _  # noqa
+    from ..data import marketstore  # noqa
+    from ..data import cli  # noqa
+    from ..brokers import cli  # noqa
+    from ..ui import cli  # noqa
+    from ..watchlists import cli  # noqa
 
 
 # load downstream cli modules
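Style note on the `_load_clis` change: both forms import a module purely for its side effects (registering click sub-commands), but `# noqa` states the intent to the linter directly instead of binding and re-binding a throwaway `_` alias. A schematic equivalent using stdlib modules as stand-ins:

```python
# side-effect-only imports: both silence flake8's "unused import",
# but `# noqa` says "unused on purpose" without the dummy name
import encodings.idna as _        # old style: throwaway alias
import encodings.punycode  # noqa  # new style

print('codecs registered')
```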
@@ -227,7 +227,7 @@ async def sample_and_broadcast(
             # end up triggering backpressure which which will
             # eventually block this producer end of the feed and
             # thus other consumers still attached.
-            subs = bus.subscribers[sym]
+            subs = bus._subscribers[sym]
             for ctx in subs:
                 # print(f'sub is {ctx.chan.uid}')
                 try:

@@ -236,5 +236,8 @@ async def sample_and_broadcast(
                     trio.BrokenResourceError,
                     trio.ClosedResourceError
                 ):
-                    subs.remove(ctx)
+                    # XXX: do we need to deregister here
+                    # if it's done in the fee bus code?
+                    # so far seems like no since this should all
+                    # be single-threaded.
                     log.error(f'{ctx.chan.uid} dropped connection')
@@ -67,11 +67,19 @@ class _FeedsBus(BaseModel):
     brokername: str
     nursery: trio.Nursery
     feeds: Dict[str, trio.CancelScope] = {}
-    subscribers: Dict[str, List[tractor.Context]] = {}
     task_lock: trio.StrictFIFOLock = trio.StrictFIFOLock()
 
+    # XXX: so weird but, apparently without this being `._` private
+    # pydantic will complain about private `tractor.Context` instance
+    # vars (namely `._portal` and `._cancel_scope`) at import time.
+    # Reported this bug:
+    # https://github.com/samuelcolvin/pydantic/issues/2816
+    _subscribers: Dict[str, List[tractor.Context]] = {}
+
     class Config:
         arbitrary_types_allowed = True
+        underscore_attrs_are_private = False
 
     async def cancel_all(self) -> None:
         for sym, (cs, msg, quote) in self.feeds.items():
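Why the rename works: under pydantic v1 semantics, underscore-prefixed names are not model fields, and with `underscore_attrs_are_private = False` (the v1 default, here made explicit) they remain plain class attributes that skip field validation entirely, so pydantic never tries to introspect the `tractor.Context` values. A small sketch of that behavior (assuming pydantic v1):

```python
from typing import Any, Dict, List
from pydantic import BaseModel  # assumes pydantic v1 semantics

class Bus(BaseModel):
    brokername: str

    # leading-underscore names are *not* pydantic fields; with
    # ``underscore_attrs_are_private = False`` this stays a plain
    # class attribute and skips validation of its contents
    _subscribers: Dict[str, List[Any]] = {}

    class Config:
        arbitrary_types_allowed = True
        underscore_attrs_are_private = False

bus = Bus(brokername='kraken')
bus._subscribers.setdefault('XBTUSD', []).append(object())
print(list(bus._subscribers))  # ['XBTUSD'] -- class-level, shared state
```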
@@ -108,7 +116,11 @@ def get_feed_bus(
     return _bus
 
 
-async def _setup_persistent_brokerd(brokername: str) -> None:
+@tractor.context
+async def _setup_persistent_brokerd(
+    ctx: tractor.Context,
+    brokername: str
+) -> None:
     """Allocate a actor-wide service nursery in ``brokerd``
     such that feeds can be run in the background persistently by
     the broker backend as needed.

@@ -121,6 +133,9 @@ async def _setup_persistent_brokerd(brokername: str) -> None:
         # background tasks from clients
         bus = get_feed_bus(brokername, service_nursery)
 
+        # unblock caller
+        await ctx.started()
+
         # we pin this task to keep the feeds manager active until the
         # parent actor decides to tear it down
         await trio.sleep_forever()
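This is the callee half of tractor's context protocol that `open_remote_ctx()` consumes: the `@tractor.context` target calls `ctx.started()` once setup is done, which is what unblocks the opener's `portal.open_context()` and produces the `(ctx, first)` pair. A minimal end-to-end sketch, assuming a recent tractor (keyword names such as `enable_modules` have varied across versions):

```python
import trio
import tractor

@tractor.context
async def serve_feeds(ctx: tractor.Context, brokername: str) -> None:
    # ... allocate the service nursery / feed bus here ...
    await ctx.started()          # unblocks the opener's ``open_context()``
    await trio.sleep_forever()   # pinned until the opener cancels us

async def main():
    async with tractor.open_nursery() as an:
        portal = await an.start_actor(
            'brokerd',
            enable_modules=[__name__],  # kwarg name varies by tractor version
        )
        async with portal.open_context(
            serve_feeds,
            brokername='kraken',
        ) as (ctx, first):
            # the remote service is live here; leaving the block cancels it
            await trio.sleep(0.1)
        await portal.cancel_actor()

if __name__ == '__main__':
    trio.run(main)
```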
@@ -224,7 +239,7 @@ async def attach_feed_bus(
     brokername: str,
     symbol: str,
     loglevel: str,
-):
+) -> None:
 
     # try:
     if loglevel is None:

@@ -256,7 +271,7 @@ async def attach_feed_bus(
                 loglevel=loglevel,
             )
         )
-        bus.subscribers.setdefault(symbol, []).append(ctx)
+        bus._subscribers.setdefault(symbol, []).append(ctx)
     else:
         sub_only = True
 

@@ -266,15 +281,17 @@ async def attach_feed_bus(
 
     # send this even to subscribers to existing feed?
     await ctx.send_yield(init_msg)
 
+    # deliver a first quote asap
     await ctx.send_yield(first_quote)
 
     if sub_only:
-        bus.subscribers[symbol].append(ctx)
+        bus._subscribers[symbol].append(ctx)
 
     try:
         await trio.sleep_forever()
     finally:
-        bus.subscribers[symbol].remove(ctx)
+        bus._subscribers[symbol].remove(ctx)
 
 
 @dataclass
@@ -17,6 +17,7 @@
 """
 Financial signal processing for the peeps.
 """
+from functools import partial
 from typing import AsyncIterator, Callable, Tuple
 
 import trio

@@ -29,6 +30,8 @@ from .. import data
 from ._momo import _rsi, _wma
 from ._volume import _tina_vwap
 from ..data import attach_shm_array
+from ..data.feed import Feed
+from ..data._sharedmem import ShmArray
 
 log = get_logger(__name__)
 
@@ -62,31 +65,19 @@ async def latency(
     yield value
 
 
-@tractor.stream
-async def cascade(
-    ctx: tractor.Context,
-    brokername: str,
-    src_shm_token: dict,
-    dst_shm_token: Tuple[str, np.dtype],
-    symbol: str,
-    fsp_func_name: str,
-) -> AsyncIterator[dict]:
-    """Chain streaming signal processors and deliver output to
-    destination mem buf.
-
-    """
-    src = attach_shm_array(token=src_shm_token)
-    dst = attach_shm_array(readonly=False, token=dst_shm_token)
-
-    func: Callable = _fsps[fsp_func_name]
-
-    # open a data feed stream with requested broker
-    async with data.open_feed(brokername, [symbol]) as feed:
-
-        assert src.token == feed.shm.token
-
 async def fsp_compute(
+    ctx: tractor.Context,
+    symbol: str,
+    feed: Feed,
+
+    src: ShmArray,
+    dst: ShmArray,
+
+    fsp_func_name: str,
+    func: Callable,
+
     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
 
     # TODO: load appropriate fsp with input args
@@ -95,6 +86,12 @@ async def cascade(
         sym: str,
         stream,
     ):
 
+        # TODO: make this the actualy first quote from feed
+        # XXX: this allows for a single iteration to run for history
+        # processing without waiting on the real-time feed for a new quote
+        yield {}
+
         # task cancellation won't kill the channel
         with stream.shield():
             async for quotes in stream:
@@ -158,11 +155,48 @@ async def cascade(
     # stream latest shm array index entry
     await ctx.send_yield(index)
 
 
+@tractor.stream
+async def cascade(
+    ctx: tractor.Context,
+    brokername: str,
+    src_shm_token: dict,
+    dst_shm_token: Tuple[str, np.dtype],
+    symbol: str,
+    fsp_func_name: str,
+) -> AsyncIterator[dict]:
+    """Chain streaming signal processors and deliver output to
+    destination mem buf.
+
+    """
+    src = attach_shm_array(token=src_shm_token)
+    dst = attach_shm_array(readonly=False, token=dst_shm_token)
+
+    func: Callable = _fsps[fsp_func_name]
+
+    # open a data feed stream with requested broker
+    async with data.open_feed(brokername, [symbol]) as feed:
+
+        assert src.token == feed.shm.token
+
         last_len = new_len = len(src.array)
 
+        fsp_target = partial(
+            fsp_compute,
+            ctx=ctx,
+            symbol=symbol,
+            feed=feed,
+
+            src=src,
+            dst=dst,
+
+            fsp_func_name=fsp_func_name,
+            func=func
+        )
+
         async with trio.open_nursery() as n:
 
-            cs = await n.start(fsp_compute)
+            cs = await n.start(fsp_target)
 
             # Increment the underlying shared memory buffer on every
             # "increment" msg received from the underlying data feed.

@@ -176,7 +210,7 @@ async def cascade(
                 # respawn the signal compute task if the source
                 # signal has been updated
                 cs.cancel()
-                cs = await n.start(fsp_compute)
+                cs = await n.start(fsp_target)
 
                 # TODO: adopt an incremental update engine/approach
                 # where possible here eventually!
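The `fsp_target = partial(...)` dance exists because `trio.Nursery.start()` calls its target as `target(task_status=...)` with no other arguments, so anything the unbundled `fsp_compute` now needs must be pre-bound. A runnable sketch of the same start/cancel/restart pattern:

```python
from functools import partial
import trio

async def compute(label: str, *, task_status=trio.TASK_STATUS_IGNORED):
    with trio.CancelScope() as cs:
        # hand our cancel scope back so the parent can restart us later
        task_status.started(cs)
        await trio.sleep_forever()

async def main():
    async with trio.open_nursery() as n:
        # ``nursery.start()`` only passes ``task_status``; extra
        # parameters must be pre-bound, e.g. with functools.partial
        target = partial(compute, 'rsi')
        cs = await n.start(target)
        # respawn: cancel the old task and start a fresh one
        cs.cancel()
        cs = await n.start(target)
        cs.cancel()

trio.run(main)
```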
@@ -42,7 +42,7 @@ import trio
 import tractor
 from outcome import Error
 
-from .._daemon import maybe_open_pikerd
+from .._daemon import maybe_open_pikerd, _tractor_kwargs
 from ..log import get_logger
 from ._pg_overrides import _do_overrides
 

@@ -196,6 +196,10 @@ def run_qtractor(
         'main': instance,
     }
 
+    # override tractor's defaults
+    tractor_kwargs.update(_tractor_kwargs)
+
     # define tractor entrypoint
     async def main():
 
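Worth noting the merge direction here: `dict.update()` is last-writer-wins, so the shared `_tractor_kwargs` overrides beat any caller-supplied values, keeping every entry point on the same registry address. A trivial sketch (the `merged` helper is illustrative):

```python
from typing import Any

# mirrors ``_tractor_kwargs`` from the daemon module: one shared dict
# of overrides so all actors boot against the same registry address
_tractor_kwargs: dict[str, Any] = {
    'arbiter_addr': ('127.0.0.1', 6116),
}

def merged(user_kwargs: dict[str, Any]) -> dict[str, Any]:
    kwargs = dict(user_kwargs)
    # later update() wins: the piker overrides beat caller values
    kwargs.update(_tractor_kwargs)
    return kwargs

print(merged({'loglevel': 'info', 'arbiter_addr': ('0.0.0.0', 1234)}))
```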
@@ -79,7 +79,7 @@ class DpiAwareFont:
         return self._qfont
 
     @property
-    def px_size(self):
+    def px_size(self) -> int:
         return self._qfont.pixelSize()
 
     def configure_to_dpi(self, screen: Optional[QtGui.QScreen] = None):
@@ -319,14 +319,10 @@ async def start_order_mode(
 ) -> None:
 
     # spawn EMS actor-service
-    async with open_ems(
-        brokername,
-        symbol,
-    ) as (book, trades_stream), open_order_mode(
-        symbol,
-        chart,
-        book,
-    ) as order_mode:
+    async with (
+        open_ems(brokername, symbol) as (book, trades_stream),
+        open_order_mode(symbol, chart, book) as order_mode
+    ):
 
         def get_index(time: float):
 
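One caveat on the new form: the parenthesized multi-manager `async with` only became official syntax in Python 3.10 (CPython 3.9's new PEG parser happens to accept it too), so this hunk quietly raises the interpreter floor. A self-contained sketch of the construct:

```python
import trio
from contextlib import asynccontextmanager

@asynccontextmanager
async def res(name: str):
    print(f'open {name}')
    try:
        yield name
    finally:
        print(f'close {name}')

async def main():
    # parenthesized multi-manager form: official from Python 3.10
    async with (
        res('ems') as book,
        res('order_mode') as mode,
    ):
        print(book, mode)

trio.run(main)
```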
@@ -337,7 +333,7 @@ async def start_order_mode(
             indexes = ohlc['time'] >= time
 
             if any(indexes):
-                return ohlc['index'][indexes[-1]]
+                return ohlc['index'][indexes][-1]
             else:
                 return ohlc['index'][-1]
 
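This one-character-looking move is a real bug fix: the old code applied `[-1]` to the boolean *mask* rather than to the masked result, so it indexed with a boolean scalar instead of picking the last matching row. A worked example (synthetic data):

```python
import numpy as np

ohlc = np.array(
    [(0, 10.0), (1, 10.5), (2, 11.0)],
    dtype=[('index', int), ('time', float)],
)
indexes = ohlc['time'] >= 10.5          # boolean mask: [False, True, True]

# fixed form: apply the mask, *then* take the last matching row
print(ohlc['index'][indexes][-1])       # -> 2

# old form: ``indexes[-1]`` is a boolean scalar, which numpy treats as
# a 0-d mask -- the whole column under a fresh axis, never "last match"
print(ohlc['index'][indexes[-1]])       # -> [[0 1 2]]
```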
@@ -5,41 +5,7 @@ import pyqtgraph as pg
 from PyQt5 import QtCore, QtGui
 
 
-class SampleLegendItem(pg.graphicsItems.LegendItem.ItemSample):
-
-    def paint(self, p, *args):
-        p.setRenderHint(p.Antialiasing)
-        if isinstance(self.item, tuple):
-            positive = self.item[0].opts
-            negative = self.item[1].opts
-            p.setPen(pg.mkPen(positive['pen']))
-            p.setBrush(pg.mkBrush(positive['brush']))
-            p.drawPolygon(
-                QtGui.QPolygonF(
-                    [
-                        QtCore.QPointF(0, 0),
-                        QtCore.QPointF(18, 0),
-                        QtCore.QPointF(18, 18),
-                    ]
-                )
-            )
-            p.setPen(pg.mkPen(negative['pen']))
-            p.setBrush(pg.mkBrush(negative['brush']))
-            p.drawPolygon(
-                QtGui.QPolygonF(
-                    [
-                        QtCore.QPointF(0, 0),
-                        QtCore.QPointF(0, 18),
-                        QtCore.QPointF(18, 18),
-                    ]
-                )
-            )
-        else:
-            opts = self.item.opts
-            p.setPen(pg.mkPen(opts['pen']))
-            p.drawRect(0, 10, 18, 0.5)
+# TODO: test this out as our help menu
 
 
 class CenteredTextItem(QtGui.QGraphicsTextItem):
     def __init__(
         self,