Port clearing systems to new tractor context api

This avoids somewhat convoluted "hackery" making 2 one-way streams
between the order client and the EMS and instead uses the new
bi-directional streaming and context API from `tractor`. Add a router
type to the EMS that gets set up by the initial service tree and which
we'll eventually use to work toward multi-provider executions and
order-trigger monitoring. Move to py3.9 style where possible throughout.
ems_to_bidir_streaming
Tyler Goodlet 2021-06-01 10:27:16 -04:00
parent 7fb2c95ef1
commit 9931accc52
3 changed files with 150 additions and 97 deletions

View File

@ -102,7 +102,9 @@ async def open_pikerd(
assert _services is None assert _services is None
# XXX: this may open a root actor as well # XXX: this may open a root actor as well
async with tractor.open_root_actor( async with (
tractor.open_root_actor(
# passed through to ``open_root_actor`` # passed through to ``open_root_actor``
arbiter_addr=_tractor_kwargs['arbiter_addr'], arbiter_addr=_tractor_kwargs['arbiter_addr'],
name=_root_dname, name=_root_dname,
@ -113,10 +115,10 @@ async def open_pikerd(
# TODO: eventually we should be able to avoid # TODO: eventually we should be able to avoid
# having the root have more than permissions to # having the root have more than permissions to
# spawn other specialized daemons I think? # spawn other specialized daemons I think?
# enable_modules=[__name__],
enable_modules=_root_modules, enable_modules=_root_modules,
) as _,
) as _, tractor.open_nursery() as actor_nursery: tractor.open_nursery() as actor_nursery,
):
async with trio.open_nursery() as service_nursery: async with trio.open_nursery() as service_nursery:
# setup service mngr singleton instance # setup service mngr singleton instance
@ -137,6 +139,7 @@ async def open_pikerd(
async def maybe_open_runtime( async def maybe_open_runtime(
loglevel: Optional[str] = None, loglevel: Optional[str] = None,
**kwargs, **kwargs,
) -> None: ) -> None:
""" """
Start the ``tractor`` runtime (a root actor) if none exists. Start the ``tractor`` runtime (a root actor) if none exists.
@ -159,6 +162,7 @@ async def maybe_open_runtime(
async def maybe_open_pikerd( async def maybe_open_pikerd(
loglevel: Optional[str] = None, loglevel: Optional[str] = None,
**kwargs, **kwargs,
) -> Union[tractor._portal.Portal, Services]: ) -> Union[tractor._portal.Portal, Services]:
"""If no ``pikerd`` daemon-root-actor can be found start it and """If no ``pikerd`` daemon-root-actor can be found start it and
yield up (we should probably figure out returning a portal to self yield up (we should probably figure out returning a portal to self
@ -207,7 +211,6 @@ async def maybe_spawn_daemon(
service_name: str, service_name: str,
spawn_func: Callable, spawn_func: Callable,
spawn_args: dict[str, Any], spawn_args: dict[str, Any],
# brokername: str,
loglevel: Optional[str] = None, loglevel: Optional[str] = None,
**kwargs, **kwargs,
@ -236,8 +239,10 @@ async def maybe_spawn_daemon(
# pikerd is not live we now become the root of the # pikerd is not live we now become the root of the
# process tree # process tree
async with maybe_open_pikerd( async with maybe_open_pikerd(
loglevel=loglevel, loglevel=loglevel,
**kwargs, **kwargs,
) as pikerd_portal: ) as pikerd_portal:
if pikerd_portal is None: if pikerd_portal is None:
@ -265,8 +270,6 @@ async def spawn_brokerd(
) -> tractor._portal.Portal: ) -> tractor._portal.Portal:
from .data import _setup_persistent_brokerd
log.info(f'Spawning {brokername} broker daemon') log.info(f'Spawning {brokername} broker daemon')
brokermod = get_brokermod(brokername) brokermod = get_brokermod(brokername)
@ -286,13 +289,9 @@ async def spawn_brokerd(
**tractor_kwargs **tractor_kwargs
) )
# TODO: so i think this is the perfect use case for supporting
# a cross-actor async context manager api instead of this
# shoot-and-forget task spawned in the root nursery, we'd have an
# async exit stack that we'd register the `portal.open_context()`
# call with and then have the ability to unwind the call whenevs.
# non-blocking setup of brokerd service nursery # non-blocking setup of brokerd service nursery
from .data import _setup_persistent_brokerd
await _services.open_remote_ctx( await _services.open_remote_ctx(
portal, portal,
_setup_persistent_brokerd, _setup_persistent_brokerd,
@ -327,7 +326,6 @@ async def maybe_spawn_brokerd(
async def spawn_emsd( async def spawn_emsd(
brokername: str,
loglevel: Optional[str] = None, loglevel: Optional[str] = None,
**extra_tractor_kwargs **extra_tractor_kwargs
@ -338,10 +336,10 @@ async def spawn_emsd(
""" """
log.info('Spawning emsd') log.info('Spawning emsd')
# TODO: raise exception when _services == None?
global _services global _services
assert _services
await _services.actor_n.start_actor( portal = await _services.actor_n.start_actor(
'emsd', 'emsd',
enable_modules=[ enable_modules=[
'piker.clearing._ems', 'piker.clearing._ems',
@ -351,6 +349,15 @@ async def spawn_emsd(
debug_mode=_services.debug_mode, # set by pikerd flag debug_mode=_services.debug_mode, # set by pikerd flag
**extra_tractor_kwargs **extra_tractor_kwargs
) )
# non-blocking setup of clearing service
from .clearing._ems import _setup_persistent_emsd
await _services.open_remote_ctx(
portal,
_setup_persistent_emsd,
)
return 'emsd' return 'emsd'
@ -367,7 +374,7 @@ async def maybe_open_emsd(
'emsd', 'emsd',
spawn_func=spawn_emsd, spawn_func=spawn_emsd,
spawn_args={'brokername': brokername, 'loglevel': loglevel}, spawn_args={'loglevel': loglevel},
loglevel=loglevel, loglevel=loglevel,
**kwargs, **kwargs,

View File

@ -36,6 +36,7 @@ from .._daemon import maybe_open_emsd
log = get_logger(__name__) log = get_logger(__name__)
# TODO: some kinda validation like this
# class Order(msgspec.Struct): # class Order(msgspec.Struct):
# action: str # action: str
# price: float # price: float
@ -137,7 +138,11 @@ def get_orders(
return _orders return _orders
async def send_order_cmds(symbol_key: str): async def relay_order_cmds_from_sync_code(
symbol_key: str,
to_ems_stream: tractor.MsgStream,
) -> None:
""" """
Order streaming task: deliver orders transmitted from UI Order streaming task: deliver orders transmitted from UI
to downstream consumers. to downstream consumers.
@ -157,16 +162,15 @@ async def send_order_cmds(symbol_key: str):
book = get_orders() book = get_orders()
orders_stream = book._from_order_book orders_stream = book._from_order_book
# signal that ems connection is up and ready
book._ready_to_receive.set()
async for cmd in orders_stream: async for cmd in orders_stream:
print(cmd) print(cmd)
if cmd['symbol'] == symbol_key: if cmd['symbol'] == symbol_key:
# send msg over IPC / wire # send msg over IPC / wire
log.info(f'Send order cmd:\n{pformat(cmd)}') log.info(f'Send order cmd:\n{pformat(cmd)}')
yield cmd await to_ems_stream.send(cmd)
else: else:
# XXX BRUTAL HACKZORZES !!! # XXX BRUTAL HACKZORZES !!!
# re-insert for another consumer # re-insert for another consumer
@ -213,32 +217,32 @@ async def open_ems(
- 'broker_filled' - 'broker_filled'
""" """
actor = tractor.current_actor()
# wait for service to connect back to us signalling # wait for service to connect back to us signalling
# ready for order commands # ready for order commands
book = get_orders() book = get_orders()
async with maybe_open_emsd(broker) as portal: async with maybe_open_emsd(broker) as portal:
async with portal.open_stream_from( async with (
_emsd_main, # connect to emsd
client_actor_name=actor.name, portal.open_context(
broker=broker, _emsd_main,
symbol=symbol.key, broker=broker,
symbol=symbol.key,
) as trades_stream: # TODO: ``first`` here should be the active orders/execs
with trio.fail_after(10): # persistent on the ems so that local UIs can be populated.
await book._ready_to_receive.wait() ) as (ctx, first),
# open 2-way trade command stream
ctx.open_stream() as trades_stream,
):
async with trio.open_nursery() as n:
n.start_soon(
relay_order_cmds_from_sync_code,
symbol.key,
trades_stream
)
try:
yield book, trades_stream yield book, trades_stream
finally:
# TODO: we want to eventually keep this up (by having
# the exec loop keep running in the pikerd tree) but for
# now we have to kill the context to avoid backpressure
# build-up on the shm write loop.
with trio.CancelScope(shield=True):
await trades_stream.aclose()

View File

@ -21,11 +21,10 @@ In da suit parlances: "Execution management systems"
from pprint import pformat from pprint import pformat
import time import time
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import ( from typing import AsyncIterator, Callable
AsyncIterator, Dict, Callable, Tuple,
)
from bidict import bidict from bidict import bidict
from pydantic import BaseModel
import trio import trio
from trio_typing import TaskStatus from trio_typing import TaskStatus
import tractor import tractor
@ -89,11 +88,11 @@ class _DarkBook:
broker: str broker: str
# levels which have an executable action (eg. alert, order, signal) # levels which have an executable action (eg. alert, order, signal)
orders: Dict[ orders: dict[
str, # symbol str, # symbol
Dict[ dict[
str, # uuid str, # uuid
Tuple[ tuple[
Callable[[float], bool], # predicate Callable[[float], bool], # predicate
str, # name str, # name
dict, # cmd / msg type dict, # cmd / msg type
@ -102,22 +101,13 @@ class _DarkBook:
] = field(default_factory=dict) ] = field(default_factory=dict)
# tracks most recent values per symbol each from data feed # tracks most recent values per symbol each from data feed
lasts: Dict[ lasts: dict[
Tuple[str, str], tuple[str, str],
float float
] = field(default_factory=dict) ] = field(default_factory=dict)
# mapping of broker order ids to piker ems ids # mapping of broker order ids to piker ems ids
_broker2ems_ids: Dict[str, str] = field(default_factory=bidict) _broker2ems_ids: dict[str, str] = field(default_factory=bidict)
_books: Dict[str, _DarkBook] = {}
def get_dark_book(broker: str) -> _DarkBook:
global _books
return _books.setdefault(broker, _DarkBook(broker))
# XXX: this is in place to prevent accidental positions that are too # XXX: this is in place to prevent accidental positions that are too
@ -255,10 +245,12 @@ async def exec_loop(
to brokers. to brokers.
""" """
global _router
# XXX: this should be initial price quote from target provider # XXX: this should be initial price quote from target provider
first_quote = await feed.receive() first_quote = await feed.receive()
book = get_dark_book(broker) book = _router.get_dark_book(broker)
book.lasts[(broker, symbol)] = first_quote[symbol]['last'] book.lasts[(broker, symbol)] = first_quote[symbol]['last']
# TODO: wrap this in a more re-usable general api # TODO: wrap this in a more re-usable general api
@ -478,12 +470,14 @@ async def process_broker_trades(
async def process_order_cmds( async def process_order_cmds(
ctx: tractor.Context, ctx: tractor.Context,
cmd_stream: 'tractor.ReceiveStream', # noqa cmd_stream: 'tractor.ReceiveStream', # noqa
symbol: str, symbol: str,
feed: 'Feed', # noqa feed: 'Feed', # noqa
client: 'Client', # noqa client: 'Client', # noqa
dark_book: _DarkBook, dark_book: _DarkBook,
) -> None: ) -> None:
async for cmd in cmd_stream: async for cmd in cmd_stream:
@ -509,6 +503,7 @@ async def process_order_cmds(
try: try:
dark_book.orders[symbol].pop(oid, None) dark_book.orders[symbol].pop(oid, None)
# TODO: move these to `tractor.MsgStream`
await ctx.send_yield({ await ctx.send_yield({
'resp': 'dark_cancelled', 'resp': 'dark_cancelled',
'oid': oid 'oid': oid
@ -616,13 +611,15 @@ async def process_order_cmds(
}) })
@tractor.stream @tractor.context
async def _emsd_main( async def _emsd_main(
ctx: tractor.Context, ctx: tractor.Context,
client_actor_name: str, # client_actor_name: str,
broker: str, broker: str,
symbol: str, symbol: str,
_mode: str = 'dark', # ('paper', 'dark', 'live') _mode: str = 'dark', # ('paper', 'dark', 'live')
) -> None: ) -> None:
"""EMS (sub)actor entrypoint providing the """EMS (sub)actor entrypoint providing the
execution management (micro)service which conducts broker execution management (micro)service which conducts broker
@ -649,9 +646,10 @@ async def _emsd_main(
accept normalized trades responses, process and relay to ems client(s) accept normalized trades responses, process and relay to ems client(s)
""" """
from ._client import send_order_cmds # from ._client import send_order_cmds
dark_book = get_dark_book(broker) global _router
dark_book = _router.get_dark_book(broker)
# spawn one task per broker feed # spawn one task per broker feed
async with trio.open_nursery() as n: async with trio.open_nursery() as n:
@ -664,40 +662,84 @@ async def _emsd_main(
) as feed: ) as feed:
# get a portal back to the client # get a portal back to the client
async with tractor.wait_for_actor(client_actor_name) as portal: # async with tractor.wait_for_actor(client_actor_name) as portal:
# connect back to the calling actor (the one that is await ctx.started()
# acting as an EMS client and will submit orders) to
# receive requests pushed over a tractor stream
# using (for now) an async generator.
async with portal.open_stream_from(
send_order_cmds,
symbol_key=symbol,
) as order_stream:
# start the condition scan loop # establish 2-way stream with requesting order-client
quote, feed, client = await n.start( async with ctx.open_stream() as order_stream:
exec_loop,
ctx,
feed,
broker,
symbol,
_mode,
)
await n.start( # start the condition scan loop
process_broker_trades, quote, feed, client = await n.start(
ctx, exec_loop,
feed, ctx,
dark_book, feed,
) broker,
symbol,
_mode,
)
# start inbound order request processing # begin processing order events from the target brokerd backend
await process_order_cmds( await n.start(
ctx, process_broker_trades,
order_stream, ctx,
symbol, feed,
feed, dark_book,
client, )
dark_book,
) # start inbound (from attached client) order request processing
await process_order_cmds(
ctx,
order_stream,
symbol,
feed,
client,
dark_book,
)
class _Router(BaseModel):
'''Order router which manages per-broker dark books, alerts,
and clearing related data feed management.
'''
nursery: trio.Nursery
feeds: dict[str, tuple[trio.CancelScope, float]] = {}
books: dict[str, _DarkBook] = {}
class Config:
arbitrary_types_allowed = True
underscore_attrs_are_private = False
def get_dark_book(
self,
brokername: str,
) -> _DarkBook:
return self.books.setdefault(brokername, _DarkBook(brokername))
_router: _Router = None
@tractor.context
async def _setup_persistent_emsd(
ctx: tractor.Context,
) -> None:
global _router
# spawn one task per broker feed
async with trio.open_nursery() as service_nursery:
_router = _Router(nursery=service_nursery)
# TODO: send back the full set of persistent orders/execs persistent
await ctx.started()
# we pin this task to keep the feeds manager active until the
# parent actor decides to tear it down
await trio.sleep_forever()