'''
Execution management system (EMS) e2e testing.

Most tests leverage our paper clearing engine, currently found in
``piker.clearing._paper_engine``.

Ideally, in the longer run, we'd also like to support forms of
(non-clearing) live order tests against certain backends that make it
possible to do so.

'''
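# NOTE: most tests here rely on the `open_test_pikerd` fixture
# (presumably provided by the suite's `conftest.py`) which boots an
# isolated `pikerd` service tree per test, plus a `loglevel` fixture
# for tuning runtime log output.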
from contextlib import (
    contextmanager as cm,
)
from typing import (
    Awaitable,
    Callable,
    AsyncContextManager,
    Literal,
)

import trio
# import pytest_trio
from exceptiongroup import BaseExceptionGroup

import pytest
import tractor
from uuid import uuid4

from piker.service import Services
from piker.log import get_logger
from piker.clearing._messages import (
    Order,
    Status,
    # Cancel,
    BrokerdPosition,
)
from piker.clearing import (
    open_ems,
    OrderClient,
)
from piker.accounting._mktinfo import (
    unpack_fqme,
)
from piker.accounting import (
    open_pps,
    Position,
)


log = get_logger(__name__)


async def open_pikerd(
    open_test_pikerd: AsyncContextManager,

) -> Services:
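    '''
    Yield the `Services` handle from an isolated test `pikerd`
    runtime opened via the passed `open_test_pikerd` fixture.

    '''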
    async with (
        open_test_pikerd() as (_, _, _, services),
    ):
        yield services


async def order_and_and_wait_for_ppmsg(
    client: OrderClient,
    trades_stream: tractor.MsgStream,
    fqme: str,

    action: Literal['buy', 'sell'],
    price: float = 100e3,  # just a super high price.
    size: float = 0.01,

    exec_mode: str = 'live',
    account: str = 'paper',

) -> tuple[
    list[Order],
    list[Status | BrokerdPosition],
]:
    '''
    Submit an order via the EMS `client` and wait on the
    `trades_stream` for the resulting position msg, collecting and
    returning any interleaved status msgs along the way.

    '''
    sent: list[Order] = []
    broker, key, suffix = unpack_fqme(fqme)

    order = Order(
        exec_mode=exec_mode,
        action=action,  # TODO: remove this from our schema?
        oid=str(uuid4()),
        account=account,
        size=size,
        symbol=fqme,
        price=price,
        brokers=[broker],
    )
    sent.append(order)
    await client.send(order)

    # TODO: i guess we should still test the old sync-API?
    # client.send_nowait(order)

    # Wait for position message before moving on to verify flow(s)
    # for the multi-order position entry/exit.
    msgs: list[Status | BrokerdPosition] = []
    async for msg in trades_stream:
        match msg:
            case {'name': 'position'}:
                ppmsg = BrokerdPosition(**msg)
                msgs.append(ppmsg)
                break

            case {'name': 'status'}:
                msgs.append(Status(**msg))

    return sent, msgs


def run_and_tollerate_cancels(
    fn: Callable[..., Awaitable],

    expect_errs: tuple[type[BaseException], ...] | None = None,
    tollerate_errs: tuple[type[BaseException], ...] = (
        tractor.ContextCancelled,
    ),

):
    '''
    Run the ``trio``-``piker`` runtime with potential tolerance for
    inter-actor cancellation during teardown (normally just
    `tractor.ContextCancelled`s).

    '''
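    # NOTE: the tests below use this by wrapping each async test body
    # in a closure and passing it here, e.g.
    # `run_and_tollerate_cancels(atest)`.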
    if expect_errs:
        with pytest.raises(BaseExceptionGroup) as exc_info:
            trio.run(fn)

        for err in exc_info.value.exceptions:
            assert type(err) in expect_errs
    else:
        try:
            trio.run(fn)
        except tollerate_errs:
            pass


@cm
def load_and_check_pos(
    order: Order,
    ppmsg: BrokerdPosition,

) -> None:
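    '''
    Load the ``pps.toml`` position table for the msg's broker account
    and audit that the EMS-relayed ``BrokerdPosition`` matches the
    locally tracked ``Position`` state; yield that position (or
    ``None`` when the msg reports a flat/zero size).

    '''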

    with open_pps(ppmsg.broker, ppmsg.account) as table:

        if ppmsg.size == 0:
            assert ppmsg.symbol not in table.pps
            yield None
            return

        else:
            # NOTE: a special case is here since the `PpTable.pps` are
            # normally indexed by the particular broker's
            # `Position.bs_mktid: str` (a unique market / symbol id provided
            # by their systems/design) but for the paper engine case, this
            # is the same as the fqme.
            pp: Position = table.pps[ppmsg.symbol]

            assert ppmsg.size == pp.size
            assert ppmsg.avg_price == pp.ppu

            yield pp


def test_ems_err_on_bad_broker(
    open_test_pikerd: AsyncContextManager,
    loglevel: str,
):
    async def load_bad_fqme():
        try:
            async with (
                open_test_pikerd() as (_, _, _, services),

                open_ems(
                    'doggycoin.doggy',
                    mode='paper',
                    loglevel=loglevel,
                ) as _
            ):
                pytest.fail('EMS is working on non-broker!?')
        except ModuleNotFoundError:
            pass

    run_and_tollerate_cancels(load_bad_fqme)


async def match_ppmsgs_on_ems_boot(
    ppmsgs: list[BrokerdPosition],

) -> None:
    '''
    Given a list of input position msgs, verify they match
    what is loaded from the EMS on connect.

    '''
    by_acct: dict[tuple, list[BrokerdPosition]] = {}
    for msg in ppmsgs:
        by_acct.setdefault(
            (msg.broker, msg.account),
            [],
        ).append(msg)

    # TODO: actually support multi-mkts to `open_ems()`
    # but for now just pass the first fqme.
    fqme = msg.symbol

    # disconnect from EMS, reconnect and ensure we get our same
    # position relayed to us again in the startup msg.
    async with (
        open_ems(
            fqme,
            mode='paper',
            loglevel='info',
        ) as (
            _,  # OrderClient
            _,  # tractor.MsgStream
            startup_pps,
            accounts,
            _,  # dialogs,
        )
    ):
        for (broker, account), ppmsgs in by_acct.items():
            assert account in accounts

            # lookup all msgs rx-ed for this account
            rx_msgs = startup_pps[(broker, account)]

            for expect_ppmsg in ppmsgs:
                rx_msg = BrokerdPosition(**rx_msgs[expect_ppmsg.symbol])
                assert rx_msg == expect_ppmsg


async def submit_and_check(
    fills: tuple[dict],
    loglevel: str,

) -> tuple[
    BrokerdPosition,
    Position,
]:
    '''
    Enter a trade and assert entries are made in pps and ledger files.

    Shut down the ems-client and ensure on reconnect we get the
    expected matching ``BrokerdPosition`` and ``pps.toml`` entries.

    '''
    broker: str = 'kraken'
    mkt_key: str = 'xbtusdt'
    fqme: str = f'{mkt_key}.{broker}'

    startup_pps: dict[
        tuple[str, str],  # brokername, acctid
        list[BrokerdPosition],
    ]
    async with (
        open_ems(
            fqme,
            mode='paper',
            loglevel=loglevel,
        ) as (
            client,  # OrderClient
            trades_stream,  # tractor.MsgStream
            startup_pps,
            accounts,
            dialogs,
        )
    ):
        # no positions on startup
        assert not startup_pps
        assert 'paper' in accounts

        od: dict
        for od in fills:
            print(f'Sending order {od} for fill')
            size = od['size']
            sent, msgs = await order_and_and_wait_for_ppmsg(
                client,
                trades_stream,
                fqme,
                action='buy' if size > 0 else 'sell',
                price=100e3 if size > 0 else 0,
                size=size,
            )

        last_order: Order = sent[-1]
        last_resp = msgs[-1]
        assert isinstance(last_resp, BrokerdPosition)
        ppmsg = last_resp

        # check that pps.toml for account has been updated
        # and all ems position msgs match that state.
        with load_and_check_pos(
            last_order,
            ppmsg,
        ) as pos:
            pass

        return ppmsg, pos


@pytest.mark.parametrize(
    'fills',
    [
        # buy and leave
        ({'size': 0.001},),

        # sell short, then buy back to net-zero in dst
        (
            {'size': -0.001},
            {'size': 0.001},
        ),

        # multi-partial entry and exits from net-zero, to short and back
        # to net-zero.
        (
            # enters
            {'size': 0.001},
            {'size': 0.002},

            # partial exit
            {'size': -0.001},

            # partial enter
            {'size': 0.0015},
            {'size': 0.001},
            {'size': 0.002},

            # nearly back to zero.
            {'size': -0.001},

            # switch to net-short
            {'size': -0.025},
            {'size': -0.0195},

            # another entry
            {'size': 0.001},

            # final cover to net-zero again.
            {'size': 0.038},
        ),
    ],
    ids='fills={}'.format,
)
def test_multi_fill_positions(
    open_test_pikerd: AsyncContextManager,
    loglevel: str,

    fills: tuple[dict],

    check_cross_session: bool = False,

) -> None:

    ppmsg: BrokerdPosition
    pos: Position

    # expected net position size after all fills are submitted.
    accum_size: float = 0
    for fill in fills:
        accum_size += fill['size']

    async def atest():

        # export to outer scope for audit on second runtime-boot.
        nonlocal ppmsg, pos

        async with (
            open_test_pikerd() as (_, _, _, services),
        ):
            ppmsg, pos = await submit_and_check(
                fills=fills,
                loglevel=loglevel,
            )
            assert ppmsg.size == accum_size

    run_and_tollerate_cancels(atest)

    if check_cross_session or accum_size != 0:
        # rerun just to check that position info is persistent for the paper
        # account (i.e. a user can expect to see paper pps persist across
        # runtime sessions).
        async def just_check_pp():
            async with (
                open_test_pikerd() as (_, _, _, services),
            ):
                await match_ppmsgs_on_ems_boot([ppmsg])

        run_and_tollerate_cancels(just_check_pp)