Compare commits
No commits in common. "drop_pydantic" and "310_plus" have entirely different histories.
drop_pydantic ... 310_plus
@@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
 from contextlib import asynccontextmanager as acm
 from collections import defaultdict

-from msgspec import Struct
-import tractor
+from pydantic import BaseModel
 import trio
 from trio_typing import TaskStatus
+import tractor

 from .log import get_logger, get_console_log
 from .brokers import get_brokermod
@@ -47,13 +47,16 @@ _root_modules = [
 ]


-class Services(Struct):
+class Services(BaseModel):

     actor_n: tractor._supervise.ActorNursery
     service_n: trio.Nursery
     debug_mode: bool  # tractor sub-actor debug mode flag
     service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}

+    class Config:
+        arbitrary_types_allowed = True
+
     async def start_service_task(
         self,
         name: str,
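This swap is the crux of the "drop_pydantic" branch. A minimal sketch (hypothetical names, not code from either branch) of why the ``class Config`` shim disappears with ``msgspec``: a ``Struct`` only enforces types on encode/decode, so fields can be annotated with arbitrary runtime objects with no extra configuration, while pydantic v1 needs ``arbitrary_types_allowed`` for non-pydantic field types.

    from msgspec import Struct
    from pydantic import BaseModel

    class MsgspecServices(Struct):
        # plain annotations; nothing is validated at construction time
        debug_mode: bool
        service_tasks: dict[str, str]

    class PydanticServices(BaseModel):
        debug_mode: bool
        service_tasks: dict[str, str] = {}

        class Config:
            # required (pydantic v1) once fields hold non-pydantic
            # runtime objects like nurseries or actor portals
            arbitrary_types_allowed = True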
@@ -34,13 +34,13 @@ from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
+from pydantic.dataclasses import dataclass
+from pydantic import BaseModel
 import wsproto

 from .._cacheables import open_cached_client
 from ._util import resproc, SymbolNotFound
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
-from ..data.types import Struct
 from ..data._web_bs import open_autorecon_ws, NoBsWs

 log = get_logger(__name__)
@@ -79,14 +79,12 @@ _show_wap_in_history = False


 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(Struct, frozen=True):
+class Pair(BaseModel):
     symbol: str
     status: str

     baseAsset: str
     baseAssetPrecision: int
-    cancelReplaceAllowed: bool
-    allowTrailingStop: bool
     quoteAsset: str
     quotePrecision: int
     quoteAssetPrecision: int
@@ -289,7 +287,7 @@ async def get_client() -> Client:


 # validation type
-class AggTrade(Struct):
+class AggTrade(BaseModel):
     e: str   # Event type
     E: int   # Event time
     s: str   # Symbol
@@ -343,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

         elif msg.get('e') == 'aggTrade':

-            # NOTE: this is purely for a definition, ``msgspec.Struct``
-            # does not runtime-validate until you decode/encode.
-            # see: https://jcristharif.com/msgspec/structs.html#type-validation
+            # validate
             msg = AggTrade(**msg)

             # TODO: type out and require this quote format
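For reference, the behavior the removed NOTE describes: a ``msgspec.Struct`` is not validated on ``__init__``, only when a message is decoded/encoded against the type. A self-contained sketch:

    import msgspec

    class AggTrade(msgspec.Struct):
        e: str  # event type
        E: int  # event time

    # no error here, even with a wrong field type:
    bogus = AggTrade(e='aggTrade', E='not-an-int')

    # decoding *does* enforce the schema:
    msg = msgspec.json.decode(
        b'{"e": "aggTrade", "E": 1654801166000}',
        type=AggTrade,
    )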
@@ -356,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
                 'brokerd_ts': time.time(),
                 'ticks': [{
                     'type': 'trade',
-                    'price': float(msg.p),
-                    'size': float(msg.q),
+                    'price': msg.p,
+                    'size': msg.q,
                     'broker_ts': msg.T,
                 }],
             }
@@ -452,7 +448,7 @@ async def stream_quotes(
         d = cache[sym.upper()]
         syminfo = Pair(**d)  # validation

-        si = sym_infos[sym] = syminfo.to_dict()
+        si = sym_infos[sym] = syminfo.dict()

         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
@@ -20,10 +20,15 @@ Interactive Brokers API backend.
 Sub-modules within break into the core functionalities:

 - ``broker.py`` part for orders / trading endpoints
-- ``feed.py`` for real-time data feed endpoints
-- ``api.py`` for the core API machinery which is ``trio``-ized
+- ``data.py`` for real-time data feed endpoints
+
+- ``client.py`` for the core API machinery which is ``trio``-ized
   wrapping around ``ib_insync``.

+- ``report.py`` for the hackery to build manual pp calcs
+  to avoid ib's absolute bullshit FIFO style position
+  tracking..
+
 """
 from .api import (
     get_client,
@@ -33,10 +38,7 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import (
-    trades_dialogue,
-    norm_trade_records,
-)
+from .broker import trades_dialogue

 __all__ = [
     'get_client',
@@ -38,21 +38,15 @@ import time
 from types import SimpleNamespace

 from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-import ib_insync as ibis
-from ib_insync.wrapper import RequestError
 from ib_insync.contract import Contract, ContractDetails
 from ib_insync.order import Order
 from ib_insync.ticker import Ticker
-from ib_insync.objects import (
-    Position,
-    Fill,
-    Execution,
-    CommissionReport,
-)
+from ib_insync.objects import Position
+import ib_insync as ibis
 from ib_insync.wrapper import Wrapper
 from ib_insync.client import Client as ib_Client
 import numpy as np
@@ -161,23 +155,30 @@ class NonShittyIB(ibis.IB):
         self.client.apiEnd += self.disconnectedEvent


+# map of symbols to contract ids
+_adhoc_cmdty_data_map = {
+    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
+
+    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
+    # https://groups.io/g/twsapi/message/44174
+    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
+}
+
 _futes_venues = (
     'GLOBEX',
     'NYMEX',
     'CME',
     'CMECRYPTO',
-    'COMEX',
-    'CMDTY',  # special name case..
 )

 _adhoc_futes_set = {

     # equities
     'nq.globex',
-    'mnq.globex',  # micro
+    'mnq.globex',

     'es.globex',
-    'mes.globex',  # micro
+    'mes.globex',

     # cypto$
     'brr.cmecrypto',
@@ -194,46 +195,20 @@ _adhoc_futes_set = {
     # metals
     'xauusd.cmdty',  # gold spot
     'gc.nymex',
-    'mgc.nymex',  # micro
-
-    # oil & gas
-    'cl.nymex',
+    'mgc.nymex',

     'xagusd.cmdty',  # silver spot
     'ni.nymex',  # silver futes
     'qi.comex',  # mini-silver futes
 }

-
-# map of symbols to contract ids
-_adhoc_symbol_map = {
-    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
-
-    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
-    # https://groups.io/g/twsapi/message/44174
-    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
-}
-for qsn in _adhoc_futes_set:
-    sym, venue = qsn.split('.')
-    assert venue.upper() in _futes_venues, f'{venue}'
-    _adhoc_symbol_map[sym.upper()] = (
-        {'exchange': venue},
-        {},
-    )
-

 # exchanges we don't support at the moment due to not knowing
 # how to do symbol-contract lookup correctly likely due
 # to not having the data feeds subscribed.
 _exch_skip_list = {
-
     'ASX',  # aussie stocks
     'MEXI',  # mexican stocks
-
-    # no idea
-    'VALUE',
-    'FUNDSERV',
-    'SWB2',
+    'VALUE',  # no idea
 }

 # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
@@ -286,29 +261,27 @@ class Client:

     # NOTE: the ib.client here is "throttled" to 45 rps by default

-    async def trades(self) -> dict[str, Any]:
-        '''
-        Return list of trade-fills from current session in ``dict``.
-
-        '''
-        fills: list[Fill] = self.ib.fills()
-        norm_fills: list[dict] = []
+    async def trades(
+        self,
+        # api_only: bool = False,
+
+    ) -> dict[str, Any]:
+
+        # orders = await self.ib.reqCompletedOrdersAsync(
+        #     apiOnly=api_only
+        # )
+        fills = await self.ib.reqExecutionsAsync()
+        norm_fills = []
         for fill in fills:
             fill = fill._asdict()  # namedtuple
-            for key, val in fill.items():
-                match val:
-                    case Contract() | Execution() | CommissionReport():
-                        fill[key] = asdict(val)
+            for key, val in fill.copy().items():
+                if isinstance(val, Contract):
+                    fill[key] = asdict(val)

             norm_fills.append(fill)

         return norm_fills

-    async def orders(self) -> list[Order]:
-        return await self.ib.reqAllOpenOrdersAsync(
-            apiOnly=False,
-        )
-
     async def bars(
         self,
         fqsn: str,
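The drop_pydantic side leans on 3.10 structural pattern matching (class patterns) to normalize the ``ib_insync`` objects nested in each fill. A rough stand-in sketch — the ``Contract`` here is a plain dataclass, not the real ib_insync type:

    from dataclasses import dataclass, asdict

    @dataclass
    class Contract:
        symbol: str

    fill = {'contract': Contract('mnq'), 'time': 1654801166.0}

    for key, val in fill.items():
        match val:
            case Contract():
                # any Contract instance matches; flatten to a plain dict
                fill[key] = asdict(val)
            case _:
                pass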
@@ -510,14 +483,6 @@ class Client:

         return con

-    async def get_con(
-        self,
-        conid: int,
-    ) -> Contract:
-        return await self.ib.qualifyContractsAsync(
-            ibis.Contract(conId=conid)
-        )
-
     async def find_contract(
         self,
         pattern: str,
@@ -588,7 +553,7 @@ class Client:

         # commodities
         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
-            con_kwargs, bars_kwargs = _adhoc_symbol_map[sym]
+            con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
             con = ibis.Commodity(**con_kwargs)
             con.bars_kwargs = bars_kwargs
@@ -846,23 +811,10 @@ _scan_ignore: set[tuple[str, int]] = set()

 def get_config() -> dict[str, Any]:

-    conf, path = config.load('brokers')
+    conf, path = config.load()

     section = conf.get('ib')

-    accounts = section.get('accounts')
-    if not accounts:
-        raise ValueError(
-            'brokers.toml -> `ib.accounts` must be defined\n'
-            f'location: {path}'
-        )
-
-    names = list(accounts.keys())
-    accts = section['accounts'] = bidict(accounts)
-    log.info(
-        f'brokers.toml defines {len(accts)} accounts: '
-        f'{pformat(names)}'
-    )
-
     if section is None:
         log.warning(f'No config section found for ib in {path}')
         return {}
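The ``bidict`` wrapping on the drop_pydantic side is what later allows reverse lookups from ib account numbers back to the user's ``brokers.toml`` names (see the ``accounts_def.inverse[...]`` calls below). A tiny sketch with fake values:

    from bidict import bidict

    accounts = bidict({'margin': 'DU999999'})  # toml name -> api account
    assert accounts['margin'] == 'DU999999'
    assert accounts.inverse['DU999999'] == 'margin'  # reverse lookup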
@@ -1038,7 +990,7 @@ async def load_aio_clients(
         for acct, client in _accounts2clients.items():
             log.info(f'Disconnecting {acct}@{client}')
             client.ib.disconnect()
-            _client_cache.pop((host, port), None)
+            _client_cache.pop((host, port))


 async def load_clients_for_trio(
@@ -1067,6 +1019,9 @@ async def load_clients_for_trio(
         await asyncio.sleep(float('inf'))


+_proxies: dict[str, MethodProxy] = {}
+
+
 @acm
 async def open_client_proxies() -> tuple[
     dict[str, MethodProxy],
@@ -1089,14 +1044,13 @@ async def open_client_proxies() -> tuple[
     if cache_hit:
         log.info(f'Re-using cached clients: {clients}')

-    proxies = {}
     for acct_name, client in clients.items():
         proxy = await stack.enter_async_context(
             open_client_proxy(client),
         )
-        proxies[acct_name] = proxy
+        _proxies[acct_name] = proxy

-    yield proxies, clients
+    yield _proxies, clients


 def get_preferred_data_client(
@@ -1245,13 +1199,11 @@ async def open_client_proxy(
     event_table = {}

     async with (
-
         to_asyncio.open_channel_from(
             open_aio_client_method_relay,
             client=client,
             event_consumers=event_table,
         ) as (first, chan),
-
         trio.open_nursery() as relay_n,
     ):
@@ -26,10 +26,8 @@ from typing import (
     Any,
     Optional,
-    AsyncIterator,
-    Union,
 )

 from bidict import bidict
 import trio
 from trio_typing import TaskStatus
 import tractor
@@ -44,13 +42,10 @@ from ib_insync.order import (
-from ib_insync.objects import (
-    Fill,
-    Execution,
-    CommissionReport,
-)
+from ib_insync.objects import Position
 import pendulum

 from piker import config
-from piker import pp
 from piker.log import get_console_log
 from piker.clearing._messages import (
     BrokerdOrder,
@@ -61,16 +56,13 @@ from piker.clearing._messages import (
     BrokerdFill,
     BrokerdError,
 )
-from piker.data._source import Symbol
 from .api import (
     _accounts2clients,
-    # _adhoc_futes_set,
-    _adhoc_symbol_map,
+    _adhoc_futes_set,
     log,
     get_config,
     open_client_proxies,
     Client,
     MethodProxy,
 )
@@ -88,39 +80,29 @@ def pack_position(
     # TODO: lookup fqsn even for derivs.
     symbol = con.symbol.lower()

-    # try our best to figure out the exchange / venue
     exch = (con.primaryExchange or con.exchange).lower()
+    symkey = '.'.join((symbol, exch))
     if not exch:
-        # for wtv cucked reason some futes don't show their
-        # exchange (like CL.NYMEX) ...
-        entry = _adhoc_symbol_map.get(
-            con.symbol or con.localSymbol
-        )
-        if entry:
-            meta, kwargs = entry
-            cid = meta.get('conId')
-            if cid:
-                assert con.conId == meta['conId']
-            exch = meta['exchange']
-
-    assert exch, f'No clue:\n {con}'
-    fqsn = '.'.join((symbol, exch))
+        # attempt to lookup the symbol from our
+        # hacked set..
+        for sym in _adhoc_futes_set:
+            if symbol in sym:
+                symkey = sym
+                break

     expiry = con.lastTradeDateOrContractMonth
     if expiry:
-        fqsn += f'.{expiry}'
+        symkey += f'.{expiry}'

     # TODO: options contracts into a sane format..
-    return (
-        con.conId,
-        BrokerdPosition(
+    return BrokerdPosition(
         broker='ib',
         account=pos.account,
-        symbol=fqsn,
+        symbol=symkey,
         currency=con.currency,
         size=float(pos.position),
         avg_price=float(pos.avgCost) / float(con.multiplier or 1.0),
-        ),
-    )
+    )
@@ -148,7 +130,7 @@ async def handle_order_requests(
             oid=request_msg['oid'],
             symbol=request_msg['symbol'],
             reason=f'No account found: `{account}` ?',
-        ))
+        ).dict())
         continue

     client = _accounts2clients.get(account)
@@ -161,7 +143,7 @@ async def handle_order_requests(
             oid=request_msg['oid'],
             symbol=request_msg['symbol'],
             reason=f'No api client loaded for account: `{account}` ?',
-        ))
+        ).dict())
         continue

     if action in {'buy', 'sell'}:
@@ -188,7 +170,7 @@ async def handle_order_requests(
                 oid=request_msg['oid'],
                 symbol=request_msg['symbol'],
                 reason='Order already active?',
-            ))
+            ).dict())

         # deliver ack that order has been submitted to broker routing
         await ems_order_stream.send(
@@ -197,8 +179,9 @@ async def handle_order_requests(
                 oid=order.oid,
                 # broker specific request id
                 reqid=reqid,
                 time_ns=time.time_ns(),
-            )
+                account=account,
+            ).dict()
         )

         elif action == 'cancel':
@@ -222,35 +205,19 @@ async def recv_trade_updates(
     # sync with trio task
     to_trio.send_nowait(None)

-    def push_tradesies(
-        eventkit_obj,
-        obj,
-        fill: Optional[Fill] = None,
-        report: Optional[CommissionReport] = None,
-    ):
-        '''
-        Push events to trio task.
-
-        '''
-        match eventkit_obj.name():
-
-            case 'orderStatusEvent':
-                item = ('status', obj)
-
-            case 'commissionReportEvent':
-                assert report
-                item = ('cost', report)
-
-            case 'execDetailsEvent':
-                # execution details event
-                item = ('fill', (obj, fill))
-
-            case 'positionEvent':
-                item = ('position', obj)
-
-            case _:
-                log.error(f'Error unknown event {obj}')
-                return
+    def push_tradesies(eventkit_obj, obj, fill=None):
+        """Push events to trio task.
+
+        """
+        if fill is not None:
+            # execution details event
+            item = ('fill', (obj, fill))
+
+        elif eventkit_obj.name() == 'positionEvent':
+            item = ('position', obj)
+
+        else:
+            item = ('status', obj)

         log.info(f'eventkit event ->\n{pformat(item)}')
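The drop_pydantic side swaps the old if/elif chain for a 3.10 ``match`` on the eventkit emitter's name. A condensed, hypothetical sketch of the same dispatch shape:

    def route(eventkit_name: str, obj, fill=None, report=None):
        match eventkit_name:
            case 'orderStatusEvent':
                return ('status', obj)
            case 'execDetailsEvent':
                return ('fill', (obj, fill))
            case 'positionEvent':
                return ('position', obj)
            case _:
                # unknown events are logged and dropped upstream
                return None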
@@ -266,15 +233,15 @@ async def recv_trade_updates(
         'execDetailsEvent',  # all "fill" updates
         'positionEvent',  # avg price updates per symbol per account

-        # 'commissionReportEvent',
-        # XXX: ugh, it is a separate event from IB and it's
-        # emitted as follows:
-        # self.ib.commissionReportEvent.emit(trade, fill, report)
+        'commissionReportEvent',

         # XXX: not sure yet if we need these
         # 'updatePortfolioEvent',

-        # XXX: these all seem to be weird ib_insync internal
+        # XXX: these all seem to be weird ib_insync intrernal
         # events that we probably don't care that much about
         # given the internal design is wonky af..
         # 'newOrderEvent',
@@ -290,149 +257,6 @@ async def recv_trade_updates(
     await client.ib.disconnectedEvent


-async def update_ledger_from_api_trades(
-    trade_entries: list[dict[str, Any]],
-    client: Union[Client, MethodProxy],
-
-) -> tuple[
-    dict[str, pp.Transaction],
-    dict[str, dict],
-]:
-
-    conf = get_config()
-
-    # XXX; ERRGGG..
-    # pack in the "primary/listing exchange" value from a
-    # contract lookup since it seems this isn't available by
-    # default from the `.fills()` method endpoint...
-    for entry in trade_entries:
-        condict = entry['contract']
-        conid = condict['conId']
-        pexch = condict['primaryExchange']
-
-        if not pexch:
-            cons = await client.get_con(conid=conid)
-            if cons:
-                con = cons[0]
-                pexch = con.primaryExchange or con.exchange
-            else:
-                # for futes it seems like the primary is always empty?
-                pexch = condict['exchange']
-
-        entry['listingExchange'] = pexch
-
-    entries = trades_to_ledger_entries(
-        conf['accounts'].inverse,
-        trade_entries,
-    )
-
-    # write recent session's trades to the user's (local) ledger file.
-    records: dict[str, pp.Transactions] = {}
-
-    for acctid, trades_by_id in entries.items():
-        # normalize to transaction form
-        records[acctid] = norm_trade_records(trades_by_id)
-
-    return records, entries
-
-
-async def update_and_audit_msgs(
-    acctid: str,  # no `ib.` prefix is required!
-    pps: list[pp.Position],
-    cids2pps: dict[tuple[str, int], BrokerdPosition],
-    validate: bool = False,
-
-) -> list[BrokerdPosition]:
-
-    msgs: list[BrokerdPosition] = []
-    # pps: dict[int, pp.Position] = {}
-
-    for p in pps:
-        bsuid = p.bsuid
-
-        # build trade-session-actor local table
-        # of pps from unique symbol ids.
-        # pps[bsuid] = p
-
-        # retreive equivalent ib reported position message
-        # for comparison/audit versus the piker equivalent
-        # breakeven pp calcs.
-        ibppmsg = cids2pps.get((acctid, bsuid))
-
-        if ibppmsg:
-            msg = BrokerdPosition(
-                broker='ib',
-
-                # XXX: ok so this is annoying, we're relaying
-                # an account name with the backend suffix prefixed
-                # but when reading accounts from ledgers we don't
-                # need it and/or it's prefixed in the section
-                # table..
-                account=ibppmsg.account,
-                # XXX: the `.ib` is stripped..?
-                symbol=ibppmsg.symbol,
-                currency=ibppmsg.currency,
-                size=p.size,
-                avg_price=p.be_price,
-            )
-            msgs.append(msg)
-
-            if validate:
-                ibsize = ibppmsg.size
-                pikersize = msg.size
-                diff = pikersize - ibsize
-
-                # if ib reports a lesser pp it's not as bad since we can
-                # presume we're at least not more in the shit then we
-                # thought.
-                if diff:
-                    raise ValueError(
-                        f'POSITION MISMATCH ib <-> piker ledger:\n'
-                        f'ib: {ibppmsg}\n'
-                        f'piker: {msg}\n'
-                        'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?'
-                    )
-                    msg.size = ibsize
-
-                if ibppmsg.avg_price != msg.avg_price:
-
-                    # TODO: make this a "propoganda" log level?
-                    log.warning(
-                        'The mega-cucks at IB want you to believe with their '
-                        f'"FIFO" positioning for {msg.symbol}:\n'
-                        f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n'
-                        f'piker, LIFO breakeven PnL price: {msg.avg_price}'
-                    )
-
-        else:
-            # make brand new message
-            msg = BrokerdPosition(
-                broker='ib',
-
-                # XXX: ok so this is annoying, we're relaying
-                # an account name with the backend suffix prefixed
-                # but when reading accounts from ledgers we don't
-                # need it and/or it's prefixed in the section
-                # table.. we should just strip this from the message
-                # right since `.broker` is already included?
-                account=f'ib.{acctid}',
-                # XXX: the `.ib` is stripped..?
-                symbol=p.symbol.front_fqsn(),
-                # currency=ibppmsg.currency,
-                size=p.size,
-                avg_price=p.be_price,
-            )
-            if validate and p.size:
-                raise ValueError(
-                    f'UNEXPECTED POSITION ib <-> piker ledger:\n'
-                    f'piker: {msg}\n'
-                    'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?'
-                )
-            msgs.append(msg)
-
-    return msgs
-
-
 @tractor.context
 async def trades_dialogue(
@@ -453,14 +277,6 @@ async def trades_dialogue(
     accounts = set()
     clients: list[tuple[Client, trio.MemoryReceiveChannel]] = []

-    # TODO: this causes a massive tractor bug when you run marketstored
-    # with ``--tsdb``... you should get:
-    # - first error the assertion
-    # - chart should get that error and die
-    # - pikerd goes to debugger again from trio nursery multi-error
-    # - hitting final control-c to kill daemon will lead to hang
-    # assert 0
-
    async with (
        trio.open_nursery() as nurse,
        open_client_proxies() as (proxies, aioclients),
@@ -490,83 +306,22 @@ async def trades_dialogue(
             assert account in accounts_def
             accounts.add(account)

-        cids2pps: dict[str, BrokerdPosition] = {}
-        update_records: dict[str, bidict] = {}
-
-        # process pp value reported from ib's system. we only use these
-        # to cross-check sizing since average pricing on their end uses
-        # the so called (bs) "FIFO" style which more or less results in
-        # a price that's not useful for traders who want to not lose
-        # money.. xb
         for client in aioclients.values():
             for pos in client.positions():
-
-                cid, msg = pack_position(pos)
-                acctid = msg.account = accounts_def.inverse[msg.account]
-                acctid = acctid.strip('ib.')
-                cids2pps[(acctid, cid)] = msg
+                msg = pack_position(pos)
+                msg.account = accounts_def.inverse[msg.account]

                 assert msg.account in accounts, (
                     f'Position for unknown account: {msg.account}')

-                # collect all ib-pp reported positions so that we can be
-                # sure know which positions to update from the ledger if
-                # any are missing from the ``pps.toml``
-                update_records.setdefault(acctid, bidict())[cid] = msg.symbol
+                all_positions.append(msg.dict())

-        # update trades ledgers for all accounts from
-        # connected api clients which report trades for **this session**.
-        new_trades = {}
-        for account, proxy in proxies.items():
-            trades = await proxy.trades()
-            (
-                records_by_acct,
-                ledger_entries,
-            ) = await update_ledger_from_api_trades(
-                trades,
-                proxy,
-            )
-            new_trades.update(records_by_acct)
+        trades: list[dict] = []
+        for proxy in proxies.values():
+            trades.append(await proxy.trades())

-        for acctid, trans in new_trades.items():
-            for t in trans:
-                bsuid = t.bsuid
-                if bsuid in update_records:
-                    assert update_records[bsuid] == t.fqsn
-                else:
-                    update_records.setdefault(acctid, bidict())[bsuid] = t.fqsn
-
-        # load all positions from `pps.toml`, cross check with ib's
-        # positions data, and relay re-formatted pps as msgs to the ems.
-        # __2 cases__:
-        # - new trades have taken place this session that we want to
-        #   always reprocess indempotently,
-        # - no new trades yet but we want to reload and audit any
-        #   positions reported by ib's sys that may not yet be in
-        #   piker's ``pps.toml`` state-file.
-        for acctid, to_update in update_records.items():
-            trans = new_trades.get(acctid)
-            active, closed = pp.update_pps_conf(
-                'ib',
-                acctid,
-                trade_records=trans,
-                ledger_reload=to_update,
-            )
-            for pps in [active, closed]:
-                msgs = await update_and_audit_msgs(
-                    acctid,
-                    pps.values(),
-                    cids2pps,
-                    validate=True,
-                )
-                all_positions.extend(msg for msg in msgs)
-
-        if not all_positions and cids2pps:
-            raise RuntimeError(
-                'Positions reported by ib but not found in `pps.toml`!?\n'
-                f'{pformat(cids2pps)}'
-            )
-
-        # log.info(f'Loaded {len(trades)} from this session')
+        log.info(f'Loaded {len(trades)} from this session')
         # TODO: write trades to local ``trades.toml``
         # - use above per-session trades data and write to local file
         # - get the "flex reports" working and pull historical data and
@@ -577,16 +332,6 @@ async def trades_dialogue(
         tuple(name for name in accounts_def if name in accounts),
     ))

-    # TODO: maybe just write on teardown?
-    # we might also want to delegate a specific actor for
-    # ledger writing / reading for speed?
-
-    # write ledger with all new trades **AFTER** we've updated the
-    # `pps.toml` from the original ledger state!
-    for acctid, trades_by_id in ledger_entries.items():
-        with pp.open_trade_ledger('ib', acctid) as ledger:
-            ledger.update(trades_by_id)
-
     async with (
         ctx.open_stream() as ems_stream,
         trio.open_nursery() as n,
@@ -600,96 +345,32 @@ async def trades_dialogue(
             deliver_trade_events,
             stream,
             ems_stream,
-            accounts_def,
-            cids2pps,
-            proxies,
+            accounts_def
         )

         # block until cancelled
         await trio.sleep_forever()


-async def emit_pp_update(
-    ems_stream: tractor.MsgStream,
-    trade_entry: dict,
-    accounts_def: bidict,
-    proxies: dict,
-    cids2pps: dict,
-
-) -> None:
-
-    # compute and relay incrementally updated piker pp
-    acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']]
-    proxy = proxies[acctid]
-
-    acctname = acctid.strip('ib.')
-    records_by_acct, ledger_entries = await update_ledger_from_api_trades(
-        [trade_entry],
-        proxy,
-    )
-    records = records_by_acct[acctname]
-    r = records[0]
-
-    # update and load all positions from `pps.toml`, cross check with
-    # ib's positions data, and relay re-formatted pps as msgs to the
-    # ems. we report both the open and closed updates in one map since
-    # for incremental update we may have just fully closed a pp and need
-    # to relay that msg as well!
-    active, closed = pp.update_pps_conf(
-        'ib',
-        acctname,
-        trade_records=records,
-        ledger_reload={r.bsuid: r.fqsn},
-    )
-
-    # NOTE: write ledger with all new trades **AFTER** we've updated the
-    # `pps.toml` from the original ledger state!
-    for acctid, trades_by_id in ledger_entries.items():
-        with pp.open_trade_ledger('ib', acctid) as ledger:
-            ledger.update(trades_by_id)
-
-    for pos in filter(
-        bool,
-        [active.get(r.bsuid), closed.get(r.bsuid)]
-    ):
-        msgs = await update_and_audit_msgs(
-            acctname,
-            [pos],
-            cids2pps,
-
-            # ib pp event might not have arrived yet
-            validate=False,
-        )
-        if msgs:
-            msg = msgs[0]
-            break
-
-    await ems_stream.send(msg)
-
-
 async def deliver_trade_events(
-
     trade_event_stream: trio.MemoryReceiveChannel,
     ems_stream: tractor.MsgStream,
-    accounts_def: dict[str, str],  # eg. `'ib.main'` -> `'DU999999'`
-    cids2pps: dict[tuple[str, str], BrokerdPosition],
-    proxies: dict[str, MethodProxy],
+    accounts_def: dict[str, str],

 ) -> None:
-    '''
-    Format and relay all trade events for a given client to emsd.
+    '''Format and relay all trade events for a given client to the EMS.

     '''
     action_map = {'BOT': 'buy', 'SLD': 'sell'}
-    ids2fills: dict[str, dict] = {}

     # TODO: for some reason we can receive a ``None`` here when the
     # ib-gw goes down? Not sure exactly how that's happening looking
     # at the eventkit code above but we should probably handle it...
     async for event_name, item in trade_event_stream:

         log.info(f'ib sending {event_name}:\n{pformat(item)}')

-        match event_name:
         # TODO: templating the ib statuses in comparison with other
         # brokers is likely the way to go:
         # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313
@@ -713,7 +394,7 @@ async def deliver_trade_events(
         # reqId 1550: Order held while securities are located.'),
         # status='PreSubmitted', message='')],

-            case 'status':
+        if event_name == 'status':

             # XXX: begin normalization of nonsense ib_insync internal
             # object-state tracking representations...
@@ -742,9 +423,8 @@ async def deliver_trade_events(

                 broker_details={'name': 'ib'},
             )
-            await ems_stream.send(msg)

-        case 'fill':
+        elif event_name == 'fill':

             # for wtv reason this is a separate event type
             # from IB, not sure why it's needed other then for extra
@@ -758,35 +438,17 @@ async def deliver_trade_events(
             # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations
             trade: Trade
             fill: Fill
-
-            # TODO: maybe we can use matching to better handle these cases.
             trade, fill = item
             execu: Execution = fill.execution
             execid = execu.execId

-            # TODO:
-            # - normalize out commissions details?
-            # - this is the same as the unpacking loop above in
-            #   ``trades_to_ledger_entries()`` no?
-            trade_entry = ids2fills.setdefault(execid, {})
-            cost_already_rx = bool(trade_entry)
-
-            # if the costs report was already received this
-            # should be not empty right?
-            comms = fill.commissionReport.commission
-            if cost_already_rx:
-                assert comms
-
-            trade_entry.update(
-                {
-                    'contract': asdict(fill.contract),
-                    'execution': asdict(fill.execution),
-                    # 'commissionReport': asdict(fill.commissionReport),
-                    # supposedly server fill time?
-                    'broker_time': execu.time,
-                    'name': 'ib',
-                }
-            )
+            # TODO: normalize out commissions details?
+            details = {
+                'contract': asdict(fill.contract),
+                'execution': asdict(fill.execution),
+                'commissions': asdict(fill.commissionReport),
+                'broker_time': execu.time,  # supposedly server fill time
+                'name': 'ib',
+            }

             msg = BrokerdFill(
                 # should match the value returned from `.submit_limit()`
@@ -797,68 +459,14 @@ async def deliver_trade_events(
                 size=execu.shares,
                 price=execu.price,

-                broker_details=trade_entry,
+                broker_details=details,
                 # XXX: required by order mode currently
-                broker_time=trade_entry['broker_time'],
+                broker_time=details['broker_time'],

             )
-            await ems_stream.send(msg)

-            # 2 cases:
-            # - fill comes first or
-            # - comms report comes first
-            comms = fill.commissionReport.commission
-            if comms:
-                # UGHHH since the commision report object might be
-                # filled in **after** we already serialized to dict..
-                # def need something better for all this.
-                trade_entry.update(
-                    {'commissionReport': asdict(fill.commissionReport)}
-                )
-
-            if comms or cost_already_rx:
-                # only send a pp update once we have a cost report
-                await emit_pp_update(
-                    ems_stream,
-                    trade_entry,
-                    accounts_def,
-                    proxies,
-                    cids2pps,
-                )
-
-        case 'cost':
-
-            cr: CommissionReport = item
-            execid = cr.execId
-
-            trade_entry = ids2fills.setdefault(execid, {})
-            fill_already_rx = bool(trade_entry)
-
-            # only fire a pp msg update if,
-            # - we haven't already
-            # - the fill event has already arrived
-            #   but it didn't yet have a commision report
-            #   which we fill in now.
-            if (
-                fill_already_rx
-                and 'commissionReport' not in trade_entry
-            ):
-                # no fill msg has arrived yet so just fill out the
-                # cost report for now and when the fill arrives a pp
-                # msg can be emitted.
-                trade_entry.update(
-                    {'commissionReport': asdict(cr)}
-                )
-
-                await emit_pp_update(
-                    ems_stream,
-                    trade_entry,
-                    accounts_def,
-                    proxies,
-                    cids2pps,
-                )
-
-        case 'error':
+        elif event_name == 'error':
             err: dict = item

             # f$#$% gawd dammit insync..
@@ -872,15 +480,13 @@ async def deliver_trade_events(
             # TODO: what schema for this msg if we're going to make it
             # portable across all backends?
             # msg = BrokerdError(**err)
             continue

-        case 'position':
-
-            cid, msg = pack_position(item)
-            # acctid = msg.account = accounts_def.inverse[msg.account]
-            # cuck ib and it's shitty fifo sys for pps!
-            # await ems_stream.send(msg)
+        elif event_name == 'position':
+            msg = pack_position(item)
+            msg.account = accounts_def.inverse[msg.account]

-        case 'event':
+        elif event_name == 'event':

             # it's either a general system status event or an external
             # trade event?
@@ -890,7 +496,9 @@ async def deliver_trade_events(
             # level...
             # reqid = item.get('reqid', 0)
             # if getattr(msg, 'reqid', 0) < -1:
-            #     log.info(f"TWS triggered trade\n{pformat(msg)}")
+            #     log.info(f"TWS triggered trade\n{pformat(msg.dict())}")
+
+            continue

             # msg.reqid = 'tws-' + str(-1 * reqid)
@@ -899,200 +507,19 @@ async def deliver_trade_events(
             # considering multiplayer/group trades tracking
             # msg.broker_details['external_src'] = 'tws'

-        case _:
-            log.error(f'WTF: {event_name}: {item}')
-
-
-def norm_trade_records(
-    ledger: dict[str, Any],
-
-) -> list[pp.Transaction]:
-    '''
-    Normalize a flex report or API retrieved executions
-    ledger into our standard record format.
-
-    '''
-    records: list[pp.Transaction] = []
-
-    for tid, record in ledger.items():
-
-        conid = record.get('conId') or record['conid']
-        comms = record.get('commission') or -1*record['ibCommission']
-        price = record.get('price') or record['tradePrice']
-
-        # the api doesn't do the -/+ on the quantity for you but flex
-        # records do.. are you fucking serious ib...!?
-        size = record.get('quantity') or record['shares'] * {
-            'BOT': 1,
-            'SLD': -1,
-        }[record['side']]
-
-        exch = record['exchange']
-        lexch = record.get('listingExchange')
-
-        suffix = lexch or exch
-        symbol = record['symbol']
-
-        # likely an opts contract record from a flex report..
-        # TODO: no idea how to parse ^ the strike part from flex..
-        # (00010000 any, or 00007500 tsla, ..)
-        # we probably must do the contract lookup for this?
-        if ' ' in symbol or '--' in exch:
-            underlying, _, tail = symbol.partition(' ')
-            suffix = exch = 'opt'
-            expiry = tail[:6]
-            # otype = tail[6]
-            # strike = tail[7:]
-
-            print(f'skipping opts contract {symbol}')
-            continue
-
-        # timestamping is way different in API records
-        date = record.get('date')
-        if not date:
-            # probably a flex record with a wonky non-std timestamp..
-            date, ts = record['dateTime'].split(';')
-            dt = pendulum.parse(date)
-            ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
-            tsdt = pendulum.parse(ts)
-            dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
-
-        else:
-            # epoch_dt = pendulum.from_timestamp(record.get('time'))
-            dt = pendulum.parse(date)
-
-        # special handling of symbol extraction from
-        # flex records using some ad-hoc schema parsing.
-        instr = record.get('assetCategory')
-        if instr == 'FUT':
-            symbol = record['description'][:3]
-
-        # try to build out piker fqsn from record.
-        expiry = record.get(
-            'lastTradeDateOrContractMonth') or record.get('expiry')
-        if expiry:
-            expiry = str(expiry).strip(' ')
-            suffix = f'{exch}.{expiry}'
-            expiry = pendulum.parse(expiry)
-
-        fqsn = Symbol.from_fqsn(
-            fqsn=f'{symbol}.{suffix}.ib',
-            info={},
-        ).front_fqsn().rstrip('.ib')
-
-        # NOTE: for flex records the normal fields for defining an fqsn
-        # sometimes won't be available so we rely on two approaches for
-        # the "reverse lookup" of piker style fqsn keys:
-        # - when dealing with API trade records received from
-        #   `IB.trades()` we do a contract lookup at he time of processing
-        # - when dealing with flex records, it is assumed the record
-        #   is at least a day old and thus the TWS position reporting system
-        #   should already have entries if the pps are still open, in
-        #   which case, we can pull the fqsn from that table (see
-        #   `trades_dialogue()` above).
-
-        records.append(pp.Transaction(
-            fqsn=fqsn,
-            tid=tid,
-            size=size,
-            price=price,
-            cost=comms,
-            dt=dt,
-            expiry=expiry,
-            bsuid=conid,
-        ))
-
-    return records
-
-
-def trades_to_ledger_entries(
-    accounts: bidict,
-    trade_entries: list[object],
-    source_type: str = 'api',
-
-) -> dict:
-    '''
-    Convert either of API execution objects or flex report
-    entry objects into ``dict`` form, pretty much straight up
-    without modification.
-
-    '''
-    trades_by_account = {}
-
-    for t in trade_entries:
-        if source_type == 'flex':
-            entry = t.__dict__
-
-            # XXX: LOL apparently ``toml`` has a bug
-            # where a section key error will show up in the write
-            # if you leave a table key as an `int`? So i guess
-            # cast to strs for all keys..
-
-            # oddly for some so-called "BookTrade" entries
-            # this field seems to be blank, no cuckin clue.
-            # trade['ibExecID']
-            tid = str(entry.get('ibExecID') or entry['tradeID'])
-            # date = str(entry['tradeDate'])
-
-            # XXX: is it going to cause problems if a account name
-            # get's lost? The user should be able to find it based
-            # on the actual exec history right?
-            acctid = accounts[str(entry['accountId'])]
-
-        elif source_type == 'api':
-            # NOTE: example of schema we pull from the API client.
-            # {
-            #     'commissionReport': CommissionReport(...
-            #     'contract': {...
-            #     'execution': Execution(...
-            #     'time': 1654801166.0
-            # }
-
-            # flatten all sub-dicts and values into one top level entry.
-            entry = {}
-            for section, val in t.items():
-                match section:
-                    case 'contract' | 'execution' | 'commissionReport':
-                        # sub-dict cases
-                        entry.update(val)
-
-                    case 'time':
-                        # ib has wack ns timestamps, or is that us?
-                        continue
-
-                    case _:
-                        entry[section] = val
-
-            tid = str(entry['execId'])
-            dt = pendulum.from_timestamp(entry['time'])
-            # TODO: why isn't this showing seconds in the str?
-            entry['date'] = str(dt)
-            acctid = accounts[entry['acctNumber']]
-
-        if not tid:
-            # this is likely some kind of internal adjustment
-            # transaction, likely one of the following:
-            # - an expiry event that will show a "book trade" indicating
-            #   some adjustment to cash balances: zeroing or itm settle.
-            # - a manual cash balance position adjustment likely done by
-            #   the user from the accounts window in TWS where they can
-            #   manually set the avg price and size:
-            #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
-            log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
-            continue
-
-        trades_by_account.setdefault(
-            acctid, {}
-        )[tid] = entry
-
-    return trades_by_account
+        # XXX: we always serialize to a dict for msgpack
+        # translations, ideally we can move to an msgspec (or other)
+        # encoder # that can be enabled in ``tractor`` ahead of
+        # time so we can pass through the message types directly.
+        await ems_stream.send(msg.dict())


 def load_flex_trades(
     path: Optional[str] = None,

-) -> dict[str, Any]:
+) -> dict[str, str]:

     from pprint import pprint
     from ib_insync import flexreport, util

     conf = get_config()
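One hedged aside on the flex-timestamp handling in the removed ``norm_trade_records()`` above: pendulum datetimes are immutable, so ``.set()`` returns a new instance rather than mutating in place, and the result has to be re-assigned for the time-of-day to actually stick. A sketch mirroring that snippet's own parsing pattern:

    import pendulum

    dt = pendulum.parse('2022-06-09')
    tsdt = pendulum.parse('17:32:45')
    # re-assign; calling .set() without assignment discards the result
    dt = dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)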
@@ -1128,38 +555,36 @@ def load_flex_trades(
     report = flexreport.FlexReport(path=path)

     trade_entries = report.extract('Trade')
-    ln = len(trade_entries)
-    # log.info(f'Loaded {ln} trades from flex query')
-    print(f'Loaded {ln} trades from flex query')
-
-    trades_by_account = trades_to_ledger_entries(
-        # get reverse map to user account names
-        conf['accounts'].inverse,
-        trade_entries,
-        source_type='flex',
-    )
-
-    ledgers = {}
-    for acctid, trades_by_id in trades_by_account.items():
-        with pp.open_trade_ledger('ib', acctid) as ledger:
-            ledger.update(trades_by_id)
-
-        ledgers[acctid] = ledger
-
-    return ledgers
+    trades = {
+        # XXX: LOL apparently ``toml`` has a bug
+        # where a section key error will show up in the write
+        # if you leave this as an ``int``?
+        str(t.__dict__['tradeID']): t.__dict__
+        for t in trade_entries
+    }
+
+    ln = len(trades)
+    log.info(f'Loaded {ln} trades from flex query')
+
+    trades_by_account = {}
+    for tid, trade in trades.items():
+        trades_by_account.setdefault(
+            # oddly for some so-called "BookTrade" entries
+            # this field seems to be blank, no cuckin clue.
+            # trade['ibExecID']
+            str(trade['accountId']), {}
+        )[tid] = trade
+
+    section = {'ib': trades_by_account}
+    pprint(section)
+
+    # TODO: load the config first and append in
+    # the new trades loaded here..
+    try:
+        config.write(section, 'trades')
+    except KeyError:
+        import pdbpp; pdbpp.set_trace()  # noqa


 if __name__ == '__main__':
     import sys
     import os

     args = sys.argv
     if len(args) > 1:
         args = args[1:]
         for arg in args:
             path = os.path.abspath(arg)
             load_flex_trades(path=path)
     else:
         # expect brokers.toml to have an entry and
         # pull from the web service.
         load_flex_trades()
@@ -217,8 +217,8 @@ async def get_bars(
         )

         elif (
-            err.code == 162 and
-            'HMDS query returned no data' in err.message
+            err.code == 162
+            and 'HMDS query returned no data' in err.message
         ):
             # XXX: this is now done in the storage mgmt layer
             # and we shouldn't implicitly decrement the frame dt
@@ -237,13 +237,6 @@ async def get_bars(
                 frame_size=2000,
             )

-        # elif (
-        #     err.code == 162 and
-        #     'Trading TWS session is connected from a different IP address' in err.message
-        # ):
-        #     log.warning("ignoring ip address warning")
-        #     continue
-
         elif _pacing in msg:

             log.warning(
@@ -916,17 +909,17 @@ async def open_symbol_search(
            # trigger async request
            await trio.sleep(0)

-           # # match against our ad-hoc set immediately
-           # adhoc_matches = fuzzy.extractBests(
-           #     pattern,
-           #     list(_adhoc_futes_set),
-           #     score_cutoff=90,
-           # )
-           # log.info(f'fuzzy matched adhocs: {adhoc_matches}')
-           # adhoc_match_results = {}
-           # if adhoc_matches:
-           #     # TODO: do we need to pull contract details?
-           #     adhoc_match_results = {i[0]: {} for i in adhoc_matches}
+           # match against our ad-hoc set immediately
+           adhoc_matches = fuzzy.extractBests(
+               pattern,
+               list(_adhoc_futes_set),
+               score_cutoff=90,
+           )
+           log.info(f'fuzzy matched adhocs: {adhoc_matches}')
+           adhoc_match_results = {}
+           if adhoc_matches:
+               # TODO: do we need to pull contract details?
+               adhoc_match_results = {i[0]: {} for i in adhoc_matches}

            log.debug(f'fuzzy matching stocks {stock_results}')
            stock_matches = fuzzy.extractBests(
@@ -935,8 +928,7 @@ async def open_symbol_search(
                score_cutoff=50,
            )

-           # matches = adhoc_match_results | {
-           matches = {
+           matches = adhoc_match_results | {
               item[0]: {} for item in stock_matches
           }
           # TODO: we used to deliver contract details
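The ``|`` on the 310_plus side is the PEP 584 dict-union operator (python 3.9+): it merges the ad-hoc fuzzy results with the stock matches, with the right-hand operand winning on key collisions. A minimal sketch with made-up values:

    adhoc_match_results = {'xauusd.cmdty': {}}
    stock_matches = [('tsla.nasdaq', 90)]

    matches = adhoc_match_results | {
        item[0]: {} for item in stock_matches
    }
    # -> {'xauusd.cmdty': {}, 'tsla.nasdaq': {}}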
File diff suppressed because it is too large
@@ -1,61 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-'''
-Kraken backend.
-
-Sub-modules within break into the core functionalities:
-
-- ``broker.py`` part for orders / trading endpoints
-- ``feed.py`` for real-time data feed endpoints
-- ``api.py`` for the core API machinery which is ``trio``-ized
-  wrapping around ``ib_insync``.
-
-'''
-
-from piker.log import get_logger
-
-log = get_logger(__name__)
-
-from .api import (
-    get_client,
-)
-from .feed import (
-    open_history_client,
-    open_symbol_search,
-    stream_quotes,
-)
-from .broker import (
-    trades_dialogue,
-    norm_trade_records,
-)
-
-__all__ = [
-    'get_client',
-    'trades_dialogue',
-    'open_history_client',
-    'open_symbol_search',
-    'stream_quotes',
-    'norm_trade_records',
-]
-
-
-# tractor RPC enable arg
-__enable_modules__: list[str] = [
-    'api',
-    'feed',
-    'broker',
-]
@ -1,468 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Kraken web API wrapping.
|
||||
|
||||
'''
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from dataclasses import field
|
||||
from datetime import datetime
|
||||
import itertools
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
Union,
|
||||
)
|
||||
import time
|
||||
|
||||
# import trio
|
||||
# import tractor
|
||||
import pendulum
|
||||
import asks
|
||||
from fuzzywuzzy import process as fuzzy
|
||||
import numpy as np
|
||||
from pydantic.dataclasses import dataclass
|
||||
import urllib.parse
|
||||
import hashlib
|
||||
import hmac
|
||||
import base64
|
||||
|
||||
from piker import config
|
||||
from piker.brokers._util import (
|
||||
resproc,
|
||||
SymbolNotFound,
|
||||
BrokerError,
|
||||
DataThrottle,
|
||||
)
|
||||
from . import log
|
||||
|
||||
# <uri>/<version>/
|
||||
_url = 'https://api.kraken.com/0'
|
||||
|
||||
|
||||
# Broker specific ohlc schema which includes a vwap field
|
||||
_ohlc_dtype = [
|
||||
('index', int),
|
||||
('time', int),
|
||||
('open', float),
|
||||
('high', float),
|
||||
('low', float),
|
||||
('close', float),
|
||||
('volume', float),
|
||||
('count', int),
|
||||
('bar_wap', float),
|
||||
]
|
||||
|
||||
# UI components allow this to be declared such that additional
|
||||
# (historical) fields can be exposed.
|
||||
ohlc_dtype = np.dtype(_ohlc_dtype)
|
||||
|
||||
_show_wap_in_history = True
|
||||
_symbol_info_translation: dict[str, str] = {
|
||||
'tick_decimals': 'pair_decimals',
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class OHLC:
|
||||
'''
|
||||
Description of the flattened OHLC quote format.
|
||||
|
||||
For schema details see:
|
||||
https://docs.kraken.com/websockets/#message-ohlc
|
||||
|
||||
'''
|
||||
chan_id: int # internal kraken id
|
||||
chan_name: str # eg. ohlc-1 (name-interval)
|
||||
pair: str # fx pair
|
||||
time: float # Begin time of interval, in seconds since epoch
|
||||
etime: float # End time of interval, in seconds since epoch
|
||||
open: float # Open price of interval
|
||||
high: float # High price within interval
|
||||
low: float # Low price within interval
|
||||
close: float # Close price of interval
|
||||
vwap: float # Volume weighted average price within interval
|
||||
volume: float # Accumulated volume **within interval**
|
||||
count: int # Number of trades within interval
|
||||
# (sampled) generated tick data
|
||||
ticks: list[Any] = field(default_factory=list)
|
||||
|
||||
|
||||
def get_config() -> dict[str, Any]:
|
||||
|
||||
conf, path = config.load()
|
||||
section = conf.get('kraken')
|
||||
|
||||
if section is None:
|
||||
log.warning(f'No config section found for kraken in {path}')
|
||||
return {}
|
||||
|
||||
return section
|
||||
|
||||
|
||||
def get_kraken_signature(
|
||||
urlpath: str,
|
||||
data: dict[str, Any],
|
||||
secret: str
|
||||
) -> str:
|
||||
postdata = urllib.parse.urlencode(data)
|
||||
encoded = (str(data['nonce']) + postdata).encode()
|
||||
message = urlpath.encode() + hashlib.sha256(encoded).digest()
|
||||
|
||||
mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
|
||||
sigdigest = base64.b64encode(mac.digest())
|
||||
return sigdigest.decode()
|
||||
|
||||
|
||||
class InvalidKey(ValueError):
|
||||
'''
|
||||
EAPI:Invalid key
|
||||
This error is returned when the API key used for the call is
|
||||
either expired or disabled, please review the API key in your
|
||||
Settings -> API tab of account management or generate a new one
|
||||
and update your application.
|
||||
|
||||
'''
|
||||
|
||||
|
||||
class Client:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str = '',
|
||||
api_key: str = '',
|
||||
secret: str = ''
|
||||
) -> None:
|
||||
self._sesh = asks.Session(connections=4)
|
||||
self._sesh.base_location = _url
|
||||
self._sesh.headers.update({
|
||||
'User-Agent':
|
||||
'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
|
||||
})
|
||||
self._pairs: list[str] = []
|
||||
self._name = name
|
||||
self._api_key = api_key
|
||||
self._secret = secret
|
||||
|
||||
@property
|
||||
def pairs(self) -> dict[str, Any]:
|
||||
if self._pairs is None:
|
||||
raise RuntimeError(
|
||||
"Make sure to run `cache_symbols()` on startup!"
|
||||
)
|
||||
# retreive and cache all symbols
|
||||
|
||||
return self._pairs
|
||||
|
||||
async def _public(
|
||||
self,
|
||||
method: str,
|
||||
data: dict,
|
||||
) -> dict[str, Any]:
|
||||
resp = await self._sesh.post(
|
||||
path=f'/public/{method}',
|
||||
json=data,
|
||||
timeout=float('inf')
|
||||
)
|
||||
return resproc(resp, log)
|
||||
|
||||
async def _private(
|
||||
self,
|
||||
method: str,
|
||||
data: dict,
|
||||
uri_path: str
|
||||
) -> dict[str, Any]:
|
||||
headers = {
|
||||
'Content-Type':
|
||||
'application/x-www-form-urlencoded',
|
||||
'API-Key':
|
||||
self._api_key,
|
||||
'API-Sign':
|
||||
get_kraken_signature(uri_path, data, self._secret)
|
||||
}
|
||||
resp = await self._sesh.post(
|
||||
path=f'/private/{method}',
|
||||
data=data,
|
||||
headers=headers,
|
||||
timeout=float('inf')
|
||||
)
|
||||
return resproc(resp, log)
|
||||
|
||||
async def endpoint(
|
||||
self,
|
||||
method: str,
|
||||
data: dict[str, Any]
|
||||
|
||||
) -> dict[str, Any]:
|
||||
uri_path = f'/0/private/{method}'
|
||||
data['nonce'] = str(int(1000*time.time()))
|
||||
return await self._private(method, data, uri_path)
|
||||
|
||||
async def get_trades(
|
||||
self,
|
||||
|
||||
) -> dict[str, Any]:
|
||||
'''
|
||||
Get the trades (aka cleared orders) history from the rest endpoint:
|
||||
https://docs.kraken.com/rest/#operation/getTradeHistory
|
||||
|
||||
'''
|
||||
ofs = 0
|
||||
trades_by_id: dict[str, Any] = {}
|
||||
|
||||
for i in itertools.count():
|
||||
|
||||
# increment 'ofs' pagination offset
|
||||
ofs = i*50
|
||||
|
||||
resp = await self.endpoint(
|
||||
'TradesHistory',
|
||||
{'ofs': ofs},
|
||||
)
|
||||
by_id = resp['result']['trades']
|
||||
trades_by_id.update(by_id)
|
||||
|
||||
# we can get up to 50 results per query
|
||||
if (
|
||||
len(by_id) < 50
|
||||
):
|
||||
err = resp.get('error')
|
||||
if err:
|
||||
raise BrokerError(err)
|
||||
|
||||
# we know we received the max amount of
|
||||
# trade results so there may be more history.
|
||||
# catch the end of the trades
|
||||
count = resp['result']['count']
|
||||
break
|
||||
|
||||
# santity check on update
|
||||
assert count == len(trades_by_id.values())
|
||||
return trades_by_id
|
||||
|
||||
async def submit_limit(
|
||||
self,
|
||||
symbol: str,
|
||||
price: float,
|
||||
action: str,
|
||||
size: float,
|
||||
reqid: str = None,
|
||||
validate: bool = False # set True test call without a real submission
|
||||
|
||||
) -> dict:
|
||||
'''
|
||||
Place an order and return integer request id provided by client.
|
||||
|
||||
'''
|
||||
# Build common data dict for common keys from both endpoints
|
||||
data = {
|
||||
"pair": symbol,
|
||||
"price": str(price),
|
||||
"validate": validate
|
||||
}
|
||||
if reqid is None:
|
||||
# Build order data for kraken api
|
||||
data |= {
|
||||
"ordertype": "limit",
|
||||
"type": action,
|
||||
"volume": str(size),
|
||||
}
|
||||
return await self.endpoint('AddOrder', data)
|
||||
else:
|
||||
# Edit order data for kraken api
|
||||
data["txid"] = reqid
|
||||
return await self.endpoint('EditOrder', data)
|
||||
|
||||
async def submit_cancel(
|
||||
self,
|
||||
reqid: str,
|
||||
) -> dict:
|
||||
'''
|
||||
Send cancel request for order id ``reqid``.
|
||||
|
||||
'''
|
||||
# txid is a transaction id given by kraken
|
||||
return await self.endpoint('CancelOrder', {"txid": reqid})
|
||||
|
||||
    async def symbol_info(
        self,
        pair: Optional[str] = None,
    ):
        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}
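# NOTE: when ``choices`` is a mapping, ``fuzzywuzzy``'s extract helpers
# yield ``(value, score, key)`` triples, which is why the repack above
# indexes ``item[0]``. A hypothetical run with the pair-info trimmed
# down for illustration:
from fuzzywuzzy import process as fuzzy

data = {
    'XXBTZUSD': {'altname': 'XBTUSD'},
    'XETHZUSD': {'altname': 'ETHUSD'},
}
matches = fuzzy.extractBests('xbt', data, score_cutoff=50)
# each entry is (pair_info_dict, score, kraken_key)
print({item[0]['altname']: item[0] for item in matches})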
    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Optional[Union[int, datetime]] = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)
@acm
async def get_client() -> Client:

    section = get_config()
    if section:
        client = Client(
            name=section['key_descr'],
            api_key=section['api_key'],
            secret=section['secret']
        )
    else:
        client = Client()

    # at startup, load all symbols locally for fast search
    await client.cache_symbols()

    yield client
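# NOTE: a hypothetical usage sketch of the client lifecycle; only
# public endpoints are hit so this should work even without a
# `brokers.toml` key entry:
import trio

async def main() -> None:
    async with get_client() as client:
        # symbols were pre-cached on entry by ``cache_symbols()``
        matches = await client.search_symbols('xbt')
        print(f'fuzzy matches: {list(matches)}')

        # last ~12hrs of 1m OHLC bars as a numpy structured array
        bars = await client.bars('XBTUSD')
        print(bars[-1])

trio.run(main)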
def normalize_symbol(
    ticker: str
) -> str:
    '''
    Normalize symbol names to a 3x3 pair.

    '''
    remap = {
        'XXBTZEUR': 'XBTEUR',
        'XXMRZEUR': 'XMREUR',

        # ws versions? pretty weird..
        'XBT/EUR': 'XBTEUR',
        'XMR/EUR': 'XMREUR',
    }
    symlen = len(ticker)
    if symlen != 6:
        # not already in 3x3 form: remap or bail
        try:
            ticker = remap[ticker]
        except KeyError:
            raise ValueError(f'Unhandled symbol: {ticker}')

    return ticker.lower()
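# NOTE: under that reading (6-char tickers pass straight through while
# longer rest/ws style names get remapped) the expected behaviour is:
assert normalize_symbol('XXBTZEUR') == 'xbteur'  # rest api style
assert normalize_symbol('XBT/EUR') == 'xbteur'   # ws style
assert normalize_symbol('XBTEUR') == 'xbteur'    # already 3x3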
@ -1,540 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Order api and machinery

'''
from contextlib import asynccontextmanager as acm
from functools import partial
from itertools import chain
from pprint import pformat
import time
from typing import (
    Any,
    AsyncIterator,
    # Callable,
    # Optional,
    # Union,
)

import pendulum
import trio
import tractor
import wsproto

from piker import pp
from piker.clearing._messages import (
    BrokerdCancel,
    BrokerdError,
    BrokerdFill,
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdPosition,
    BrokerdStatus,
)
from piker.data.types import Struct
from . import log
from .api import (
    Client,
    BrokerError,
    get_client,
    normalize_symbol,
)
from .feed import (
    get_console_log,
    open_autorecon_ws,
    NoBsWs,
    stream_messages,
)


class Trade(Struct):
    '''
    Trade class that helps parse and validate ownTrades stream

    '''
    reqid: str  # kraken order transaction id
    action: str  # buy or sell
    price: float  # price of asset
    size: float  # vol of asset
    broker_time: str  # broker-side timestamp of the clear


async def handle_order_requests(

    client: Client,
    ems_order_stream: tractor.MsgStream,

) -> None:

    request_msg: dict
    order: BrokerdOrder

    async for request_msg in ems_order_stream:
        log.info(
            'Received order request:\n'
            f'{pformat(request_msg)}'
        )

        action = request_msg['action']

        if action in {'buy', 'sell'}:

            account = request_msg['account']
            if account != 'kraken.spot':
                log.error(
                    'This is a kraken account, '
                    'only a `kraken.spot` selection is valid'
                )
                await ems_order_stream.send(BrokerdError(
                    oid=request_msg['oid'],
                    symbol=request_msg['symbol'],

                    # reason=f'Kraken only, No account found: `{account}` ?',
                    reason=(
                        'Kraken only, order mode disabled due to '
                        'https://github.com/pikers/piker/issues/299'
                    ),

                ))
                continue

            # validate
            order = BrokerdOrder(**request_msg)
            # call our client api to submit the order
            resp = await client.submit_limit(
                symbol=order.symbol,
                price=order.price,
                action=order.action,
                size=order.size,
                reqid=order.reqid,
            )

            err = resp['error']
            if err:
                oid = order.oid
                log.error(f'Failed to submit order: {oid}')

                await ems_order_stream.send(
                    BrokerdError(
                        oid=order.oid,
                        reqid=order.reqid,
                        symbol=order.symbol,
                        reason="Failed order submission",
                        broker_details=resp
                    )
                )
            else:
                # TODO: handle multiple orders (cancels?)
                # txid is an array of strings
                if order.reqid is None:
                    reqid = resp['result']['txid'][0]
                else:
                    # update the internal pairing of oid to kraken's
                    # txid with the new txid that is returned on edit
                    reqid = resp['result']['txid']

                # deliver ack that order has been submitted to broker routing
                await ems_order_stream.send(
                    BrokerdOrderAck(

                        # ems order request id
                        oid=order.oid,

                        # broker specific request id
                        reqid=reqid,

                        # account that made the order
                        account=order.account

                    )
                )

        elif action == 'cancel':
            msg = BrokerdCancel(**request_msg)

            # send order cancellation to kraken
            resp = await client.submit_cancel(
                reqid=msg.reqid
            )

            # check to make sure there was no error returned by
            # the kraken endpoint. assert one order was cancelled.
            try:
                result = resp['result']
                count = result['count']

            # check for 'error' key if we received no 'result'
            except KeyError:
                error = resp.get('error')

                await ems_order_stream.send(
                    BrokerdError(
                        oid=msg.oid,
                        reqid=msg.reqid,
                        symbol=msg.symbol,
                        reason="Failed order cancel",
                        broker_details=resp
                    )
                )

                if not error:
                    raise BrokerError(f'Unknown order cancel response: {resp}')

            else:
                if not count:  # no orders were cancelled?

                    # XXX: what exactly is this from and why would we care?
                    # there doesn't seem to be any docs here?
                    # https://docs.kraken.com/rest/#operation/cancelOrder

                    # check to make sure the cancellation is NOT pending,
                    # then send the confirmation to the ems order stream
                    pending = result.get('pending')
                    if pending:
                        log.error(
                            f'Order {msg.oid} cancel was not yet successful'
                        )

                        await ems_order_stream.send(
                            BrokerdError(
                                oid=msg.oid,
                                reqid=msg.reqid,
                                symbol=msg.symbol,
                                # TODO: maybe figure out if pending
                                # cancels will eventually get cancelled
                                reason="Order cancel is still pending?",
                                broker_details=resp
                            )
                        )

                else:  # order cancel success case.

                    await ems_order_stream.send(
                        BrokerdStatus(
                            reqid=msg.reqid,
                            account=msg.account,
                            time_ns=time.time_ns(),
                            status='cancelled',
                            reason='Order cancelled',
                            broker_details={'name': 'kraken'}
                        )
                    )
        else:
            log.error(f'Unknown order command: {request_msg}')


@acm
async def subscribe(
    ws: wsproto.WSConnection,
    token: str,
    subs: list[str] = ['ownTrades', 'openOrders'],
):
    '''
    Setup ws api subscriptions:
    https://docs.kraken.com/websockets/#message-subscribe

    By default we sign up for trade and order update events.

    '''
    # more specific logic for this in kraken's sync client:
    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188

    assert token
    for sub in subs:
        msg = {
            'event': 'subscribe',
            'subscription': {
                'name': sub,
                'token': token,
            }
        }

        # TODO: we want to eventually allow unsubs which should
        # be completely fine to request from a separate task
        # since internally the ws methods appear to be FIFO
        # locked.
        await ws.send_msg(msg)

    yield

    for sub in subs:
        # unsub from all pairs on teardown
        await ws.send_msg({
            'event': 'unsubscribe',
            'subscription': [sub],
        })

        # XXX: do we need to ack the unsub?
        # await ws.recv_msg()


@tractor.context
async def trades_dialogue(
    ctx: tractor.Context,
    loglevel: str = None,
) -> AsyncIterator[dict[str, Any]]:

    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    async with get_client() as client:

        # TODO: make ems flip to paper mode via
        # some returned signal if the user only wants to use
        # the data feed or we return this?
        # await ctx.started(({}, ['paper']))

        if not client._api_key:
            raise RuntimeError(
                'Missing Kraken API key in `brokers.toml`!?!?')

        # auth required block
        acctid = client._name
        acc_name = 'kraken.' + acctid

        # pull and deliver trades ledger
        trades = await client.get_trades()
        log.info(
            f'Loaded {len(trades)} trades from account `{acc_name}`'
        )
        trans = await update_ledger(acctid, trades)
        active, closed = pp.update_pps_conf(
            'kraken',
            acctid,
            trade_records=trans,
            ledger_reload={}.fromkeys(t.bsuid for t in trans),
        )

        position_msgs: list[dict] = []
        pps: dict[int, pp.Position]
        for pps in [active, closed]:
            for tid, p in pps.items():
                msg = BrokerdPosition(
                    broker='kraken',
                    account=acc_name,
                    symbol=p.symbol.front_fqsn(),
                    size=p.size,
                    avg_price=p.be_price,
                    currency='',
                )
                position_msgs.append(msg)

        await ctx.started(
            (position_msgs, [acc_name])
        )

        # get websocket token for authenticated data stream;
        # assert that a token was actually received.
        resp = await client.endpoint('GetWebSocketsToken', {})

        err = resp.get('error')
        if err:
            raise BrokerError(err)

        token = resp['result']['token']

        ws: NoBsWs
        async with (
            ctx.open_stream() as ems_stream,
            open_autorecon_ws(
                'wss://ws-auth.kraken.com/',
                fixture=partial(
                    subscribe,
                    token=token,
                ),
            ) as ws,
            trio.open_nursery() as n,
        ):
            # task for processing inbound requests from ems
            n.start_soon(handle_order_requests, client, ems_stream)

            count: int = 0

            # process and relay trades events to ems
            # https://docs.kraken.com/websockets/#message-ownTrades
            async for msg in stream_messages(ws):
                match msg:
                    case [
                        trades_msgs,
                        'ownTrades',
                        {'sequence': seq},
                    ]:
                        # XXX: do we actually need this orrr?
                        # ensure that we are only processing new trades?
                        assert seq > count
                        count += 1

                        # flatten msgs for processing
                        trades = {
                            tid: trade
                            for entry in trades_msgs
                            for (tid, trade) in entry.items()

                            # only emit entries which are not yet in-ledger
                            if tid not in {r.tid for r in trans}
                        }
                        for tid, trade in trades.items():

                            # parse-cast
                            reqid = trade['ordertxid']
                            action = trade['type']
                            price = float(trade['price'])
                            size = float(trade['vol'])
                            broker_time = float(trade['time'])

                            # send a fill msg for gui update
                            fill_msg = BrokerdFill(
                                reqid=reqid,
                                time_ns=time.time_ns(),

                                action=action,
                                size=size,
                                price=price,
                                # TODO: maybe capture more msg data
                                # i.e fees?
                                broker_details={'name': 'kraken'},
                                broker_time=broker_time
                            )
                            await ems_stream.send(fill_msg)

                            filled_msg = BrokerdStatus(
                                reqid=reqid,
                                time_ns=time.time_ns(),

                                account=acc_name,
                                status='filled',
                                filled=size,
                                reason='Order filled by kraken',
                                broker_details={
                                    'name': 'kraken',
                                    'broker_time': broker_time
                                },

                                # TODO: figure out if kraken gives a count
                                # of how many units of underlying were
                                # filled. Alternatively we can decrement
                                # this value ourselves by associating and
                                # calcing from the diff with the original
                                # client-side request, see:
                                # https://github.com/pikers/piker/issues/296
                                remaining=0,
                            )
                            await ems_stream.send(filled_msg)

                        # update ledger and position tracking
                        trans = await update_ledger(acctid, trades)
                        active, closed = pp.update_pps_conf(
                            'kraken',
                            acctid,
                            trade_records=trans,
                            ledger_reload={}.fromkeys(
                                t.bsuid for t in trans),
                        )

                        # emit pp msgs
                        for pos in filter(
                            bool,
                            chain(active.values(), closed.values()),
                        ):
                            pp_msg = BrokerdPosition(
                                broker='kraken',

                                # XXX: ok so this is annoying, we're
                                # relaying an account name with the
                                # backend suffix prefixed but when
                                # reading accounts from ledgers we
                                # don't need it and/or it's prefixed
                                # in the section table.. we should
                                # just strip this from the message
                                # right since `.broker` is already
                                # included?
                                account=f'kraken.{acctid}',
                                symbol=pos.symbol.front_fqsn(),
                                size=pos.size,
                                avg_price=pos.be_price,

                                # TODO
                                # currency=''
                            )
                            await ems_stream.send(pp_msg)

                    case [
                        trades_msgs,
                        'openOrders',
                        {'sequence': seq},
                    ]:
                        # TODO: async order update handling which we
                        # should remove from `handle_order_requests()`
                        # above:
                        # https://github.com/pikers/piker/issues/293
                        # https://github.com/pikers/piker/issues/310
                        log.info(f'Order update {seq}:{trades_msgs}')

                    case _:
                        log.warning(f'Unhandled trades msg: {msg}')
                        await tractor.breakpoint()


def norm_trade_records(
    ledger: dict[str, Any],

) -> list[pp.Transaction]:

    records: list[pp.Transaction] = []

    for tid, record in ledger.items():

        size = record.get('vol') * {
            'buy': 1,
            'sell': -1,
        }[record['type']]
        bsuid = record['pair']
        norm_sym = normalize_symbol(bsuid)

        records.append(
            pp.Transaction(
                fqsn=f'{norm_sym}.kraken',
                tid=tid,
                size=float(size),
                price=float(record['price']),
                cost=float(record['fee']),
                dt=pendulum.from_timestamp(float(record['time'])),
                bsuid=bsuid,

                # XXX: there are no derivs on kraken right?
                # expiry=expiry,
            )
        )

    return records
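# NOTE: to make the sign-mapping concrete, a hypothetical single-entry
# ledger (all field values made up) normalizes as follows:
ledger = {
    'TFAKE0-ABCDE-FGHIJK': {  # hypothetical kraken trade entry
        'ordertxid': 'OFAKE0-ABCDE-FGHIJK',
        'pair': 'XXBTZEUR',
        'type': 'sell',
        'price': '40000.0',
        'vol': 0.5,  # numeric: the `* -1` sign trick needs a number
        'fee': '10.4',
        'time': 1650000000.0,
    },
}
(tx,) = norm_trade_records(ledger)
assert tx.size == -0.5               # sells map to negative size
assert tx.fqsn == 'xbteur.kraken'    # via normalize_symbol()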
async def update_ledger(
    acctid: str,
    trade_entries: list[dict[str, Any]],

) -> list[pp.Transaction]:

    # write recent session's trades to the user's (local) ledger file.
    with pp.open_trade_ledger(
        'kraken',
        acctid,
    ) as ledger:
        ledger.update(trade_entries)

    # normalize to transaction form
    records = norm_trade_records(trade_entries)
    return records
@ -1,464 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from dataclasses import asdict
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio
import wsproto

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data import ShmArray
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
    OHLC,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        if isinstance(msg, dict):
            if msg.get('event') == 'heartbeat':

                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            err = msg.get('errorMessage')
            if err:
                raise BrokerError(err)
        else:
            yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):

        chan_id, *payload_array, chan_name, pair = msg

        if 'ohlc' in chan_name:

            yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])

        elif 'spread' in chan_name:

            bid, ask, ts, bsize, asize = map(float, payload_array[0])

            # TODO: really makes you think IB has a horrible API...
            quote = {
                'symbol': pair.replace('/', ''),
                'ticks': [
                    {'type': 'bid', 'price': bid, 'size': bsize},
                    {'type': 'bsize', 'price': bid, 'size': bsize},

                    {'type': 'ask', 'price': ask, 'size': asize},
                    {'type': 'asize', 'price': ask, 'size': asize},
                ],
            }
            yield 'l1', quote

        # elif 'book' in msg[-2]:
        #     chan_id, *payload_array, chan_name, pair = msg
        #     print(msg)

        else:
            print(f'UNHANDLED MSG: {msg}')
            yield msg


def normalize(
    ohlc: OHLC,

) -> dict:
    quote = asdict(ohlc)
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercase symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote


def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]:
    '''
    Create a request subscription packet dict.

    https://docs.kraken.com/websockets/#message-subscribe

    '''
    # eg. specific logic for this in kraken's sync client:
    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
    return {
        'pair': pairs,
        'event': 'subscribe',
        'subscription': data,
    }
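# NOTE: for example, the 1m OHLC subscription packet sent below in
# ``stream_quotes()`` comes out as:
sub = make_sub(
    ['XBT/USD', 'ETH/USD'],
    {'name': 'ohlc', 'interval': 1},
)
assert sub == {
    'pair': ['XBT/USD', 'ETH/USD'],
    'event': 'subscribe',
    'subscription': {'name': 'ohlc', 'interval': 1},
}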
@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if queries > 0:
                raise DataUnavailable

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def backfill_bars(

    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.
    '''
    with trio.CancelScope() as cs:
        async with open_cached_client('kraken') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()

            si = Pair(**await client.symbol_info(sym))  # validation
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: wsproto.WSConnection):
            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's shitty sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'ohlc', 'interval': 1}
            )

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'spread'}  # 'depth': 10}
            )

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            await ws.send_msg({
                'pair': list(ws_pairs.values()),
                'event': 'unsubscribe',
                'subscription': ['ohlc', 'spread'],
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with open_autorecon_ws(
            'wss://ws.kraken.com/',
            fixture=subscribe,
        ) as ws:

            # pull a first quote and deliver
            msg_gen = process_data_feed_msgs(ws)

            # TODO: use ``anext()`` when it lands in 3.10!
            typ, ohlc_last = await msg_gen.__anext__()

            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})
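# NOTE: kraken reports *cumulative* volume for the in-progress 1m bar,
# so the differencing above recovers per-update trade ("tick") volume;
# numerically:

# mid-interval: the cumulative counter keeps growing,
prev_cum, new_cum = 1.20, 1.35
tick_volume = new_cum - prev_cum   # 0.15 traded since the last update

# ..but on a fresh interval (ohlc.etime advanced) the counter has
# reset, so the full cumulative value *is* the tick volume:
tick_volume = 0.05                 # first update of the new bar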
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['altname']: item[0]
                     for item in matches}
                )
@ -22,10 +22,54 @@ from enum import Enum
from typing import Optional

from bidict import bidict
from pydantic import BaseModel, validator

from ..data._source import Symbol
from ..data.types import Struct
from ..pp import Position
from ._messages import BrokerdPosition, Status


class Position(BaseModel):
    '''
    Basic pp (personal position) model with attached fills history.

    This type should be IPC wire ready?

    '''
    symbol: Symbol

    # last size and avg entry price
    size: float
    avg_price: float  # TODO: contextual pricing

    # ordered record of known constituent trade messages
    fills: list[Status] = []

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:

        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        avg_price, size = (
            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
            round(msg['size'], ndigits=lot_size_digits),
        )

        self.avg_price = avg_price
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.avg_price * self.size


_size_units = bidict({
@ -40,30 +84,33 @@ SizeUnit = Enum(
)


class Allocator(Struct):
class Allocator(BaseModel):

    class Config:
        validate_assignment = True
        copy_on_model_validation = False
        arbitrary_types_allowed = True

        # required to get the account validator lookup working?
        extra = 'allow'
        underscore_attrs_are_private = False

    symbol: Symbol
    account: Optional[str] = 'paper'

    _size_units: bidict[str, Optional[str]] = _size_units

    # TODO: for enums this clearly doesn't fucking work, you can't set
    # a default at startup by passing in a `dict` but yet you can set
    # that value through assignment..for wtv cucked reason.. honestly, pure
    # unintuitive garbage.
    _size_unit: str = 'currency'
    size_unit: str = 'currency'
    _size_units: dict[str, Optional[str]] = _size_units

    @property
    def size_unit(self) -> str:
        return self._size_unit

    @size_unit.setter
    def size_unit(self, v: str) -> Optional[str]:
    @validator('size_unit', pre=True)
    def maybe_lookup_key(cls, v):
        # apply the corresponding enum key for the text "description" value
        if v not in _size_units:
            v = _size_units.inverse[v]
            return _size_units.inverse[v]

        assert v in _size_units
        self._size_unit = v
        return v

    # TODO: if we ever want to support non-uniform entry-slot-proportion

@ -126,7 +173,7 @@ class Allocator(Struct):
            l_sub_pp = self.units_limit - abs_live_size

        elif size_unit == 'currency':
            live_cost_basis = abs_live_size * live_pp.be_price
            live_cost_basis = abs_live_size * live_pp.avg_price
            slot_size = currency_per_slot / price
            l_sub_pp = (self.currency_limit - live_cost_basis) / price

@ -158,7 +205,7 @@ class Allocator(Struct):
        if size_unit == 'currency':
            # compute the "projected" limit's worth of units at the
            # current pp (weighted) price:
            slot_size = currency_per_slot / live_pp.be_price
            slot_size = currency_per_slot / live_pp.avg_price

        else:
            slot_size = u_per_slot

@ -197,12 +244,7 @@ class Allocator(Struct):
        if order_size < slot_size:
            # compute a fractional slots size to display
            slots_used = self.slots_used(
                Position(
                    symbol=sym,
                    size=order_size,
                    be_price=price,
                    bsuid=sym,
                )
                Position(symbol=sym, size=order_size, avg_price=price)
            )

        return {

@ -229,8 +271,8 @@ class Allocator(Struct):
        abs_pp_size = abs(pp.size)

        if self.size_unit == 'currency':
            # live_currency_size = size or (abs_pp_size * pp.be_price)
            live_currency_size = abs_pp_size * pp.be_price
            # live_currency_size = size or (abs_pp_size * pp.avg_price)
            live_currency_size = abs_pp_size * pp.avg_price
            prop = live_currency_size / self.currency_limit

        else:

@ -258,7 +300,7 @@ def mk_allocator(
    # default allocation settings
    defaults: dict[str, float] = {
        'account': None,  # select paper by default
        # 'size_unit': 'currency',
        'size_unit': 'currency',
        'units_limit': 400,
        'currency_limit': 5e3,
        'slots': 4,

@ -297,13 +339,10 @@ def mk_allocator(
            # entry step 1.0
            alloc.units_limit = alloc.slots

    else:
        alloc.size_unit = 'currency'

    # if the current position is already greater than the limit
    # settings, increase the limit to the current position
    if alloc.size_unit == 'currency':
        startup_size = startup_pp.size * startup_pp.be_price
        startup_size = startup_pp.size * startup_pp.avg_price

        if startup_size > alloc.currency_limit:
            alloc.currency_limit = round(startup_size, ndigits=2)
@ -58,11 +58,11 @@ class OrderBook:

    def send(
        self,
        msg: Order | dict,
        msg: Order,

    ) -> dict:
        self._sent_orders[msg.oid] = msg
        self._to_ems.send_nowait(msg)
        self._to_ems.send_nowait(msg.dict())
        return msg

    def update(

@ -73,8 +73,9 @@ class OrderBook:

    ) -> dict:
        cmd = self._sent_orders[uuid]
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        msg = cmd.dict()
        msg.update(data)
        self._sent_orders[uuid] = Order(**msg)
        self._to_ems.send_nowait(msg)
        return cmd

@ -87,7 +88,7 @@ class OrderBook:
            oid=uuid,
            symbol=cmd.symbol,
        )
        self._to_ems.send_nowait(msg)
        self._to_ems.send_nowait(msg.dict())


_orders: OrderBook = None

@ -148,7 +149,7 @@ async def relay_order_cmds_from_sync_code(
    book = get_orders()
    async with book._from_order_book.subscribe() as orders_stream:
        async for cmd in orders_stream:
            if cmd.symbol == symbol_key:
            if cmd['symbol'] == symbol_key:
                log.info(f'Send order cmd:\n{pformat(cmd)}')
                # send msg over IPC / wire
                await to_ems_stream.send(cmd)
@ -20,12 +20,12 @@ In da suit parlances: "Execution management systems"
"""
from contextlib import asynccontextmanager
from dataclasses import dataclass, field
from math import isnan
from pprint import pformat
import time
from typing import AsyncIterator, Callable

from bidict import bidict
from pydantic import BaseModel
import trio
from trio_typing import TaskStatus
import tractor

@ -33,7 +33,6 @@ import tractor
from ..log import get_logger
from ..data._normalize import iterticks
from ..data.feed import Feed, maybe_open_feed
from ..data.types import Struct
from .._daemon import maybe_spawn_brokerd
from . import _paper_engine as paper
from ._messages import (

@ -231,7 +230,7 @@ async def clear_dark_triggers(
                            price=submit_price,
                            size=cmd['size'],
                        )
                        await brokerd_orders_stream.send(msg)
                        await brokerd_orders_stream.send(msg.dict())

                        # mark this entry as having sent an order
                        # request. the entry will be replaced once the

@ -247,11 +246,14 @@ async def clear_dark_triggers(

                msg = Status(
                    oid=oid,  # ems order id
                    time_ns=time.time_ns(),
                    resp=resp,
                    time_ns=time.time_ns(),
                    symbol=fqsn,
                    trigger_price=price,
                    brokerd_msg=cmd,
                )
                    broker_details={'name': broker},
                    cmd=cmd,  # original request message

                ).dict()

                # remove exec-condition from set
                log.info(f'removing pred for {oid}')

@ -287,11 +289,7 @@ class TradesRelay:
    brokerd_dialogue: tractor.MsgStream

    # map of symbols to dicts of accounts to pp msgs
    positions: dict[
        # brokername, acctid
        tuple[str, str],
        list[BrokerdPosition],
    ]
    positions: dict[str, dict[str, BrokerdPosition]]

    # allowed account names
    accounts: tuple[str]

@ -300,7 +298,7 @@ class TradesRelay:
    consumers: int = 0


class Router(Struct):
class Router(BaseModel):
    '''
    Order router which manages and tracks per-broker dark book,
    alerts, clearing and related data feed management.

@ -321,6 +319,10 @@ class Router(Struct):
    # brokername to trades-dialogues streams with ``brokerd`` actors
    relays: dict[str, TradesRelay] = {}

    class Config:
        arbitrary_types_allowed = True
        underscore_attrs_are_private = False

    def get_dark_book(
        self,
        brokername: str,

@ -459,24 +461,18 @@ async def open_brokerd_trades_dialogue(
        # normalizing them to EMS messages and relaying back to
        # the piker order client set.

        # locally cache and track positions per account with
        # a table of (brokername, acctid) -> `BrokerdPosition`
        # msgs.
        # locally cache and track positions per account.
        pps = {}
        for msg in positions:
            log.info(f'loading pp: {msg}')

            account = msg['account']

            # TODO: better value error for this which
            # dumps the account and message and states the
            # mismatch..
            assert account in accounts

            pps.setdefault(
                (broker, account),
                [],
            ).append(msg)
                f'{msg["symbol"]}.{broker}',
                {}
            )[account] = msg

        relay = TradesRelay(
            brokerd_dialogue=brokerd_trades_stream,

@ -574,17 +570,19 @@ async def translate_and_relay_brokerd_events(

        if name == 'position':

            pos_msg = BrokerdPosition(**brokerd_msg)
            pos_msg = BrokerdPosition(**brokerd_msg).dict()

            # XXX: this will be useful for automatic strats yah?
            # keep pps per account up to date locally in ``emsd`` mem
            sym, broker = pos_msg.symbol, pos_msg.broker
            sym, broker = pos_msg['symbol'], pos_msg['broker']

            relay.positions.setdefault(
                # NOTE: translate to a FQSN!
                (broker, sym),
                []
            ).append(pos_msg)
                f'{sym}.{broker}',
                {}
            ).setdefault(
                pos_msg['account'], {}
            ).update(pos_msg)

            # fan-out-relay position msgs immediately by
            # broadcasting updates on all client streams

@ -637,8 +635,8 @@ async def translate_and_relay_brokerd_events(
            # something is out of order, we don't have an oid for
            # this broker-side message.
            log.error(
                f'Unknown oid: {oid} for msg:\n'
                f'{pformat(brokerd_msg)}\n'
                'Unknown oid:{oid} for msg:\n'
                f'{pformat(brokerd_msg)}'
                'Unable to relay message to client side!?'
            )

@ -669,7 +667,7 @@ async def translate_and_relay_brokerd_events(
                entry.reqid = reqid

                # tell broker to cancel immediately
                await brokerd_trades_stream.send(entry)
                await brokerd_trades_stream.send(entry.dict())

            # - the order is now active and will be mirrored in
            #   our book -> registered as live flow

@ -709,7 +707,7 @@ async def translate_and_relay_brokerd_events(
            # if 10147 in message: cancel

            resp = 'broker_errored'
            broker_details = msg
            broker_details = msg.dict()

            # don't relay message to order requester client
            # continue

@ -744,7 +742,7 @@ async def translate_and_relay_brokerd_events(
                resp = 'broker_' + msg.status

                # pass the BrokerdStatus msg inside the broker details field
                broker_details = msg
                broker_details = msg.dict()

        elif name in (
            'fill',

@ -753,7 +751,7 @@ async def translate_and_relay_brokerd_events(

            # proxy through the "fill" result(s)
            resp = 'broker_filled'
            broker_details = msg
            broker_details = msg.dict()

            log.info(f'\nFill for {oid} cleared with:\n{pformat(resp)}')

@ -771,7 +769,7 @@ async def translate_and_relay_brokerd_events(
                    time_ns=time.time_ns(),
                    broker_reqid=reqid,
                    brokerd_msg=broker_details,
                )
                ).dict()
            )
        except KeyError:
            log.error(

@ -836,14 +834,14 @@ async def process_client_order_cmds(

                # NOTE: cancel response will be relayed back in messages
                # from corresponding broker
                if reqid is not None:
                if reqid:

                    # send cancel to brokerd immediately!
                    log.info(
                        f'Submitting cancel for live order {reqid}'
                    )

                    await brokerd_order_stream.send(msg)
                    await brokerd_order_stream.send(msg.dict())

                else:
                    # this might be a cancel for an order that hasn't been

@ -865,7 +863,7 @@ async def process_client_order_cmds(
                        resp='dark_cancelled',
                        oid=oid,
                        time_ns=time.time_ns(),
                    )
                    ).dict()
                )
                # de-register this client dialogue
                router.dialogues.pop(oid)

@ -920,7 +918,7 @@ async def process_client_order_cmds(
                # handle relaying the ems side responses back to
                # the client/cmd sender from this request
                log.info(f'Sending live order to {broker}:\n{pformat(msg)}')
                await brokerd_order_stream.send(msg)
                await brokerd_order_stream.send(msg.dict())

                # an immediate response should be ``BrokerdOrderAck``
                # with ems order id from the ``trades_dialogue()``

@ -945,12 +943,6 @@ async def process_client_order_cmds(
                # like every other shitty tina platform that makes
                # the user choose the predicate operator.
                last = dark_book.lasts[fqsn]

                # sometimes the real-time feed hasn't come up
                # so just pull from the latest history.
                if isnan(last):
                    last = feed.shm.array[-1]['close']

                pred = mk_check(trigger_price, last, action)

                spread_slap: float = 5

@ -1000,7 +992,7 @@ async def process_client_order_cmds(
                        resp=resp,
                        oid=oid,
                        time_ns=time.time_ns(),
                    )
                    ).dict()
                )


@ -1096,12 +1088,15 @@ async def _emsd_main(

        brokerd_stream = relay.brokerd_dialogue  # .clone()

        # flatten out collected pps from brokerd for delivery
        pp_msgs = {
            fqsn: list(pps.values())
            for fqsn, pps in relay.positions.items()
        }

        # signal to client that we're started and deliver
        # all known pps and accounts for this ``brokerd``.
        await ems_ctx.started((
            relay.positions,
            list(relay.accounts),
        ))
        await ems_ctx.started((pp_msgs, list(relay.accounts)))

        # establish 2-way stream with requesting order-client and
        # begin handling inbound order requests and updates

@ -1138,14 +1133,8 @@ async def _emsd_main(
        )

    finally:
        # try to remove client from "registry"
        try:
            # remove client from "registry"
            _router.clients.remove(ems_client_order_stream)
        except KeyError:
            log.warning(
                f'Stream {ems_client_order_stream._ctx.chan.uid}'
                ' was already dropped?'
            )

        dialogues = _router.dialogues
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@ -15,26 +15,21 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Clearing sub-system messages and protocols.
Clearing system messaging types and protocols.

"""
from typing import Optional, Union

# TODO: try out just encoding/send direction for now?
# import msgspec
from pydantic import BaseModel

from ..data._source import Symbol
from ..data.types import Struct


# TODO: ``msgspec`` stuff worth paying attention to:
# - schema evolution: https://jcristharif.com/msgspec/usage.html#schema-evolution
# - use literals for a common msg determined by diff keys?
#   - https://jcristharif.com/msgspec/usage.html#literal
# - for eg. ``BrokerdStatus``, instead just have separate messages?

# --------------
# Client -> emsd
# --------------

class Cancel(Struct):

class Cancel(BaseModel):
    '''Cancel msg for removing a dark (ems triggered) or
    broker-submitted (live) trigger/order.


@ -44,10 +39,8 @@ class Cancel(Struct):
    symbol: str


class Order(Struct):
class Order(BaseModel):

    # TODO: use ``msgspec.Literal``
    # https://jcristharif.com/msgspec/usage.html#literal
    action: str  # {'buy', 'sell', 'alert'}
    # internal ``emsd`` unique "order id"
    oid: str  # uuid4

@ -55,9 +48,6 @@ class Order(Struct):
    account: str  # should we set a default as '' ?

    price: float
    # TODO: could we drop the ``.action`` field above and instead just
    # use +/- values here? Would make the msg smaller at the sake of a
    # teensie fp precision?
    size: float
    brokers: list[str]


@ -69,14 +59,20 @@ class Order(Struct):
    # the backend broker
    exec_mode: str  # {'dark', 'live', 'paper'}

    class Config:
        # just for pre-loading a ``Symbol`` when used
        # in the order mode staging process
        arbitrary_types_allowed = True
        # don't copy this model instance when used in
        # a recursive model
        copy_on_model_validation = False

# --------------
# Client <- emsd
# --------------
# update msgs from ems which relay state change info
# from the active clearing engine.

class Status(Struct):

class Status(BaseModel):

    name: str = 'status'
    oid: str  # uuid4

@ -99,6 +95,8 @@ class Status(Struct):
    # }
    resp: str  # "response", see above

    # symbol: str

    # trigger info
    trigger_price: Optional[float] = None
    # price: float

@ -113,12 +111,10 @@ class Status(Struct):
    brokerd_msg: dict = {}


# ---------------
# emsd -> brokerd
# ---------------
# requests *sent* from ems to respective backend broker daemon

class BrokerdCancel(Struct):
class BrokerdCancel(BaseModel):

    action: str = 'cancel'
    oid: str  # piker emsd order id

@ -134,7 +130,7 @@ class BrokerdCancel(Struct):
    reqid: Optional[Union[int, str]] = None


class BrokerdOrder(Struct):
class BrokerdOrder(BaseModel):

    action: str  # {buy, sell}
    oid: str

@ -154,12 +150,11 @@ class BrokerdOrder(Struct):
    size: float


# ---------------
# emsd <- brokerd
# ---------------
# requests *received* to ems from broker backend

class BrokerdOrderAck(Struct):

class BrokerdOrderAck(BaseModel):
    '''
    Immediate response to a brokerd order request providing the broker
    specific unique order id so that the EMS can associate this

@ -177,7 +172,7 @@ class BrokerdOrderAck(Struct):
    account: str = ''


class BrokerdStatus(Struct):
class BrokerdStatus(BaseModel):

    name: str = 'status'
    reqid: Union[int, str]

@ -210,7 +205,7 @@ class BrokerdStatus(Struct):
    }


class BrokerdFill(Struct):
class BrokerdFill(BaseModel):
    '''
    A single message indicating a "fill-details" event from the broker
    if available.

@ -235,7 +230,7 @@ class BrokerdFill(Struct):
    broker_time: float


class BrokerdError(Struct):
class BrokerdError(BaseModel):
    '''
    Optional error type that can be relayed to emsd for error handling.


@ -254,7 +249,7 @@ class BrokerdError(Struct):
    broker_details: dict = {}


class BrokerdPosition(Struct):
class BrokerdPosition(BaseModel):
    '''Position update event from brokerd.

    '''

@ -263,6 +258,6 @@ class BrokerdPosition(Struct):
    broker: str
    account: str
    symbol: str
    currency: str
    size: float
    avg_price: float
    currency: str = ''
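# NOTE: the whole compare boils down to these base types: on one branch
# the messages subclass a thin ``msgspec.Struct`` wrapper
# (``piker.data.types.Struct``), on the other pydantic's ``BaseModel``.
# A minimal sketch of the two serialization idioms, assuming a recent
# ``msgspec`` and pydantic v1 (names here are illustrative only):
import msgspec
from pydantic import BaseModel


class CancelViaMsgspec(msgspec.Struct):
    oid: str
    symbol: str
    action: str = 'cancel'


class CancelViaPydantic(BaseModel):
    oid: str
    symbol: str
    action: str = 'cancel'


m = CancelViaMsgspec(oid='deadbeef', symbol='xbtusd.kraken')
# msgspec only type-checks on encode/decode, not on construction:
wire = msgspec.msgpack.encode(m)
assert msgspec.msgpack.decode(wire, type=CancelViaMsgspec) == m

p = CancelViaPydantic(oid='deadbeef', symbol='xbtusd.kraken')
# pydantic (v1) validates eagerly and dict-ifies via ``.dict()``:
assert p.dict()['action'] == 'cancel'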
@ -31,8 +31,6 @@ import tractor
from dataclasses import dataclass

from .. import data
from ..data._source import Symbol
from ..pp import Position
from ..data._normalize import iterticks
from ..data._source import unpack_fqsn
from ..log import get_logger

@ -117,7 +115,7 @@ class PaperBoi:
            reason='paper_trigger',
            remaining=size,
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

        # if we're already a clearing price simulate an immediate fill
        if (

@ -173,7 +171,7 @@ class PaperBoi:
            broker=self.broker,
            time_ns=time.time_ns(),
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

    async def fake_fill(
        self,

@ -216,7 +214,7 @@ class PaperBoi:
                'name': self.broker + '_paper',
            },
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

        if order_complete:

@ -240,7 +238,7 @@ class PaperBoi:
                'name': self.broker,
            },
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

        # lookup any existing position
        token = f'{symbol}.{self.broker}'

@ -259,16 +257,31 @@ class PaperBoi:
            )
        )

        # delegate update to `.pp.Position.lifo_update()`
        pp = Position(
            Symbol(key=symbol),
            size=pp_msg.size,
            be_price=pp_msg.avg_price,
            bsuid=symbol,
        )
        pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price)
        # "avg position price" calcs
        # TODO: eventually it'd be nice to have a small set of routines
        # to do this stuff from a sequence of cleared orders to enable
        # so called "contextual positions".
        new_size = size + pp_msg.size

        await self.ems_trades_stream.send(pp_msg)
        # old size minus the new size gives us the size differential with
        # +ve -> increase in pp size
        # -ve -> decrease in pp size
        size_diff = abs(new_size) - abs(pp_msg.size)

        if new_size == 0:
            pp_msg.avg_price = 0

        elif size_diff > 0:
            # only update the "average position price" when the position
            # size increases, not when it decreases (i.e. the position is
            # being made smaller)
            pp_msg.avg_price = (
                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
            ) / abs(new_size)

        pp_msg.size = new_size

        await self.ems_trades_stream.send(pp_msg.dict())
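[Editor's note: the average-price update in the hunk above is a size-weighted mean over the existing position and the new fill, applied only when the absolute position size grows. A minimal standalone sketch of that arithmetic (the `update_avg_price` helper is hypothetical, not part of the diff):

def update_avg_price(
    old_size: float,
    old_avg: float,
    fill_size: float,
    fill_price: float,
) -> tuple[float, float]:
    # mirror of the branch logic above: only re-average when the
    # absolute position size grows; a reducing fill keeps the old avg.
    new_size = old_size + fill_size
    if new_size == 0:
        return 0.0, 0.0
    if abs(new_size) - abs(old_size) > 0:
        new_avg = (
            abs(fill_size) * fill_price + old_avg * abs(old_size)
        ) / abs(new_size)
        return new_size, new_avg
    return new_size, old_avg

# worked example: long 10 @ 100, buy 10 more @ 110
# -> 20 units at a (10*100 + 10*110) / 20 = 105.0 average
assert update_avg_price(10, 100.0, 10, 110.0) == (20, 105.0)
# a reducing sell leaves the average untouched
assert update_avg_price(20, 105.0, -5, 120.0) == (15, 105.0)

End note.]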

    async def simulate_fills(

@ -377,14 +390,13 @@ async def handle_order_requests(
        account = request_msg['account']
        if account != 'paper':
            log.error(
                'This is a paper account,'
                ' only a `paper` selection is valid'
                'This is a paper account, only a `paper` selection is valid'
            )
            await ems_order_stream.send(BrokerdError(
                oid=request_msg['oid'],
                symbol=request_msg['symbol'],
                reason=f'Paper only. No account found: `{account}` ?',
            ))
            ).dict())
            continue

        # validate

@ -416,7 +428,7 @@ async def handle_order_requests(
                    # broker specific request id
                    reqid=reqid,
                )
                ).dict()
            )

        elif action == 'cancel':

@ -452,7 +464,7 @@ async def trades_dialogue(
    # TODO: load paper positions per broker from .toml config file
    # and pass as symbol to position data mapping: ``dict[str, dict]``
    # await ctx.started(all_positions)
    await ctx.started(({}, ['paper']))
    await ctx.started(({}, {'paper',}))

    async with (
        ctx.open_stream() as ems_stream,

@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
        )
        log.info(
            f'`marketstored` up!\n'
            f'pid: {pid}\n'
            f'container id: {cid[:12]}\n'
            f'`marketstore` up!\n'
            f'`marketstored` pid: {pid}\n'
            f'docker container id: {cid}\n'
            f'config: {pformat(config)}'
        )

@ -21,7 +21,6 @@ Broker configuration mgmt.
import platform
import sys
import os
from os import path
from os.path import dirname
import shutil
from typing import Optional

@ -112,7 +111,6 @@ if _parent_user:

_conf_names: set[str] = {
    'brokers',
    'pps',
    'trades',
    'watchlists',
}

@ -149,21 +147,19 @@ def get_conf_path(
    conf_name: str = 'brokers',

) -> str:
    '''
    Return the top-level default config path normally under
    ``~/.config/piker`` on linux for a given ``conf_name``, the config
    name.
    """Return the default config path normally under
    ``~/.config/piker`` on linux.

    Contains files such as:
    - brokers.toml
    - pp.toml
    - watchlists.toml
    - trades.toml

    # maybe coming soon ;)
    - signals.toml
    - strats.toml

    '''
    """
    assert conf_name in _conf_names
    fn = _conf_fn_w_ext(conf_name)
    return os.path.join(

@ -177,7 +173,7 @@ def repodir():
    Return the abspath to the repo directory.

    '''
    dirpath = path.abspath(
    dirpath = os.path.abspath(
        # we're 3 levels down in **this** module file
        dirname(dirname(os.path.realpath(__file__)))
    )

@ -186,9 +182,7 @@ def repodir():

def load(
    conf_name: str = 'brokers',
    path: str = None,

    **tomlkws,
    path: str = None

) -> (dict, str):
    '''

@ -196,7 +190,6 @@ def load(

    '''
    path = path or get_conf_path(conf_name)

    if not os.path.isfile(path):
        fn = _conf_fn_w_ext(conf_name)

@ -209,11 +202,8 @@ def load(
        # if one exists.
        if os.path.isfile(template):
            shutil.copyfile(template, path)
        else:
            with open(path, 'w'):
                pass  # touch

    config = toml.load(path, **tomlkws)
    config = toml.load(path)
    log.debug(f"Read config file {path}")
    return config, path

@ -222,7 +212,6 @@ def write(
    config: dict,  # toml config as dict
    name: str = 'brokers',
    path: str = None,
    **toml_kwargs,

) -> None:
    '''

@ -246,14 +235,11 @@ def write(
        f"{path}"
    )
    with open(path, 'w') as cf:
        return toml.dump(
            config,
            cf,
            **toml_kwargs,
        )
        return toml.dump(config, cf)


def load_accounts(

    providers: Optional[list[str]] = None

) -> bidict[str, Optional[str]]:

@ -37,7 +37,6 @@ from docker.models.containers import Container as DockerContainer
from docker.errors import (
    DockerException,
    APIError,
    # ContainerError,
)
from requests.exceptions import ConnectionError, ReadTimeout

@ -51,8 +50,8 @@ class DockerNotStarted(Exception):
    'Prolly you dint start da daemon bruh'


class ApplicationLogError(Exception):
    'App in container reported an error in logs'
class ContainerError(RuntimeError):
    'Error reported via app-container logging level'


@acm

@ -97,9 +96,9 @@ async def open_docker(
        # not perms?
        raise

    # finally:
    #     if client:
    #         client.close()
    finally:
        if client:
            client.close()


class Container:

@ -157,7 +156,7 @@ class Container:

        # print(f'level: {level}')
        if level in ('error', 'fatal'):
            raise ApplicationLogError(msg)
            raise ContainerError(msg)

        if patt in msg:
            return True

@ -186,21 +185,6 @@ class Container:
            if 'is not running' in err.explanation:
                return False

    def hard_kill(self, start: float) -> None:
        delay = time.time() - start
        log.error(
            f'Failed to kill container {self.cntr.id} after {delay}s\n'
            'sending SIGKILL..'
        )
        # get out the big guns, bc apparently marketstore
        # doesn't actually know how to terminate gracefully
        # :eyeroll:...
        self.try_signal('SIGKILL')
        self.cntr.wait(
            timeout=3,
            condition='not-running',
        )

    async def cancel(
        self,
        stop_msg: str,

@ -247,9 +231,21 @@ class Container:
                ConnectionError,
            ):
                log.exception('Docker connection failure')
                self.hard_kill(start)
                break
        else:
            self.hard_kill(start)
            delay = time.time() - start
            log.error(
                f'Failed to kill container {cid} after {delay}s\n'
                'sending SIGKILL..'
            )
            # get out the big guns, bc apparently marketstore
            # doesn't actually know how to terminate gracefully
            # :eyeroll:...
            self.try_signal('SIGKILL')
            self.cntr.wait(
                timeout=3,
                condition='not-running',
            )

        log.cancel(f'Container stopped: {cid}')

@ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
if _USE_POSIX:
    from _posixshmem import shm_unlink

# import msgspec
import numpy as np
from numpy.lib import recfunctions as rfn
import tractor
import numpy as np
from pydantic import BaseModel
from numpy.lib import recfunctions as rfn

from ..log import get_logger
from ._source import base_iohlc_dtype
from .types import Struct


log = get_logger(__name__)

@ -108,12 +107,15 @@ class SharedInt:
        log.warning(f'Shm for {name} already unlinked?')


class _Token(Struct, frozen=True):
class _Token(BaseModel):
    '''
    Internal representation of a shared memory "token"
    which can be used to key a system wide post shm entry.

    '''
    class Config:
        frozen = True

    shm_name: str  # this serves as a "key" value
    shm_first_index_name: str
    shm_last_index_name: str

@ -124,22 +126,17 @@ class _Token(Struct, frozen=True):
        return np.dtype(list(map(tuple, self.dtype_descr))).descr

    def as_msg(self):
        return self.to_dict()
        return self.dict()

    @classmethod
    def from_msg(cls, msg: dict) -> _Token:
        if isinstance(msg, _Token):
            return msg

        # TODO: native struct decoding
        # return _token_dec.decode(msg)

        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
        return _Token(**msg)


# _token_dec = msgspec.msgpack.Decoder(_Token)

# TODO: this api?
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
# _known_tokens = tractor.ContextStack('_known_tokens', )

@ -170,7 +167,7 @@ def _make_token(
        shm_name=key,
        shm_first_index_name=key + "_first",
        shm_last_index_name=key + "_last",
        dtype_descr=tuple(np.dtype(dtype).descr)
        dtype_descr=np.dtype(dtype).descr
    )

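[Editor's note: a minimal sketch (standalone, not from the diff) of why ``from_msg`` above re-tuples ``dtype_descr``: IPC/serialization round-trips hand back lists, while numpy dtype descriptors are lists of tuples, so the field is normalized before reconstructing the token:

import numpy as np

base = np.dtype([('time', '<f8'), ('close', '<f8')])
descr = base.descr                       # list of (name, format) tuples
wire = [list(f) for f in descr]          # a msgpack/json round-trip yields lists
restored = list(map(tuple, wire))        # normalize back, as ``from_msg`` does
assert np.dtype(restored) == base

End note.]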
@ -23,7 +23,7 @@ import decimal

from bidict import bidict
import numpy as np
from msgspec import Struct
from pydantic import BaseModel
# from numba import from_dtype

@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
    )


class Symbol(Struct):
class Symbol(BaseModel):
    '''
    I guess this is some kinda container thing for dealing with
    all the different meta-data formats from brokers?

@ -152,7 +152,9 @@ class Symbol(Struct):
        info: dict[str, Any],
        suffix: str = '',

    ) -> Symbol:
    # XXX: like wtf..
    # ) -> 'Symbol':
    ) -> None:

        tick_size = info.get('price_tick_size', 0.01)
        lot_tick_size = info.get('lot_tick_size', 0.0)

@ -173,7 +175,9 @@ class Symbol(Struct):
        fqsn: str,
        info: dict[str, Any],

    ) -> Symbol:
    # XXX: like wtf..
    # ) -> 'Symbol':
    ) -> None:
        broker, key, suffix = unpack_fqsn(fqsn)
        return cls.from_broker_info(
            broker,

@ -236,7 +240,7 @@ class Symbol(Struct):

        '''
        tokens = self.tokens()
        fqsn = '.'.join(map(str.lower, tokens))
        fqsn = '.'.join(tokens)
        return fqsn

    def iterfqsns(self) -> list[str]:

@ -53,11 +53,13 @@ class NoBsWs:
    def __init__(
        self,
        url: str,
        token: str,
        stack: AsyncExitStack,
        fixture: Callable,
        serializer: ModuleType = json,
    ):
        self.url = url
        self.token = token
        self.fixture = fixture
        self._stack = stack
        self._ws: 'WebSocketConnection' = None  # noqa

@ -81,9 +83,14 @@ class NoBsWs:
            trio_websocket.open_websocket_url(self.url)
        )
        # rerun user code fixture
        if self.token == '':
            ret = await self._stack.enter_async_context(
                self.fixture(self)
            )
        else:
            ret = await self._stack.enter_async_context(
                self.fixture(self, self.token)
            )

        assert ret is None

@ -128,13 +135,14 @@ async def open_autorecon_ws(

    # TODO: proper type annot smh
    fixture: Callable,

    # used for authenticated websockets
    token: str = '',
) -> AsyncGenerator[tuple[...], NoBsWs]:
    """Apparently we can QoS for all sorts of reasons..so catch em.

    """
    async with AsyncExitStack() as stack:
        ws = NoBsWs(url, stack, fixture=fixture)
        ws = NoBsWs(url, token, stack, fixture=fixture)
        await ws._connect()

        try:
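[Editor's note: the ``fixture`` argument above is an async context manager that gets re-entered on every (re)connect, so per-connection setup such as subscriptions is replayed automatically after a drop; note the ``assert ret is None`` in ``_connect()``, i.e. the fixture must yield nothing. A hedged sketch of a caller-side fixture, assuming the wrapper exposes a ``send_msg()`` helper (the subscription payload and names are illustrative):

from contextlib import asynccontextmanager

@asynccontextmanager
async def subscribe_trades(ws):
    # re-sent on every (re)connect since the fixture is re-entered
    await ws.send_msg({'method': 'SUBSCRIBE', 'params': ['btcusdt@aggTrade']})
    yield  # must yield None: ``_connect()`` asserts this

End note.]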
@ -42,6 +42,7 @@ from trio_typing import TaskStatus
import trimeter
import tractor
from tractor.trionics import maybe_open_context
from pydantic import BaseModel
import pendulum
import numpy as np

@ -58,7 +59,6 @@ from ._sharedmem import (
    ShmArray,
)
from .ingest import get_ingestormod
from .types import Struct
from ._source import (
    base_iohlc_dtype,
    Symbol,

@ -84,7 +84,7 @@ if TYPE_CHECKING:
log = get_logger(__name__)


class _FeedsBus(Struct):
class _FeedsBus(BaseModel):
    '''
    Data feeds broadcaster and persistence management.

@ -100,6 +100,10 @@ class _FeedsBus(Struct):
    a dedicated cancel scope.

    '''
    class Config:
        arbitrary_types_allowed = True
        underscore_attrs_are_private = False

    brokername: str
    nursery: trio.Nursery
    feeds: dict[str, tuple[dict, dict]] = {}
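[Editor's note: the recurring ``class Config: arbitrary_types_allowed = True`` blocks these hunks add back are what lets pydantic v1 models carry field types it has no validator for (e.g. ``trio.Nursery`` here); without the flag the model class itself fails to define. A minimal sketch (``Handle``/``Bus`` are made-up names):

from pydantic import BaseModel

class Handle:            # stand-in for an arbitrary runtime type
    ...

class Bus(BaseModel):
    class Config:
        arbitrary_types_allowed = True  # pydantic only isinstance-checks this field

    handle: Handle

Bus(handle=Handle())     # ok
# without the Config flag, *defining* ``Bus`` raises RuntimeError:
# "no validator found for <class 'Handle'>"

End note.]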
@ -1,68 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Built-in (extension) types.

"""
from typing import Optional
from pprint import pformat

import msgspec


class Struct(
    msgspec.Struct,

    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    # tag='pikerstruct',
    # tag=True,
):
    '''
    A "human friendlier" (aka repl buddy) struct subtype.

    '''
    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    def __repr__(self):
        return f'Struct({pformat(self.to_dict())})'

    def copy(
        self,
        update: Optional[dict] = None,

    ) -> msgspec.Struct:
        '''
        Validate-typecast all self defined fields, return a copy of us
        with all such fields.

        This is kinda like the default behaviour in `pydantic.BaseModel`.

        '''
        if update:
            for k, v in update.items():
                setattr(self, k, v)

        # roundtrip serialize to validate
        return msgspec.msgpack.Decoder(
            type=type(self)
        ).decode(
            msgspec.msgpack.Encoder().encode(self)
        )
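[Editor's note: unlike pydantic, ``msgspec.Struct`` fields are only type-checked when a message is decoded, which is why the ``.copy()`` above round-trips through msgpack to force validation. A small standalone sketch of that behaviour:

import msgspec

class Point(msgspec.Struct):
    x: float
    y: float

p = Point(x=1.0, y='oops')     # no error: construction does not validate
enc = msgspec.msgpack.Encoder()
dec = msgspec.msgpack.Decoder(type=Point)
try:
    dec.decode(enc.encode(p))  # the round-trip is where the type check fires
except msgspec.ValidationError as e:
    print(e)                   # Expected `float`, got `str` - at `$.y`

End note.]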
@ -114,7 +114,7 @@ async def fsp_compute(
        dict[str, np.ndarray],  # multi-output case
        np.ndarray,  # single output case
    ]
    history_output = await anext(out_stream)
    history_output = await out_stream.__anext__()

    func_name = func.__name__
    profiler(f'{func_name} generated history')

@ -374,8 +374,7 @@ async def cascade(
                'key': dst_shm_token,
                'first': dst._first.value,
                'last': dst._last.value,
            }
        })
    }})
    return tracker, index

    def is_synced(

piker/pp.py
@ -1,788 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..
(looking at you `ib` and dirt-bird friends)

'''
from collections import deque
from contextlib import contextmanager as cm
# from pprint import pformat
import os
from os import path
from math import copysign
import re
import time
from typing import (
    Any,
    Optional,
    Union,
)

from msgspec import Struct
import pendulum
from pendulum import datetime, now
import tomli
import toml

from . import config
from .brokers import get_brokermod
from .clearing._messages import BrokerdPosition, Status
from .data._source import Symbol
from .log import get_logger

log = get_logger(__name__)


@cm
def open_trade_ledger(
    broker: str,
    account: str,

) -> str:
    '''
    Idempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.

    Files are named per broker account of the form
    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
    name as defined in the user's ``brokers.toml`` config.

    '''
    ldir = path.join(config._config_dir, 'ledgers')
    if not path.isdir(ldir):
        os.makedirs(ldir)

    fname = f'trades_{broker}_{account}.toml'
    tradesfile = path.join(ldir, fname)

    if not path.isfile(tradesfile):
        log.info(
            f'Creating new local trades ledger: {tradesfile}'
        )
        with open(tradesfile, 'w') as cf:
            pass  # touch
    with open(tradesfile, 'rb') as cf:
        start = time.time()
        ledger = tomli.load(cf)
        print(f'Ledger load took {time.time() - start}s')
        cpy = ledger.copy()
    try:
        yield cpy
    finally:
        if cpy != ledger:
            # TODO: show diff output?
            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
            print(f'Updating ledger for {tradesfile}:\n')
            ledger.update(cpy)

            # we write on close the mutated ledger data
            with open(tradesfile, 'w') as cf:
                return toml.dump(ledger, cf)
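[Editor's note: the context manager above yields a mutable copy of the on-disk TOML and persists any mutations on exit. A sketch of the calling pattern (broker/account names and record fields are illustrative only):

# illustrative usage; 'binance'/'paper' are placeholder names
with open_trade_ledger('binance', 'paper') as ledger:
    # keys are transaction ids, values are raw per-broker trade records
    ledger['tid-001'] = {
        'symbol': 'xmrusdt',
        'size': 2.0,
        'price': 150.25,
    }
# on exit the diff vs. the on-disk TOML is detected and written back

End note.]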

class Transaction(Struct):
    # TODO: should this be ``.to`` (see below)?
    fqsn: str

    tid: Union[str, int]  # unique transaction id
    size: float
    price: float
    cost: float  # commissions or other additional costs
    dt: datetime
    expiry: Optional[datetime] = None

    # optional key normally derived from the broker
    # backend which ensures the instrument-symbol this record
    # is for is truly unique.
    bsuid: Optional[Union[str, int]] = None

    # optional fqsn for the source "asset"/money symbol?
    # from: Optional[str] = None


class Position(Struct):
    '''
    Basic pp (personal/piker position) model with attached clearing
    transaction history.

    '''
    symbol: Symbol

    # can be +ve or -ve for long/short
    size: float

    # "breakeven price" above or below which pnl moves above and below
    # zero for the entirety of the current "trade state".
    be_price: float

    # unique backend symbol id
    bsuid: str

    # ordered record of known constituent trade messages
    clears: dict[
        Union[str, int, Status],  # trade id
        dict[str, Any],  # transaction history summaries
    ] = {}

    expiry: Optional[datetime] = None

    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    def to_pretoml(self) -> dict:
        '''
        Prep this position's data contents for export to toml including
        re-structuring of the ``.clears`` table to an array of
        inline-subtables for better ``pps.toml`` compactness.

        '''
        d = self.to_dict()
        clears = d.pop('clears')
        expiry = d.pop('expiry')

        if expiry:
            d['expiry'] = str(expiry)

        clears_list = []

        for tid, data in clears.items():
            inline_table = toml.TomlDecoder().get_empty_inline_table()
            inline_table['tid'] = tid

            for k, v in data.items():
                inline_table[k] = v

            clears_list.append(inline_table)

        d['clears'] = clears_list

        return d

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:

        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        be_price, size = (
            round(
                msg['avg_price'],
                ndigits=symbol.tick_size_digits
            ),
            round(
                msg['size'],
                ndigits=lot_size_digits
            ),
        )

        self.be_price = be_price
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.be_price * self.size

    def update(
        self,
        t: Transaction,

    ) -> None:
        self.clears[t.tid] = {
            'cost': t.cost,
            'price': t.price,
            'size': t.size,
            'dt': str(t.dt),
        }

    def lifo_update(
        self,
        size: float,
        price: float,
        cost: float = 0,

        # TODO: idea: "real LIFO" dynamic positioning.
        # - when a trade takes place where the pnl for
        # the (set of) trade(s) is below the breakeven price
        # it may be that the trader took a +ve pnl on a short(er)
        # term trade in the same account.
        # - in this case we could recalc the be price to
        # be reverted back to its prior value before the nearest term
        # trade was opened.?
        # dynamic_breakeven_price: bool = False,

    ) -> (float, float):
        '''
        Incremental update using a LIFO-style weighted mean.

        '''
        # "avg position price" calcs
        # TODO: eventually it'd be nice to have a small set of routines
        # to do this stuff from a sequence of cleared orders to enable
        # so called "contextual positions".
        new_size = self.size + size

        # old size minus the new size gives us size diff with
        # +ve -> increase in pp size
        # -ve -> decrease in pp size
        size_diff = abs(new_size) - abs(self.size)

        if new_size == 0:
            self.be_price = 0

        elif size_diff > 0:
            # XXX: LOFI incremental update:
            # only update the "average price" when
            # the size increases not when it decreases (i.e. the
            # position is being made smaller)
            self.be_price = (
                # weight of current exec = (size * price) + cost
                (abs(size) * price)
                +
                (copysign(1, new_size) * cost)  # transaction cost
                +
                # weight of existing be price
                self.be_price * abs(self.size)  # weight of previous pp
            ) / abs(new_size)  # normalized by the new size: weighted mean.

        self.size = new_size

        return new_size, self.be_price
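[Editor's note: ``lifo_update`` folds the transaction cost into the breakeven price, signed by position direction. A worked instance of the formula above (standalone arithmetic, no piker imports):

from math import copysign

# open long 10 @ 100 with a 2.0 round-trip cost charged to the entry
size, price, cost = 10, 100.0, 2.0
old_size, old_be = 0, 0.0

new_size = old_size + size                 # 10
be = (
    abs(size) * price                      # 1000.0
    + copysign(1, new_size) * cost         # +2.0, a long pays the cost
    + old_be * abs(old_size)               # 0.0, no prior position weight
) / abs(new_size)
assert (new_size, be) == (10, 100.2)       # breakeven sits above raw entry

End note.]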
    def minimize_clears(
        self,

    ) -> dict[str, dict]:
        '''
        Minimize the position's clears entries by removing
        all transactions before the last net zero size to avoid
        unnecessary history irrelevant to the current pp state.

        '''
        size: float = self.size
        clears_since_zero: deque[tuple(str, dict)] = deque()

        # scan for the last "net zero" position by
        # iterating clears in reverse.
        for tid, clear in reversed(self.clears.items()):
            size -= clear['size']
            clears_since_zero.appendleft((tid, clear))

            if size == 0:
                break

        self.clears = dict(clears_since_zero)
        return self.clears

def update_pps(
    records: dict[str, Transaction],
    pps: Optional[dict[str, Position]] = None

) -> dict[str, Position]:
    '''
    Compile a set of positions from a trades ledger.

    '''
    pps: dict[str, Position] = pps or {}

    # lifo update all pps from records
    for r in records:

        pp = pps.setdefault(
            r.bsuid,

            # if no existing pp, allocate fresh one.
            Position(
                Symbol.from_fqsn(
                    r.fqsn,
                    info={},
                ),
                size=0.0,
                be_price=0.0,
                bsuid=r.bsuid,
                expiry=r.expiry,
            )
        )

        # don't do updates for ledger records we already have
        # included in the current pps state.
        if r.tid in pp.clears:
            # NOTE: likely you'll see repeats of the same
            # ``Transaction`` passed in here if/when you are restarting
            # a ``brokerd.ib`` where the API will re-report trades from
            # the current session, so we need to make sure we don't
            # "double count" these in pp calculations.
            continue

        # lifo style "breakeven" price calc
        pp.lifo_update(
            r.size,
            r.price,

            # include transaction cost in breakeven price
            # and presume the worst case of the same cost
            # to exit this transaction (even though in reality
            # it will be dynamic based on exit strategy).
            cost=2*r.cost,
        )

        # track clearing data
        pp.update(r)

    return pps


def load_pps_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by: Optional[list[dict]] = None,

) -> dict[str, Position]:
    '''
    Open a ledger file by broker name and account and read in and
    process any trade records into our normalized ``Transaction``
    form and then pass these into the position processing routine
    and deliver the two dict-sets of the active and closed pps.

    '''
    with open_trade_ledger(
        brokername,
        acctname,
    ) as ledger:
        if not ledger:
            # null case, no ledger file with content
            return {}

        brokermod = get_brokermod(brokername)
        src_records = brokermod.norm_trade_records(ledger)

        if filter_by:
            bsuids = set(filter_by)
            records = list(filter(lambda r: r.bsuid in bsuids, src_records))
        else:
            records = src_records

    return update_pps(records)


def get_pps(
    brokername: str,
    acctids: Optional[set[str]] = set(),

) -> dict[str, dict[str, Position]]:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf, path = config.load(
        'pps',
        # load dicts as inlines to preserve compactness
        # _dict=toml.decoder.InlineTableDict,
    )

    all_active = {}
    all_closed = {}

    # try to load any ledgers if no section found
    bconf, path = config.load('brokers')
    accounts = bconf[brokername]['accounts']
    for account in accounts:

        # TODO: instead of this filter we could
        # always send all known pps but just not audit
        # them since an active client might not be up?
        if (
            acctids and
            f'{brokername}.{account}' not in acctids
        ):
            continue

        active, closed = update_pps_conf(brokername, account)
        all_active.setdefault(account, {}).update(active)
        all_closed.setdefault(account, {}).update(closed)

    return all_active, all_closed

# TODO: instead see if we can hack tomli and tomli-w to do the same:
# - https://github.com/hukkin/tomli
# - https://github.com/hukkin/tomli-w
class PpsEncoder(toml.TomlEncoder):
    '''
    Special "styled" encoder that makes a ``pps.toml`` readable and
    compact by putting `.clears` tables inline and everything else
    flat-ish.

    '''
    separator = ','

    def dump_list(self, v):
        '''
        Dump an inline list with a newline after every element and
        with consideration for denoted inline table types.

        '''
        retval = "[\n"
        for u in v:
            if isinstance(u, toml.decoder.InlineTableDict):
                out = self.dump_inline_table(u)
            else:
                out = str(self.dump_value(u))

            retval += " " + out + "," + "\n"
        retval += "]"
        return retval

    def dump_inline_table(self, section):
        """Preserve inline table in its compact syntax instead of expanding
        into subsection.
        https://github.com/toml-lang/toml#user-content-inline-table
        """
        val_list = []
        for k, v in section.items():
            # if isinstance(v, toml.decoder.InlineTableDict):
            if isinstance(v, dict):
                val = self.dump_inline_table(v)
            else:
                val = str(self.dump_value(v))

            val_list.append(k + " = " + val)

        retval = "{ " + ", ".join(val_list) + " }"
        return retval

    def dump_sections(self, o, sup):
        retstr = ""
        if sup != "" and sup[-1] != ".":
            sup += '.'
        retdict = self._dict()
        arraystr = ""
        for section in o:
            qsection = str(section)
            value = o[section]

            if not re.match(r'^[A-Za-z0-9_-]+$', section):
                qsection = toml.encoder._dump_str(section)

            # arrayoftables = False
            if (
                self.preserve
                and isinstance(value, toml.decoder.InlineTableDict)
            ):
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    self.dump_inline_table(o[section])
                    +
                    '\n'  # only on the final terminating left brace
                )

            # XXX: this code i'm pretty sure is just blatantly bad
            # and/or wrong..
            # if isinstance(o[section], list):
            #     for a in o[section]:
            #         if isinstance(a, dict):
            #             arrayoftables = True
            # if arrayoftables:
            #     for a in o[section]:
            #         arraytabstr = "\n"
            #         arraystr += "[[" + sup + qsection + "]]\n"
            #         s, d = self.dump_sections(a, sup + qsection)
            #         if s:
            #             if s[0] == "[":
            #                 arraytabstr += s
            #             else:
            #                 arraystr += s
            #         while d:
            #             newd = self._dict()
            #             for dsec in d:
            #                 s1, d1 = self.dump_sections(d[dsec], sup +
            #                                             qsection + "." +
            #                                             dsec)
            #                 if s1:
            #                     arraytabstr += ("[" + sup + qsection +
            #                                     "." + dsec + "]\n")
            #                     arraytabstr += s1
            #                 for s1 in d1:
            #                     newd[dsec + "." + s1] = d1[s1]
            #             d = newd
            #     arraystr += arraytabstr

            elif isinstance(value, dict):
                retdict[qsection] = o[section]

            elif o[section] is not None:
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    str(self.dump_value(o[section]))
                )

                # if not isinstance(value, dict):
                if not isinstance(value, toml.decoder.InlineTableDict):
                    # inline tables should not contain newlines:
                    # https://toml.io/en/v1.0.0#inline-table
                    retstr += '\n'

            else:
                raise ValueError(value)

        retstr += arraystr
        return (retstr, retdict)
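[Editor's note: a hedged sketch of what this encoder is for, wiring it up the same way ``update_pps_conf()`` below does; the table name and values are made up and the printed output is approximate:

import toml

dec = toml.TomlDecoder()
clear = dec.get_empty_inline_table()   # marks the dict as "keep inline"
clear.update(tid='tid-001', price=150.25, size=2.0)

enc = PpsEncoder(preserve=True)
enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table
print(toml.dumps({'xmrusdt': {'clears': [clear]}}, encoder=enc))
# roughly:
# [xmrusdt]
# clears = [
#  { tid = "tid-001", price = 150.25, size = 2.0, },
# ]

End note.]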
def load_pps_from_toml(
    brokername: str,
    acctid: str,

    # XXX: there is an edge case here where we may want to either audit
    # the retrieved ``pps.toml`` output or reprocess it since there was
    # an error on write on the last attempt to update the state file
    # even though the ledger *was* updated. For these cases we allow the
    # caller to pass in a symbol set they'd like to reload from the
    # underlying ledger to be reprocessed in computing pps state.
    reload_records: Optional[dict[str, str]] = None,
    update_from_ledger: bool = False,

) -> tuple[dict, dict[str, Position]]:
    '''
    Load and marshal to objects all pps from either an existing
    ``pps.toml`` config, or from scratch from a ledger file when
    none yet exists.

    '''
    conf, path = config.load('pps')
    brokersection = conf.setdefault(brokername, {})
    pps = brokersection.setdefault(acctid, {})
    pp_objs = {}

    # no pps entry yet for this broker/account so parse any available
    # ledgers to build a brand new pps state.
    if not pps or update_from_ledger:
        pp_objs = load_pps_from_ledger(
            brokername,
            acctid,
        )

    # Reload symbol specific ledger entries if requested by the
    # caller **AND** none exist in the current pps state table.
    elif (
        pps and reload_records
    ):
        # no pps entry yet for this broker/account so parse
        # any available ledgers to build a pps state.
        pp_objs = load_pps_from_ledger(
            brokername,
            acctid,
            filter_by=reload_records,
        )

    if not pps:
        log.warning(
            f'No `pps.toml` positions could be loaded {brokername}:{acctid}'
        )

    # unmarshal/load ``pps.toml`` config entries into object form.
    for fqsn, entry in pps.items():
        bsuid = entry['bsuid']

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        clears_list = entry['clears']

        # index clears entries in "object" form by tid in a top
        # level dict instead of a list (as is presented in our
        # ``pps.toml``).
        pp = pp_objs.get(bsuid)
        if pp:
            clears = pp.clears
        else:
            clears = {}

        for clears_table in clears_list:
            tid = clears_table.pop('tid')
            clears[tid] = clears_table

        size = entry['size']

        # TODO: an audit system for existing pps entries?
        # if not len(clears) == abs(size):
        #     pp_objs = load_pps_from_ledger(
        #         brokername,
        #         acctid,
        #         filter_by=reload_records,
        #     )
        #     reason = 'size <-> len(clears) mismatch'
        #     raise ValueError(
        #         '`pps.toml` entry is invalid:\n'
        #         f'{fqsn}\n'
        #         f'{pformat(entry)}'
        #     )

        expiry = entry.get('expiry')
        if expiry:
            expiry = pendulum.parse(expiry)

        pp_objs[bsuid] = Position(
            Symbol.from_fqsn(fqsn, info={}),
            size=size,
            be_price=entry['be_price'],
            expiry=expiry,
            bsuid=entry['bsuid'],

            # XXX: super critical, we need to be sure to include
            # all pps.toml clears to avoid reusing clears that were
            # already included in the current incremental update
            # state, since today's records may have already been
            # processed!
            clears=clears,
        )

    return conf, pp_objs


def update_pps_conf(
    brokername: str,
    acctid: str,

    trade_records: Optional[list[Transaction]] = None,
    ledger_reload: Optional[dict[str, str]] = None,

) -> tuple[
    dict[str, Position],
    dict[str, Position],
]:

    # this maps `.bsuid` values to positions
    pp_objs: dict[Union[str, int], Position]

    if trade_records and ledger_reload:
        for r in trade_records:
            ledger_reload[r.bsuid] = r.fqsn

    conf, pp_objs = load_pps_from_toml(
        brokername,
        acctid,
        reload_records=ledger_reload,
    )

    # update all pp objects from any (new) trade records which
    # were passed in (aka incremental update case).
    if trade_records:
        pp_objs = update_pps(
            trade_records,
            pps=pp_objs,
        )

    pp_entries = {}  # dict-serialize all active pps
    # NOTE: newly closed position are also important to report/return
    # since a consumer, like an order mode UI ;), might want to react
    # based on the closure.
    closed_pp_objs: dict[str, Position] = {}

    for bsuid in list(pp_objs):
        pp = pp_objs[bsuid]

        # XXX: debug hook for size mismatches
        # if bsuid == 447767096:
        #     breakpoint()

        pp.minimize_clears()

        if (
            pp.size == 0

            # drop time-expired positions (normally derivatives)
            or (pp.expiry and pp.expiry < now())
        ):
            # if expired the position is closed
            pp.size = 0

            # position is already closed aka "net zero"
            closed_pp = pp_objs.pop(bsuid, None)
            if closed_pp:
                closed_pp_objs[bsuid] = closed_pp

        else:
            # serialize to pre-toml form
            asdict = pp.to_pretoml()

            if pp.expiry is None:
                asdict.pop('expiry', None)

            # TODO: we need to figure out how to have one top level
            # listing venue here even when the backend isn't providing
            # it via the trades ledger..
            # drop symbol obj in serialized form
            s = asdict.pop('symbol')
            fqsn = s.front_fqsn()
            log.info(f'Updating active pp: {fqsn}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqsn.removeprefix(f'{brokername}.')

            pp_entries[brokerless_key] = asdict

    conf[brokername][acctid] = pp_entries

    # TODO: why tf haven't they already done this for inline tables smh..
    enc = PpsEncoder(preserve=True)
    # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
    enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table

    config.write(
        conf,
        'pps',
        encoder=enc,
    )

    # deliver object form of all pps in table to caller
    return pp_objs, closed_pp_objs


if __name__ == '__main__':
    import sys

    args = sys.argv
    assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
    args = args[1:]
    for acctid in args:
        broker, name = acctid.split('.')
        update_pps_conf(broker, name)

@ -230,19 +230,18 @@ class GodWidget(QWidget):
        # - we'll probably want per-instrument/provider state here?
        #   change the order config form over to the new chart

        # XXX: since the pp config is a singleton widget we have to
        # also switch it over to the new chart's internal-layout
        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
        chart = linkedsplits.chart

        # chart is already in memory so just focus it
        linkedsplits.show()
        linkedsplits.focus()
        linkedsplits.graphics_cycle()
        await trio.sleep(0)

        # XXX: since the pp config is a singleton widget we have to
        # also switch it over to the new chart's internal-layout
        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
        chart = linkedsplits.chart

        # resume feeds *after* rendering chart view asap
        if chart:
            chart.resume_all_feeds()

        # TODO: we need a check to see if the chart

@ -761,18 +760,9 @@ class ChartPlotWidget(pg.PlotWidget):

        self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)

        # idempotent startup flag for auto-yrange subsys
        # to detect the "first time" y-domain graphics begin
        # to be shown in the (main) graphics view.
        self._on_screen: bool = False

    def resume_all_feeds(self):
        try:
            for feed in self._feeds.values():
                self.linked.godwidget._root_n.start_soon(feed.resume)
        except RuntimeError:
            # TODO: cancel the qtractor runtime here?
            raise

    def pause_all_feeds(self):
        for feed in self._feeds.values():

@ -869,8 +859,7 @@ class ChartPlotWidget(pg.PlotWidget):

    def default_view(
        self,
        bars_from_y: int = 616,
        do_ds: bool = True,
        bars_from_y: int = 3000,

    ) -> None:
        '''

@ -931,11 +920,8 @@ class ChartPlotWidget(pg.PlotWidget):
            max=end,
            padding=0,
        )

        if do_ds:
            self.view.maybe_downsample_graphics()
            view._set_yrange()

        try:
            self.linked.graphics_cycle()
        except IndexError:

@ -1269,6 +1255,7 @@ class ChartPlotWidget(pg.PlotWidget):
        If ``bars_range`` is provided use that range.

        '''
        # print(f'Chart[{self.name}].maxmin()')
        profiler = pg.debug.Profiler(
            msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
            disabled=not pg_profile_enabled(),

@ -1300,18 +1287,11 @@ class ChartPlotWidget(pg.PlotWidget):

            key = round(lbar), round(rbar)
            res = flow.maxmin(*key)

            if (
                res is None
            ):
                log.warning(
            if res == (None, None):
                log.error(
                    f"{flow_key} no mxmn for bars_range => {key} !?"
                )
                res = 0, 0
                if not self._on_screen:
                    self.default_view(do_ds=False)
                    self._on_screen = True

        profiler(f'yrange mxmn: {key} -> {res}')
        # print(f'{flow_key} yrange mxmn: {key} -> {res}')
        return res

@ -223,20 +223,14 @@ def ds_m4(
    assert frames >= (xrange / uppx)

    # call into ``numba``
    (
        nb,
        x_out,
        y_out,
        ymn,
        ymx,
    ) = _m4(
    nb, i_win, y_out = _m4(
        x,
        y,

        frames,

        # TODO: see func below..
        # x_out,
        # i_win,
        # y_out,

        # first index in x data to start at

@ -249,11 +243,10 @@ def ds_m4(
    # filter out any overshoot in the input allocation arrays by
    # removing zero-ed tail entries which should start at a certain
    # index.
    x_out = x_out[x_out != 0]
    y_out = y_out[:x_out.size]
    i_win = i_win[i_win != 0]
    y_out = y_out[:i_win.size]

    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
    return nb, x_out, y_out, ymn, ymx
    return nb, i_win, y_out


@jit(

@ -267,8 +260,8 @@ def _m4(

    frames: int,

    # TODO: using this approach, having the ``.zeros()`` alloc lines
    # below in pure python, there were segs faults and alloc crashes..
    # TODO: using this approach by having the ``.zeros()`` alloc lines
    # below, in pure python was causing segs faults and alloc crashes..
    # we might need to see how it behaves with shm arrays and consider
    # allocating them once at startup?

@ -281,22 +274,14 @@ def _m4(
    x_start: int,
    step: float,

) -> tuple[
    int,
    np.ndarray,
    np.ndarray,
    float,
    float,
]:
    '''
    Implementation of the m4 algorithm in ``numba``:
    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
) -> int:
    # nbins = len(i_win)
    # count = len(xs)

    '''
    # these are pre-allocated and mutated by ``numba``
    # code in-place.
    y_out = np.zeros((frames, 4), ys.dtype)
    x_out = np.zeros(frames, xs.dtype)
    i_win = np.zeros(frames, xs.dtype)

    bincount = 0
    x_left = x_start

@ -310,34 +295,24 @@ def _m4(

    # set all bins in the left-most entry to the starting left-most x value
    # (aka a row broadcast).
    x_out[bincount] = x_left
    i_win[bincount] = x_left
    # set all y-values to the first value passed in.
    y_out[bincount] = ys[0]

    # full input y-data mx and mn
    mx: float = -np.inf
    mn: float = np.inf

    # compute OHLC style max / min values per window sized x-frame.
    for i in range(len(xs)):

        x = xs[i]
        y = ys[i]

        if x < x_left + step:   # the current window "step" is [bin, bin+1)
            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
            y_out[bincount, 1] = min(y, y_out[bincount, 1])
            y_out[bincount, 2] = max(y, y_out[bincount, 2])
            y_out[bincount, 3] = y
            mx = max(mx, ymx)
            mn = min(mn, ymn)

        else:
            # Find the next bin
            while x >= x_left + step:
                x_left += step

            bincount += 1
            x_out[bincount] = x_left
            i_win[bincount] = x_left
            y_out[bincount] = y

    return bincount, x_out, y_out, mn, mx
    return bincount, i_win, y_out
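[Editor's note: the M4 technique above (Jugel et al., VLDB 2014, linked in the removed docstring) keeps exactly (first, min, max, last) per pixel-wide x-bin, which is all a single pixel column can ever display, so the downsample is visually lossless. A pure-NumPy sketch of one bin, no numba (the `m4_bin` name is hypothetical):

import numpy as np

def m4_bin(xs, ys, x_left, step):
    # one M4 bin over [x_left, x_left + step): keep (first, min, max, last)
    in_bin = (xs >= x_left) & (xs < x_left + step)
    yb = ys[in_bin]
    return yb[0], yb.min(), yb.max(), yb[-1]

xs = np.arange(8, dtype=float)
ys = np.array([1., 5., 2., 7., 3., 0., 4., 6.])
assert m4_bin(xs, ys, 0.0, 4.0) == (1.0, 1.0, 7.0, 7.0)

End note.]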
@ -105,10 +105,6 @@ def chart_maxmin(
    mn, mx = out

    mx_vlm_in_view = 0

    # TODO: we need to NOT call this to avoid a manual
    # np.max/min trigger and especially on the vlm_chart
    # flows which aren't shown.. like vlm?
    if vlm_chart:
        out = vlm_chart.maxmin()
        if out:

@ -226,9 +222,33 @@ async def graphics_update_loop(
    tick_margin = 3 * tick_size

    chart.show()
    # view = chart.view
    last_quote = time.time()
    i_last = ohlcv.index

    # async def iter_drain_quotes():
    #     # NOTE: all code below this loop is expected to be synchronous
    #     # and thus draw instructions are not picked up until the next
    #     # wait / iteration.
    #     async for quotes in stream:
    #         while True:
    #             try:
    #                 moar = stream.receive_nowait()
    #             except trio.WouldBlock:
    #                 yield quotes
    #                 break
    #             else:
    #                 for sym, quote in moar.items():
    #                     ticks_frame = quote.get('ticks')
    #                     if ticks_frame:
    #                         quotes[sym].setdefault(
    #                             'ticks', []).extend(ticks_frame)
    #                 print('pulled extra')

    #                 yield quotes

    # async for quotes in iter_drain_quotes():

    ds = linked.display_state = DisplayState(**{
        'quotes': {},
        'linked': linked,

@ -273,7 +293,6 @@ async def graphics_update_loop(

        # chart isn't active/shown so skip render cycle and pause feed(s)
        if chart.linked.isHidden():
            print('skipping update')
            chart.pause_all_feeds()
            continue

@ -397,8 +416,10 @@ def graphics_update_cycle(
        )
        or trigger_all
    ):
        # TODO: we should track and compute whether the last
        # pixel in a curve should show new data based on uppx
        # and then iff update curves and shift?
        chart.increment_view(steps=i_diff)
        # chart.increment_view(steps=i_diff + round(append_diff - uppx))

        if vlm_chart:
            vlm_chart.increment_view(steps=i_diff)

@ -456,6 +477,7 @@ def graphics_update_cycle(
        ):
            chart.update_graphics_from_flow(
                chart.name,
                # do_append=uppx < update_uppx,
                do_append=do_append,
            )

@ -21,6 +21,7 @@ Qt event proxying and processing using ``trio`` mem chans.
|
|||
from contextlib import asynccontextmanager, AsyncExitStack
|
||||
from typing import Callable
|
||||
|
||||
from pydantic import BaseModel
|
||||
import trio
|
||||
from PyQt5 import QtCore
|
||||
from PyQt5.QtCore import QEvent, pyqtBoundSignal
|
||||
|
@ -29,8 +30,6 @@ from PyQt5.QtWidgets import (
|
|||
QGraphicsSceneMouseEvent as gs_mouse,
|
||||
)
|
||||
|
||||
from ..data.types import Struct
|
||||
|
||||
|
||||
MOUSE_EVENTS = {
|
||||
gs_mouse.GraphicsSceneMousePress,
|
||||
|
@ -44,10 +43,13 @@ MOUSE_EVENTS = {
|
|||
# TODO: maybe consider some constrained ints down the road?
|
||||
# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
|
||||
|
||||
class KeyboardMsg(Struct):
|
||||
class KeyboardMsg(BaseModel):
|
||||
'''Unpacked Qt keyboard event data.
|
||||
|
||||
'''
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
event: QEvent
|
||||
etype: int
|
||||
key: int
|
||||
|
@ -55,13 +57,16 @@ class KeyboardMsg(Struct):
|
|||
txt: str
|
||||
|
||||
def to_tuple(self) -> tuple:
|
||||
return tuple(self.to_dict().values())
|
||||
return tuple(self.dict().values())
|
||||
|
||||
|
||||
class MouseMsg(Struct):
|
||||
class MouseMsg(BaseModel):
|
||||
'''Unpacked Qt keyboard event data.
|
||||
|
||||
'''
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
event: QEvent
|
||||
etype: int
|
||||
button: int
|
||||
|
|
|
@ -337,7 +337,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
    name: str
    plot: pg.PlotItem
    graphics: Union[Curve, BarItems]
    yrange: tuple[float, float] = None

    # in some cases a flow may want to change its
    # graphical "type" or, "form" when downsampling,

@ -387,11 +386,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
        lbar: int,
        rbar: int,

    ) -> Optional[tuple[float, float]]:
    ) -> tuple[float, float]:
        '''
        Compute the cached max and min y-range values for a given
        x-range determined by ``lbar`` and ``rbar`` or ``None``
        if no range can be determined (yet).
        x-range determined by ``lbar`` and ``rbar``.

        '''
        rkey = (lbar, rbar)

@ -401,8 +399,9 @@ class Flow(msgspec.Struct):  # , frozen=True):

        shm = self.shm
        if shm is None:
            return None
            mxmn = None

        else:  # new block for profiling?..
            arr = shm.array

            # build relative indexes into shm array

@ -415,11 +414,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
            ]

            if not slice_view.size:
                return None

            elif self.yrange:
                mxmn = self.yrange
                # print(f'{self.name} M4 maxmin: {mxmn}')
                mxmn = None

            else:
                if self.is_ohlc:

@ -432,10 +427,9 @@ class Flow(msgspec.Struct):  # , frozen=True):
                    yhigh = np.max(view)

                mxmn = ylow, yhigh
                # print(f'{self.name} MANUAL maxmin: {mxmin}')

            # cache result for input range
            assert mxmn
            if mxmn is not None:
                # cache new mxmn result
                self._mxmns[rkey] = mxmn

        return mxmn

@ -634,13 +628,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
            # source data so we clear our path data in prep
            # to generate a new one from original source data.
            new_sample_rate = True
            showing_src_data = True
            should_ds = False
            should_redraw = True

            showing_src_data = True
            # reset yrange to be computed from source data
            self.yrange = None

        # MAIN RENDER LOGIC:
        # - determine in view data and redraw on range change
        # - determine downsampling ops if needed

@ -666,10 +657,6 @@ class Flow(msgspec.Struct):  # , frozen=True):

            **rkwargs,
        )
        if showing_src_data:
            # print(f"{self.name} SHOWING SOURCE")
            # reset yrange to be computed from source data
            self.yrange = None

        if not out:
            log.warning(f'{self.name} failed to render!?')

@ -677,9 +664,6 @@ class Flow(msgspec.Struct):  # , frozen=True):

        path, data, reset = out

        # if self.yrange:
        #     print(f'flow {self.name} yrange from m4: {self.yrange}')

        # XXX: SUPER UGGGHHH... without this we get stale cache
        # graphics that don't update until you downsampler again..
        if reset:

@ -1074,7 +1058,6 @@ class Renderer(msgspec.Struct):
        # xy-path data transform: convert source data to a format
        # able to be passed to a `QPainterPath` rendering routine.
        if not len(hist):
            # XXX: this might be why the profiler only has exits?
            return

        x_out, y_out, connect = self.format_xy(

@ -1161,14 +1144,11 @@ class Renderer(msgspec.Struct):

        elif should_ds and uppx > 1:

            x_out, y_out, ymn, ymx = xy_downsample(
            x_out, y_out = xy_downsample(
                x_out,
                y_out,
                uppx,
            )
            self.flow.yrange = ymn, ymx
            # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')

            reset = True
            profiler(f'FULL PATH downsample redraw={should_ds}')
            self._in_ds = True

@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
        # color: #19232D;
        # width: 10px;

        self.setRange(0, int(slots))
        self.setRange(0, slots)
        self.setValue(value)

@ -639,25 +639,20 @@ async def open_vlm_displays(
        names: list[str],

    ) -> tuple[float, float]:
        '''
        Flows "group" maxmin loop; assumes all named flows
        are in the same co-domain and thus can be sorted
        as one set.

        Iterates all the named flows and calls the chart
        api to find their range values and return.

        TODO: really we should probably have a more built-in API
        for this?

        '''
        mx = 0
        for name in names:
            ymn, ymx = chart.maxmin(name=name)
            mx = max(mx, ymx)

            mxmn = chart.maxmin(name=name)
            if mxmn:
                ymax = mxmn[1]
                if ymax > mx:
                    mx = ymax

        return 0, mx

    chart.view.maxmin = partial(multi_maxmin, names=['volume'])

    # TODO: fix the x-axis label issue where if you put
    # the axis on the left it's totally not lined up...
    # show volume units value on LHS (for dinkus)

@ -781,7 +776,6 @@ async def open_vlm_displays(

    ) -> None:
        for name in names:

            if 'dark' in name:
                color = dark_vlm_color
            elif 'rate' in name:

@ -923,7 +923,6 @@ class ChartView(ViewBox):
                # XXX: super important to be aware of this.
                # or not flow.graphics.isVisible()
            ):
                # print(f'skipping {flow.name}')
                continue

            # pass in no array which will read and render from the last

@ -22,9 +22,12 @@ from __future__ import annotations
from typing import (
    Optional, Generic,
    TypeVar, Callable,
    Literal,
)
import enum
import sys

# from pydantic import BaseModel, validator
from pydantic import BaseModel, validator
from pydantic.generics import GenericModel
from PyQt5.QtWidgets import (
    QWidget,

@ -35,7 +38,6 @@ from ._forms import (
    # FontScaledDelegate,
    Edit,
)
from ..data.types import Struct


DataType = TypeVar('DataType')

@ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]):
    options: dict[str, DataType]
    # value: DataType = None

    # @validator('value')  # , always=True)
    @validator('value')  # , always=True)
    def set_value_first(
        cls,

@ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]):
    widget_factory = Edit


class AllocatorPane(Struct):
class AllocatorPane(BaseModel):

    account = Selection[str](
        options=dict.fromkeys(

@ -49,17 +49,12 @@ def xy_downsample(

    x_spacer: float = 0.5,

) -> tuple[
    np.ndarray,
    np.ndarray,
    float,
    float,
]:
) -> tuple[np.ndarray, np.ndarray]:

    # downsample whenever more than 1 pixels per datum can be shown.
    # always refresh data bounds until we get diffing
    # working properly, see above..
    bins, x, y, ymn, ymx = ds_m4(
    bins, x, y = ds_m4(
        x,
        y,
        uppx,

@ -72,7 +67,7 @@ def xy_downsample(
    )).flatten()
    y = y.flatten()

    return x, y, ymn, ymx
    return x, y


@njit(

@ -19,7 +19,6 @@ Position info and display
|
|||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from copy import copy
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from math import floor, copysign
|
||||
|
@@ -106,8 +105,8 @@ async def update_pnl_from_feed(

        # compute and display pnl status
        order_mode.pane.pnl_label.format(
            pnl=copysign(1, size) * pnl(
                # live.be_price,
                order_mode.current_pp.live_pp.be_price,
                # live.avg_price,
                order_mode.current_pp.live_pp.avg_price,
                tick['price'],
            ),
        )
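
The ``copysign(1, size) * pnl(...)`` idiom flips the percent-return's sign
for short positions. A toy ``pnl`` helper (the real one lives elsewhere in
the codebase) makes the arithmetic concrete:

    from math import copysign

    def pnl(avg_entry: float, last: float) -> float:
        # simple percent-return relative to the entry price
        return (last - avg_entry) / avg_entry

    size = -100  # a short position
    # price falling from 10.0 to 9.0 is a +10% gain when short:
    print(copysign(1, size) * pnl(10.0, 9.0))  # 0.1
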
@@ -357,7 +356,7 @@ class SettingsPane:
        # last historical close price
        last = feed.shm.array[-1][['close']][0]
        pnl_value = copysign(1, size) * pnl(
            tracker.live_pp.be_price,
            tracker.live_pp.avg_price,
            last,
        )
@@ -477,7 +476,7 @@ class PositionTracker:

        self.alloc = alloc
        self.startup_pp = startup_pp
        self.live_pp = copy(startup_pp)
        self.live_pp = startup_pp.copy()

        view = chart.getViewBox()
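
This hunk swaps the stdlib's shallow ``copy()`` for pydantic's
``BaseModel.copy()``; both yield an instance independent of the original.
A miniature demo with a stand-in dataclass:

    from copy import copy
    from dataclasses import dataclass

    @dataclass
    class Position:
        size: float = 0.0

    startup_pp = Position(size=10)

    live_pp = copy(startup_pp)     # stdlib idiom (dataclass/msgspec era)
    # live_pp = startup_pp.copy()  # pydantic BaseModel equivalent

    live_pp.size = 20
    assert startup_pp.size == 10   # the original is untouched either way
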
@@ -557,7 +556,7 @@ class PositionTracker:
        pp = position or self.live_pp

        self.update_line(
            pp.be_price,
            pp.avg_price,
            pp.size,
            self.chart.linked.symbol.lot_size_digits,
        )
@@ -571,7 +570,7 @@ class PositionTracker:
            self.hide()

        else:
            self._level_marker.level = pp.be_price
            self._level_marker.level = pp.avg_price

            # these updates are critical to avoid lag on view/scene changes
            self._level_marker.update()  # trigger paint
@@ -27,20 +27,20 @@ import time
from typing import Optional, Dict, Callable, Any
import uuid

from pydantic import BaseModel
import tractor
import trio
from PyQt5.QtCore import Qt

from .. import config
from ..pp import Position
from ..clearing._client import open_ems, OrderBook
from ..clearing._allocate import (
    mk_allocator,
    Position,
)
from ._style import _font
from ..data._source import Symbol
from ..data.feed import Feed
from ..data.types import Struct
from ..log import get_logger
from ._editors import LineEditor, ArrowEditor
from ._lines import order_line, LevelLine
@@ -58,9 +58,8 @@ from ._forms import open_form_input_handling

log = get_logger(__name__)


class OrderDialog(Struct):
    '''
    Trade dialogue meta-data describing the lifetime
class OrderDialog(BaseModel):
    '''Trade dialogue meta-data describing the lifetime
    of an order submission to ``emsd`` from a chart.

    '''
@@ -73,6 +72,10 @@ class OrderDialog(Struct):
    msgs: dict[str, dict] = {}
    fills: Dict[str, Any] = {}

    class Config:
        arbitrary_types_allowed = True
        underscore_attrs_are_private = False


def on_level_change_update_next_order_info(
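
For contrast, the two declaration styles from this hunk side by side with a
trimmed-down field set (purely illustrative). The ``Config`` block is what
lets pydantic fields hold types it has no validator for, e.g. tractor
streams or Qt objects:

    from typing import Any
    from msgspec import Struct
    from pydantic import BaseModel

    class OrderDialogStruct(Struct):
        uuid: str
        fills: dict[str, Any] = {}  # msgspec copies mutable defaults

    class OrderDialogModel(BaseModel):
        uuid: str
        fills: dict[str, Any] = {}

        class Config:
            arbitrary_types_allowed = True
            underscore_attrs_are_private = False
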
@@ -84,8 +87,7 @@ def on_level_change_update_next_order_info(
    tracker: PositionTracker,

) -> None:
    '''
    A callback applied for each level change to the line
    '''A callback applied for each level change to the line
    which will recompute the order size based on allocator
    settings. this is assigned inside
    ``OrderMode.line_from_order()``
@@ -264,8 +266,7 @@ class OrderMode:
        self,

    ) -> OrderDialog:
        '''
        Send execution order to EMS return a level line to
        '''Send execution order to EMS return a level line to
        represent the order on a chart.

        '''
@@ -274,9 +275,13 @@ class OrderMode:
        oid = str(uuid.uuid4())

        # format order data for ems
        order = staged.copy()
        order.oid = oid
        order.symbol = symbol.front_fqsn()
        fqsn = symbol.front_fqsn()
        order = staged.copy(
            update={
                'symbol': fqsn,
                'oid': oid,
            }
        )

        line = self.line_from_order(
            order,
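
The change above moves from copy-then-mutate to pydantic's functional
``.copy(update=...)``. One caveat worth knowing: in pydantic v1 the
``update`` values skip re-validation. A stand-in ``Order`` model:

    import uuid
    from pydantic import BaseModel

    class Order(BaseModel):
        symbol: str = ''
        oid: str = ''
        price: float = 0.0

    staged = Order(price=101.5)
    oid = str(uuid.uuid4())

    # one-shot functional update; ``staged`` itself is untouched
    order = staged.copy(update={'symbol': 'eurusd.ib', 'oid': oid})
    assert staged.oid == ''
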
@@ -572,9 +577,9 @@ async def open_order_mode(
        providers=symbol.brokers
    )

    # XXX: ``brokerd`` delivers a set of account names that it
    # allows use of but the user also can define the accounts they'd
    # like to use, in order, in their `brokers.toml` file.
    # XXX: ``brokerd`` delivers a set of account names that it allows
    # use of but the user also can define the accounts they'd like
    # to use, in order, in their `brokers.toml` file.
    accounts = {}
    for name in brokerd_accounts:
        # ensure name is in ``brokers.toml``
@@ -587,21 +592,10 @@ async def open_order_mode(
        iter(accounts.keys())
    ) if accounts else 'paper'

    # Pack position messages by account, should only be one-to-one.
    # NOTE: requires the backend exactly specifies
    # the expected symbol key in its positions msg.
    pps_by_account = {}
    for (broker, acctid), msgs in position_msgs.items():
        for msg in msgs:

            sym = msg['symbol']
            if (
                sym == symkey or
                # mega-UGH, i think we need to fix the FQSN stuff sooner
                # than later..
                sym == symkey.removesuffix(f'.{broker}')
            ):
                pps_by_account[acctid] = msg
    pp_msgs = position_msgs.get(symkey, ())
    pps_by_account = {msg['account']: msg for msg in pp_msgs}

    # update pp trackers with data relayed from ``brokerd``.
    for account_name in accounts:
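
Toy data showing what the repack simplification assumes: position msgs
arrive pre-keyed by fqsn and each carries an ``account`` field (all values
below are made up):

    position_msgs = {
        'xbtusd.kraken': [
            {'account': 'kraken.spot', 'symbol': 'xbtusd.kraken', 'size': 1.0},
        ],
    }
    symkey = 'xbtusd.kraken'

    pp_msgs = position_msgs.get(symkey, ())
    pps_by_account = {msg['account']: msg for msg in pp_msgs}
    print(pps_by_account)  # {'kraken.spot': {...}}
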
@@ -610,10 +604,7 @@ async def open_order_mode(
            startup_pp = Position(
                symbol=symbol,
                size=0,
                be_price=0,

                # XXX: BLEH, do we care about this on the client side?
                bsuid=symbol,
                avg_price=0,
            )
            msg = pps_by_account.get(account_name)
            if msg:
@@ -851,9 +842,7 @@ async def process_trades_and_update_ui(
            # delete level line from view
            mode.on_cancel(oid)
            broker_msg = msg['brokerd_msg']
            log.warning(
                f'Order {oid} failed with:\n{pformat(broker_msg)}'
            )
            log.warning(f'Order {oid} failed with:\n{pformat(broker_msg)}')

        elif resp in (
            'dark_triggered'
setup.py
@@ -41,17 +41,17 @@ setup(
    },
    install_requires=[
        'toml',
        'tomli',  # fastest pure py reader
        'click',
        'colorlog',
        'attrs',
        'pygments',
        'colorama',  # numba traceback coloring
        'msgspec',  # performant IPC messaging and structs
        'pydantic',  # structured data

        # async
        'trio',
        'trio-websocket',
        'msgspec',  # performant IPC messaging
        'async_generator',

        # from github currently (see requirements.txt)
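
What the dependency swap amounts to in practice: both libraries can
validate and (de)serialize wire msgs, msgspec just defers validation to
decode time. A minimal roundtrip with each, using toy ``Tick`` types:

    import msgspec
    from pydantic import BaseModel

    class TickM(msgspec.Struct):
        price: float
        size: float

    # msgspec: validation happens during decode
    tick = msgspec.json.decode(b'{"price": 10.5, "size": 2}', type=TickM)

    class TickP(BaseModel):
        price: float
        size: float

    # pydantic v1: eager validation at construction/parse time
    tick2 = TickP.parse_raw('{"price": 10.5, "size": 2}')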