Compare commits


No commits in common. "asycvnc_pin_bump" and "310_plus" have entirely different histories.

47 changed files with 2940 additions and 6498 deletions

View File

@@ -2,19 +2,15 @@
# start VNC server
x11vnc \
-listen 127.0.0.1 \
-allow 127.0.0.1 \
-autoport 3003 \
-no6 \
-noipv6 \
-ncache_cr \
-listen localhost \
-display :1 \
-bg \
-forever \
-shared \
-logappend /var/log/x11vnc.log \
-ncache_cr \
-ncache \
-bg \
-noipv6 \
-autoport 3003 \
# can't use this because of ``asyncvnc`` issue:
# https://github.com/barneygale/asyncvnc/issues/1
# -passwd 'ibcansmbz'
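As an aside on the ``asyncvnc`` issue referenced above, a minimal client-side connection sketch, assuming the localhost/port-3003 settings from this script and the ``asyncvnc`` mouse/keyboard API used by the ib backend later in this diff (password auth is omitted since that is exactly what the linked issue breaks):

import asyncio

import asyncvnc

async def probe_vnc() -> None:
    # port 3003 matches the ``-autoport 3003`` flag above; no
    # ``password=`` kwarg is passed due to the linked issue.
    async with asyncvnc.connect('localhost', 3003) as client:
        # stacked modifier-key press, mirroring the ib data-reset hack
        client.keyboard.press('Ctrl', 'Alt', 'f')

asyncio.run(probe_vnc())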

View File

@@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
from contextlib import asynccontextmanager as acm
from collections import defaultdict
from msgspec import Struct
import tractor
from pydantic import BaseModel
import trio
from trio_typing import TaskStatus
import tractor
from .log import get_logger, get_console_log
from .brokers import get_brokermod
@@ -47,13 +47,16 @@ _root_modules = [
]
class Services(Struct):
class Services(BaseModel):
actor_n: tractor._supervise.ActorNursery
service_n: trio.Nursery
debug_mode: bool # tractor sub-actor debug mode flag
service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
class Config:
arbitrary_types_allowed = True
async def start_service_task(
self,
name: str,
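For context on the ``BaseModel`` -> ``Struct`` swap shown in this hunk: ``msgspec.Struct`` stores arbitrary runtime objects as-is and only type-checks on encode/decode, so pydantic's ``arbitrary_types_allowed`` escape hatch has no equivalent and simply disappears. A minimal sketch with a hypothetical field subset:

import trio
from msgspec import Struct

class ServicesSketch(Struct):
    service_n: trio.Nursery  # arbitrary runtime type, no Config needed
    debug_mode: bool
    # empty mutable defaults are re-created per instance by msgspec
    service_tasks: dict[str, tuple] = {}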

View File

@@ -33,13 +33,14 @@ import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto
from .._cacheables import open_cached_client
from ._util import resproc, SymbolNotFound
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data.types import Struct
from ..data._web_bs import open_autorecon_ws, NoBsWs
log = get_logger(__name__)
@@ -78,14 +79,12 @@ _show_wap_in_history = False
# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(Struct, frozen=True):
class Pair(BaseModel):
symbol: str
status: str
baseAsset: str
baseAssetPrecision: int
cancelReplaceAllowed: bool
allowTrailingStop: bool
quoteAsset: str
quotePrecision: int
quoteAssetPrecision: int
@@ -105,14 +104,14 @@ class Pair(Struct, frozen=True):
permissions: list[str]
class OHLC(Struct):
'''
Description of the flattened OHLC quote format.
@dataclass
class OHLC:
"""Description of the flattened OHLC quote format.
For schema details see:
https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams
'''
"""
time: int
open: float
@@ -261,7 +260,6 @@ class Client:
for i, bar in enumerate(bars):
bar = OHLC(*bar)
bar.typecast()
row = []
for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@@ -289,7 +287,7 @@ async def get_client() -> Client:
# validation type
class AggTrade(Struct):
class AggTrade(BaseModel):
e: str # Event type
E: int # Event time
s: str # Symbol
@@ -343,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
elif msg.get('e') == 'aggTrade':
# NOTE: this is purely for a definition, ``msgspec.Struct``
# does not runtime-validate until you decode/encode.
# see: https://jcristharif.com/msgspec/structs.html#type-validation
# validate
msg = AggTrade(**msg)
# TODO: type out and require this quote format
@@ -356,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
'brokerd_ts': time.time(),
'ticks': [{
'type': 'trade',
'price': float(msg.p),
'size': float(msg.q),
'price': msg.p,
'size': msg.q,
'broker_ts': msg.T,
}],
}
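To illustrate the note above that ``msgspec.Struct`` does not runtime-validate until you decode/encode, a small sketch with a hypothetical two-field subset:

import msgspec

class AggTradeSketch(msgspec.Struct):
    e: str    # event type
    p: float  # price

# constructing directly performs no type checks..
AggTradeSketch(e='aggTrade', p='oops')  # no error raised here

# ..but decoding wire bytes into the struct does validate:
msgspec.json.decode(
    b'{"e": "aggTrade", "p": "oops"}',
    type=AggTradeSketch,
)  # raises ``msgspec.ValidationError``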
@@ -452,7 +448,7 @@ async def stream_quotes(
d = cache[sym.upper()]
syminfo = Pair(**d) # validation
si = sym_infos[sym] = syminfo.to_dict()
si = sym_infos[sym] = syminfo.dict()
# XXX: after manually inspecting the response format we
# just directly pick out the info we need

View File

@@ -20,10 +20,15 @@ Interactive Brokers API backend.
Sub-modules within break into the core functionalities:
- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
- ``data.py`` for real-time data feed endpoints
- ``client.py`` for the core API machinery which is ``trio``-ized
wrapping around ``ib_insync``.
- ``report.py`` for the hackery to build manual pp calcs
to avoid ib's absolute bullshit FIFO style position
tracking..
"""
from .api import (
get_client,
@@ -33,10 +38,7 @@ from .feed import (
open_symbol_search,
stream_quotes,
)
from .broker import (
trades_dialogue,
norm_trade_records,
)
from .broker import trades_dialogue
__all__ = [
'get_client',

View File

@@ -29,7 +29,6 @@ import itertools
from math import isnan
from typing import (
Any,
Optional,
Union,
)
import asyncio
@@ -39,28 +38,16 @@ import time
from types import SimpleNamespace
from bidict import bidict
import trio
import tractor
from tractor import to_asyncio
import ib_insync as ibis
from ib_insync.contract import (
Contract,
ContractDetails,
Option,
)
from ib_insync.wrapper import RequestError
from ib_insync.contract import Contract, ContractDetails
from ib_insync.order import Order
from ib_insync.ticker import Ticker
from ib_insync.objects import (
Position,
Fill,
Execution,
CommissionReport,
)
from ib_insync.wrapper import (
Wrapper,
RequestError,
)
from ib_insync.objects import Position
import ib_insync as ibis
from ib_insync.wrapper import Wrapper
from ib_insync.client import Client as ib_Client
import numpy as np
@@ -168,93 +155,60 @@ class NonShittyIB(ibis.IB):
self.client.apiEnd += self.disconnectedEvent
_futes_venues = (
'GLOBEX',
'NYMEX',
'CME',
'CMECRYPTO',
'COMEX',
'CMDTY', # special name case..
)
_adhoc_futes_set = {
# equities
'nq.globex',
'mnq.globex', # micro
'es.globex',
'mes.globex', # micro
# crypto$
'brr.cmecrypto',
'ethusdrr.cmecrypto',
# agriculture
'he.nymex', # lean hogs
'le.nymex', # live cattle (geezers)
'gf.nymex', # feeder cattle (younguns)
# raw
'lb.nymex', # random len lumber
# metals
'xauusd.cmdty', # gold spot
'gc.nymex',
'mgc.nymex', # micro
# oil & gas
'cl.nymex',
'xagusd.cmdty', # silver spot
'ni.nymex', # silver futes
'qi.comex', # mini-silver futes
}
# taken from list here:
# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
_adhoc_fiat_set = set((
'USD, AED, AUD, CAD,'
'CHF, CNH, CZK, DKK,'
'EUR, GBP, HKD, HUF,'
'ILS, JPY, MXN, NOK,'
'NZD, PLN, RUB, SAR,'
'SEK, SGD, TRY, ZAR'
).replace(' ', '').split(',')
)
# map of symbols to contract ids
_adhoc_symbol_map = {
_adhoc_cmdty_data_map = {
# https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
# NOTE: some cmdtys/metals don't have trade data like gold/usd:
# https://groups.io/g/twsapi/message/44174
'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
}
for qsn in _adhoc_futes_set:
sym, venue = qsn.split('.')
assert venue.upper() in _futes_venues, f'{venue}'
_adhoc_symbol_map[sym.upper()] = (
{'exchange': venue},
{},
)
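For example, a hypothetical spot-check of one entry built by the loop above:

# 'mnq.globex' -> an upper-cased key with a lowercase exchange kwarg
assert _adhoc_symbol_map['MNQ'] == ({'exchange': 'globex'}, {})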
_futes_venues = (
'GLOBEX',
'NYMEX',
'CME',
'CMECRYPTO',
)
_adhoc_futes_set = {
# equities
'nq.globex',
'mnq.globex',
'es.globex',
'mes.globex',
# crypto$
'brr.cmecrypto',
'ethusdrr.cmecrypto',
# agriculture
'he.globex', # lean hogs
'le.globex', # live cattle (geezers)
'gf.globex', # feeder cattle (younguns)
# raw
'lb.globex', # random len lumber
# metals
'xauusd.cmdty', # gold spot
'gc.nymex',
'mgc.nymex',
'xagusd.cmdty', # silver spot
'ni.nymex', # silver futes
'qi.comex', # mini-silver futes
}
# exchanges we don't support at the moment due to not knowing
# how to do symbol-contract lookup correctly, likely due
# to not having the data feeds subscribed.
_exch_skip_list = {
'ASX', # aussie stocks
'MEXI', # mexican stocks
# no idea
'VALUE',
'FUNDSERV',
'SWB2',
'PSE',
'VALUE', # no idea
}
# https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
@@ -307,29 +261,27 @@ class Client:
# NOTE: the ib.client here is "throttled" to 45 rps by default
async def trades(self) -> dict[str, Any]:
'''
Return list of trade-fills from current session in ``dict``.
async def trades(
self,
# api_only: bool = False,
'''
fills: list[Fill] = self.ib.fills()
norm_fills: list[dict] = []
) -> dict[str, Any]:
# orders = await self.ib.reqCompletedOrdersAsync(
# apiOnly=api_only
# )
fills = await self.ib.reqExecutionsAsync()
norm_fills = []
for fill in fills:
fill = fill._asdict() # namedtuple
for key, val in fill.items():
match val:
case Contract() | Execution() | CommissionReport():
fill[key] = asdict(val)
for key, val in fill.copy().items():
if isinstance(val, Contract):
fill[key] = asdict(val)
norm_fills.append(fill)
return norm_fills
async def orders(self) -> list[Order]:
return await self.ib.reqAllOpenOrdersAsync(
apiOnly=False,
)
async def bars(
self,
fqsn: str,
@@ -357,7 +309,7 @@ class Client:
_enters += 1
contract = (await self.find_contracts(fqsn))[0]
contract = await self.find_contract(fqsn)
bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))
# _min = min(2000*100, count)
@@ -412,15 +364,7 @@ class Client:
futs.append(self.ib.reqContractDetailsAsync(con))
# batch request all details
try:
results = await asyncio.gather(*futs)
except RequestError as err:
msg = err.message
if (
'No security definition' in msg
):
log.warning(f'{msg}: {contracts}')
return {}
results = await asyncio.gather(*futs)
# one set per future result
details = {}
@@ -429,11 +373,20 @@ class Client:
# XXX: if there is more then one entry in the details list
# then the contract is so called "ambiguous".
for d in details_set:
con = d.contract
# nested dataclass we probably don't need and that won't
# IPC serialize..
key = '.'.join([
con.symbol,
con.primaryExchange or con.exchange,
])
expiry = con.lastTradeDateOrContractMonth
if expiry:
key += f'.{expiry}'
# nested dataclass we probably don't need and that
# won't IPC serialize..
d.secIdList = ''
key, calc_price = con2fqsn(d.contract)
details[key] = d
return details
@@ -463,7 +416,7 @@
self,
pattern: str,
# how many contracts to search "up to"
upto: int = 6,
upto: int = 3,
asdicts: bool = True,
) -> dict[str, ContractDetails]:
@@ -474,6 +427,7 @@
pattern,
upto=upto,
)
for key, deats in results.copy().items():
tract = deats.contract
@@ -483,44 +437,21 @@
if sectype == 'IND':
results[f'{sym}.IND'] = tract
results.pop(key)
# exch = tract.exchange
# XXX: add back one of these to get the weird deadlock
# on the debugger from root without the latest
# maybe_wait_for_debugger() fix in the `open_context()`
# exit.
# assert 0
# if con.exchange not in _exch_skip_list:
exch = tract.exchange
if exch not in _exch_skip_list:
if exch in _futes_venues:
# try get all possible contracts for symbol as per,
# https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
con = ibis.Future(
symbol=sym,
exchange=exch,
)
# TODO: make this work, think it's something to do
# with the qualify flag.
# cons = await self.find_contracts(
# contract=con,
# err_on_qualify=False,
# )
# if cons:
all_deats = await self.con_deats([con])
results |= all_deats
try:
all_deats = await self.con_deats([con])
results |= all_deats
# forex pairs
elif sectype == 'CASH':
dst, src = tract.localSymbol.split('.')
pair_key = "/".join([dst, src])
exch = tract.exchange.lower()
results[f'{pair_key}.{exch}'] = tract
results.pop(key)
# XXX: again seems to trigger the weird tractor
# bug with the debugger..
# assert 0
except RequestError as err:
log.warning(err.message)
return results
@@ -552,19 +483,13 @@ class Client:
return con
async def get_con(
self,
conid: int,
) -> Contract:
return await self.ib.qualifyContractsAsync(
ibis.Contract(conId=conid)
)
def parse_patt2fqsn(
async def find_contract(
self,
pattern: str,
currency: str = 'USD',
**kwargs,
) -> tuple[str, str, str, str]:
) -> Contract:
# TODO: we can't use this currently because
# ``wrapper.startTicker()`` caches ticker instances
@@ -577,104 +502,58 @@ class Client:
# XXX UPDATE: we can probably do the tick/trades scraping
# inside our eventkit handler instead to bypass this entirely?
currency = ''
# fqsn parsing stage
# ------------------
if '.ib' in pattern:
from ..data._source import unpack_fqsn
_, symbol, expiry = unpack_fqsn(pattern)
broker, symbol, expiry = unpack_fqsn(pattern)
else:
symbol = pattern
expiry = ''
# another hack for forex pairs lul.
if (
'.idealpro' in symbol
# or '/' in symbol
):
exch = 'IDEALPRO'
symbol = symbol.removesuffix('.idealpro')
if '/' in symbol:
symbol, currency = symbol.split('/')
# try:
# # give the cache a go
# return self._contracts[symbol]
# except KeyError:
# log.debug(f'Looking up contract for {symbol}')
expiry: str = ''
if symbol.count('.') > 1:
symbol, _, expiry = symbol.rpartition('.')
else:
# TODO: yes, a cache..
# try:
# # give the cache a go
# return self._contracts[symbol]
# except KeyError:
# log.debug(f'Looking up contract for {symbol}')
expiry: str = ''
if symbol.count('.') > 1:
symbol, _, expiry = symbol.rpartition('.')
# use heuristics to figure out contract "type"
sym, exch = symbol.upper().rsplit('.', maxsplit=1)
# use heuristics to figure out contract "type"
symbol, exch = symbol.upper().rsplit('.', maxsplit=1)
return symbol, currency, exch, expiry
async def find_contracts(
self,
pattern: Optional[str] = None,
contract: Optional[Contract] = None,
qualify: bool = True,
err_on_qualify: bool = True,
) -> Contract:
if pattern is not None:
symbol, currency, exch, expiry = self.parse_patt2fqsn(
pattern,
)
sectype = ''
else:
assert contract
symbol = contract.symbol
sectype = contract.secType
exch = contract.exchange or contract.primaryExchange
expiry = contract.lastTradeDateOrContractMonth
currency = contract.currency
# contract searching stage
# ------------------------
qualify: bool = True
# futes
if exch in _futes_venues:
if expiry:
# get the "front" contract
con = await self.get_fute(
symbol=symbol,
contract = await self.get_fute(
symbol=sym,
exchange=exch,
expiry=expiry,
)
else:
# get the "front" contract
con = await self.get_fute(
symbol=symbol,
contract = await self.get_fute(
symbol=sym,
exchange=exch,
front=True,
)
elif (
exch in ('IDEALPRO')
or sectype == 'CASH'
):
# if '/' in symbol:
# currency = ''
# symbol, currency = symbol.split('/')
qualify = False
elif exch in ('FOREX'):
currency = ''
symbol, currency = sym.split('/')
con = ibis.Forex(
pair=''.join((symbol, currency)),
symbol=symbol,
currency=currency,
)
con.bars_kwargs = {'whatToShow': 'MIDPOINT'}
# commodities
elif exch == 'CMDTY': # eg. XAUUSD.CMDTY
con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol]
con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
con = ibis.Commodity(**con_kwargs)
con.bars_kwargs = bars_kwargs
@@ -690,50 +569,33 @@ class Client:
exch = 'SMART'
else:
# XXX: order is super important here since
# a primary == 'SMART' won't ever work.
primaryExchange = exch
exch = 'SMART'
primaryExchange = exch
con = ibis.Stock(
symbol=symbol,
symbol=sym,
exchange=exch,
primaryExchange=primaryExchange,
currency=currency,
)
try:
exch = 'SMART' if not exch else exch
if qualify:
contract = (await self.ib.qualifyContractsAsync(con))[0]
else:
assert contract
contracts = [con]
if qualify:
try:
contracts = await self.ib.qualifyContractsAsync(con)
except RequestError as err:
msg = err.message
if (
'No security definition' in msg
and not err_on_qualify
):
log.warning(
f'Could not find def for {con}')
return None
except IndexError:
raise ValueError(f"No contract could be found {con}")
else:
raise
if not contracts:
raise ValueError(f"No contract could be found {con}")
self._contracts[pattern] = contract
# pack all contracts into cache
for tract in contracts:
exch: str = tract.primaryExchange or tract.exchange or exch
pattern = f'{symbol}.{exch}'
expiry = tract.lastTradeDateOrContractMonth
# add an entry with expiry suffix if available
if expiry:
pattern += f'.{expiry}'
# add an additional entry with expiry suffix if available
conexp = contract.lastTradeDateOrContractMonth
if conexp:
self._contracts[pattern + f'.{conexp}'] = contract
self._contracts[pattern.lower()] = tract
return contracts
return contract
async def get_head_time(
self,
@@ -752,10 +614,9 @@
async def get_sym_details(
self,
symbol: str,
) -> tuple[Contract, Ticker, ContractDetails]:
contract = (await self.find_contracts(symbol))[0]
contract = await self.find_contract(symbol)
ticker: Ticker = self.ib.reqMktData(
contract,
snapshot=True,
@@ -943,73 +804,6 @@ class Client:
return self.ib.positions(account=account)
def con2fqsn(
con: Contract,
_cache: dict[int, (str, bool)] = {}
) -> tuple[str, bool]:
'''
Convert contracts to fqsn-style strings to be used both in symbol-search
matching and as feed tokens passed to the front-end data feed layer.
Previously seen contracts are cached by id.
'''
# should be real volume for this contract by default
calc_price = False
if con.conId:
try:
return _cache[con.conId]
except KeyError:
pass
suffix = con.primaryExchange or con.exchange
symbol = con.symbol
expiry = con.lastTradeDateOrContractMonth or ''
match con:
case Option():
# TODO: option symbol parsing and sane display:
symbol = con.localSymbol.replace(' ', '')
case ibis.Commodity():
# commodities and forex don't have an exchange name and
# no real volume so we have to calculate the price
suffix = con.secType
# no real volume on this tract
calc_price = True
case ibis.Forex() | ibis.Contract(secType='CASH'):
dst, src = con.localSymbol.split('.')
symbol = ''.join([dst, src])
suffix = con.exchange
# no real volume on forex feeds..
calc_price = True
if not suffix:
entry = _adhoc_symbol_map.get(
con.symbol or con.localSymbol
)
if entry:
meta, kwargs = entry
cid = meta.get('conId')
if cid:
assert con.conId == meta['conId']
suffix = meta['exchange']
# append a `.<suffix>` to the returned symbol
# key for derivatives that normally is the expiry
# date key.
if expiry:
suffix += f'.{expiry}'
fqsn_key = '.'.join((symbol, suffix)).lower()
_cache[con.conId] = fqsn_key, calc_price
return fqsn_key, calc_price
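A hypothetical usage trace of ``con2fqsn()``, assuming a qualified forex contract so that ``localSymbol`` and ``exchange`` are populated:

import ib_insync as ibis

con = ibis.Forex(pair='EURUSD', exchange='IDEALPRO')
con.localSymbol = 'EUR.USD'  # normally filled in by qualification
fqsn_key, calc_price = con2fqsn(con)
# -> ('eurusd.idealpro', True): forex feeds have no real volume,
# so the price must be calculated per the match arm above.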
# per-actor API ep caching
_client_cache: dict[tuple[str, int], Client] = {}
_scan_ignore: set[tuple[str, int]] = set()
@@ -1017,23 +811,10 @@ _scan_ignore: set[tuple[str, int]] = set()
def get_config() -> dict[str, Any]:
conf, path = config.load('brokers')
conf, path = config.load()
section = conf.get('ib')
accounts = section.get('accounts')
if not accounts:
raise ValueError(
'brokers.toml -> `ib.accounts` must be defined\n'
f'location: {path}'
)
names = list(accounts.keys())
accts = section['accounts'] = bidict(accounts)
log.info(
f'brokers.toml defines {len(accts)} accounts: '
f'{pformat(names)}'
)
if section is None:
log.warning(f'No config section found for ib in {path}')
return {}
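For reference, a hypothetical ``brokers.toml`` stanza (account names and numbers made up) that satisfies the ``ib.accounts`` requirement enforced above, shown with the ``dict`` shape ``config.load('brokers')`` would surface:

# [ib.accounts]
# paper = "DU0000000"
# margin = "U1111111"
conf = {'ib': {'accounts': {'paper': 'DU0000000', 'margin': 'U1111111'}}}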
@@ -1127,12 +908,6 @@ async def load_aio_clients(
# careful.
timeout=connect_timeout,
)
# create and cache client
client = Client(ib)
# update all actor-global caches
log.info(f"Caching client for {sockaddr}")
_client_cache[sockaddr] = client
break
except (
@@ -1156,9 +931,21 @@ async def load_aio_clients(
log.warning(
f'Failed to connect on {port} for {i} time, retrying...')
# create and cache client
client = Client(ib)
# Pre-collect all accounts available for this
# connection and map account names to this client
# instance.
pps = ib.positions()
if pps:
for pp in pps:
accounts_found[
accounts_def.inverse[pp.account]
] = client
# if there are accounts without positions we should still
# register them for this client
for value in ib.accountValues():
acct_number = value.account
@@ -1179,6 +966,10 @@ async def load_aio_clients(
f'{pformat(accounts_found)}'
)
# update all actor-global caches
log.info(f"Caching client for {sockaddr}")
_client_cache[sockaddr] = client
# XXX: why aren't we just updating this directly above
# instead of using the intermediary `accounts_found`?
_accounts2clients.update(accounts_found)
@@ -1199,7 +990,7 @@ async def load_aio_clients(
for acct, client in _accounts2clients.items():
log.info(f'Disconnecting {acct}@{client}')
client.ib.disconnect()
_client_cache.pop((host, port), None)
_client_cache.pop((host, port))
async def load_clients_for_trio(
@@ -1228,6 +1019,9 @@ async def load_clients_for_trio(
await asyncio.sleep(float('inf'))
_proxies: dict[str, MethodProxy] = {}
@acm
async def open_client_proxies() -> tuple[
dict[str, MethodProxy],
@@ -1235,6 +1029,7 @@ async def open_client_proxies() -> tuple[
]:
async with (
tractor.trionics.maybe_open_context(
# acm_func=open_client_proxies,
acm_func=tractor.to_asyncio.open_channel_from,
kwargs={'target': load_clients_for_trio},
@@ -1249,14 +1044,13 @@ async def open_client_proxies() -> tuple[
if cache_hit:
log.info(f'Re-using cached clients: {clients}')
proxies = {}
for acct_name, client in clients.items():
proxy = await stack.enter_async_context(
open_client_proxy(client),
)
proxies[acct_name] = proxy
_proxies[acct_name] = proxy
yield proxies, clients
yield _proxies, clients
def get_preferred_data_client(
@@ -1405,13 +1199,11 @@ async def open_client_proxy(
event_table = {}
async with (
to_asyncio.open_channel_from(
open_aio_client_method_relay,
client=client,
event_consumers=event_table,
) as (first, chan),
trio.open_nursery() as relay_n,
):

File diff suppressed because it is too large

View File

@@ -41,8 +41,7 @@ from trio_typing import TaskStatus
from piker.data._sharedmem import ShmArray
from .._util import SymbolNotFound, NoData
from .api import (
# _adhoc_futes_set,
con2fqsn,
_adhoc_futes_set,
log,
load_aio_clients,
ibis,
@@ -208,6 +207,8 @@ async def get_bars(
except RequestError as err:
msg = err.message
# why do we always need to rebind this?
# _err = err
if 'No market data permissions for' in msg:
# TODO: signalling for no permissions searches
@@ -216,8 +217,8 @@ async def get_bars(
)
elif (
err.code == 162 and
'HMDS query returned no data' in err.message
err.code == 162
and 'HMDS query returned no data' in err.message
):
# XXX: this is now done in the storage mgmt layer
# and we shouldn't implicitly decrement the frame dt
@@ -236,14 +237,6 @@ async def get_bars(
frame_size=2000,
)
# elif (
# err.code == 162 and
# 'Trading TWS session is connected from a different IP
# address' in err.message
# ):
# log.warning("ignoring ip address warning")
# continue
elif _pacing in msg:
log.warning(
@@ -301,13 +294,7 @@ async def get_bars(
else:
log.warning('Sending CONNECTION RESET')
res = await data_reset_hack(reset_type='connection')
if not res:
log.warning(
'NO VNC DETECTED!\n'
'Manually press ctrl-alt-f on your IB java app'
)
# break
await data_reset_hack(reset_type='connection')
with trio.move_on_after(timeout) as cs:
for name, ev in [
@@ -566,18 +553,38 @@ async def open_aio_quote_stream(
# TODO: cython/mypyc/numba this!
# or we can at least cache a majority of the values
# except for the ones we expect to change?..
def normalize(
ticker: Ticker,
calc_price: bool = False
) -> dict:
# should be real volume for this contract by default
calc_price = False
# check for special contract types
con = ticker.contract
if type(con) in (
ibis.Commodity,
ibis.Forex,
):
# commodities and forex don't have an exchange name and
# no real volume so we have to calculate the price
suffix = con.secType
# no real volume on this tract
calc_price = True
fqsn, calc_price = con2fqsn(con)
else:
suffix = con.primaryExchange
if not suffix:
suffix = con.exchange
# append a `.<suffix>` to the returned symbol
# key for derivatives that normally is the expiry
# date key.
expiry = con.lastTradeDateOrContractMonth
if expiry:
suffix += f'.{expiry}'
# convert named tuples to dicts so we send usable keys
new_ticks = []
@@ -609,7 +616,9 @@ def normalize(
# generate fqsn with possible specialized suffix
# for derivatives, note the lowercase.
data['symbol'] = data['fqsn'] = fqsn
data['symbol'] = data['fqsn'] = '.'.join(
(con.symbol, suffix)
).lower()
# convert named tuples to dicts for transport
tbts = data.get('tickByTicks')
@@ -674,13 +683,6 @@ async def stream_quotes(
# TODO: more consistent field translation
atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
if atype in {
'forex',
'index',
'commodity',
}:
syminfo['no_vlm'] = True
# for stocks it seems TWS reports too small a tick size
# such that you can't submit orders with that granularity?
min_tick = 0.01 if atype == 'stock' else 0
@@ -707,9 +709,9 @@ async def stream_quotes(
},
}
return init_msgs, syminfo
return init_msgs
init_msgs, syminfo = mk_init_msgs()
init_msgs = mk_init_msgs()
# TODO: we should instead spawn a task that waits on a feed to start
# and let it wait indefinitely..instead of this hard coded stuff.
@@ -718,13 +720,7 @@ async def stream_quotes(
# it might be outside regular trading hours so see if we can at
# least grab history.
if (
isnan(first_ticker.last)
and type(first_ticker.contract) not in (
ibis.Commodity,
ibis.Forex
)
):
if isnan(first_ticker.last):
task_status.started((init_msgs, first_quote))
# it's not really live but this will unblock
@@ -747,16 +743,10 @@ async def stream_quotes(
task_status.started((init_msgs, first_quote))
async with aclosing(stream):
if syminfo.get('no_vlm', False):
# generally speaking these feeds don't
# include vlm data.
atype = syminfo['asset_type']
log.info(
f'Non-vlm asset {sym}@{atype}, skipping quote poll...'
)
else:
if type(first_ticker.contract) not in (
ibis.Commodity,
ibis.Forex
):
# wait for real volume on feed (trading might be closed)
while True:
ticker = await stream.receive()
@@ -815,9 +805,6 @@ async def data_reset_hack(
successful.
- other OS support?
- integration with ``ib-gw`` run in docker + Xorg?
- is it possible to offer a local server that can be accessed by
a client? Would sure be handy for running native java blobs
that need to be wrangled.
'''
@@ -848,10 +835,7 @@ async def data_reset_hack(
client.mouse.click()
client.keyboard.press('Ctrl', 'Alt', key) # keys are stacked
try:
await tractor.to_asyncio.run_task(vnc_click_hack)
except OSError:
return False
await tractor.to_asyncio.run_task(vnc_click_hack)
# we don't really need the ``xdotool`` approach any more B)
return True
@@ -925,18 +909,17 @@ async def open_symbol_search(
# trigger async request
await trio.sleep(0)
# # match against our ad-hoc set immediately
# adhoc_matches = fuzzy.extractBests(
# pattern,
# list(_adhoc_futes_set),
# score_cutoff=90,
# )
# log.info(f'fuzzy matched adhocs: {adhoc_matches}')
# adhoc_match_results = {}
# if adhoc_matches:
# # TODO: do we need to pull contract details?
# adhoc_match_results = {i[0]: {} for i in
# adhoc_matches}
# match against our ad-hoc set immediately
adhoc_matches = fuzzy.extractBests(
pattern,
list(_adhoc_futes_set),
score_cutoff=90,
)
log.info(f'fuzzy matched adhocs: {adhoc_matches}')
adhoc_match_results = {}
if adhoc_matches:
# TODO: do we need to pull contract details?
adhoc_match_results = {i[0]: {} for i in adhoc_matches}
log.debug(f'fuzzy matching stocks {stock_results}')
stock_matches = fuzzy.extractBests(
@@ -945,8 +928,7 @@ async def open_symbol_search(
score_cutoff=50,
)
# matches = adhoc_match_results | {
matches = {
matches = adhoc_match_results | {
item[0]: {} for item in stock_matches
}
# TODO: we used to deliver contract details

File diff suppressed because it is too large

View File

@@ -1,64 +0,0 @@
``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes they sure are
accommodating to those of us who appreciate a little ``xmr``.
status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.
config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:
.. code:: toml
[kraken]
accounts.spot = 'spot'
key_descr = "spot"
api_key = "69696969696969696696969696969696969696969696969696969696"
secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"
If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::
<pikerk_conf_dir>/ledgers/trades_kraken_spot.toml
An example ledger file will have entries written verbatim from the
trade events schema:
.. code:: toml
[TFJBKK-SMBZS-VJ4UWS]
ordertxid = "SMBZSA-7CNQU-3HWLNJ"
postxid = "SMBZSE-M7IF5-CFI7LT"
pair = "XXMRZEUR"
time = 1655691993.4133966
type = "buy"
ordertype = "limit"
price = "103.97000000"
cost = "499.99999977"
fee = "0.80000000"
vol = "4.80907954"
margin = "0.00000000"
misc = ""
your ``pps.toml`` file will have position entries like,
.. code:: toml
[kraken.spot."xmreur.kraken"]
size = 4.80907954
ppu = 103.97000000
bsuid = "XXMRZEUR"
clears = [
{ tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
]

View File

@@ -1,61 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Kraken backend.
Sub-modules within break into the core functionalities:
- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
wrapping around the Kraken web API.
'''
from piker.log import get_logger
log = get_logger(__name__)
from .api import (
get_client,
)
from .feed import (
open_history_client,
open_symbol_search,
stream_quotes,
)
from .broker import (
trades_dialogue,
norm_trade_records,
)
__all__ = [
'get_client',
'trades_dialogue',
'open_history_client',
'open_symbol_search',
'stream_quotes',
'norm_trade_records',
]
# tractor RPC enable arg
__enable_modules__: list[str] = [
'api',
'feed',
'broker',
]

View File

@@ -1,540 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Kraken web API wrapping.
'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
Any,
Optional,
Union,
)
import time
from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio
from piker import config
from piker.brokers._util import (
resproc,
SymbolNotFound,
BrokerError,
DataThrottle,
)
from piker.pp import Transaction
from . import log
# <uri>/<version>/
_url = 'https://api.kraken.com/0'
# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
('index', int),
('time', int),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', float),
('count', int),
('bar_wap', float),
]
# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)
_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
'tick_decimals': 'pair_decimals',
}
def get_config() -> dict[str, Any]:
conf, path = config.load()
section = conf.get('kraken')
if section is None:
log.warning(f'No config section found for kraken in {path}')
return {}
return section
def get_kraken_signature(
urlpath: str,
data: dict[str, Any],
secret: str
) -> str:
postdata = urllib.parse.urlencode(data)
encoded = (str(data['nonce']) + postdata).encode()
message = urlpath.encode() + hashlib.sha256(encoded).digest()
mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
sigdigest = base64.b64encode(mac.digest())
return sigdigest.decode()
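A hypothetical invocation, mirroring how ``endpoint()`` below assembles the uri path and nonce (the secret here is a dummy base64 blob, not a real key):

import base64
import time

dummy_secret = base64.b64encode(b'not-a-real-secret').decode()
sig = get_kraken_signature(
    urlpath='/0/private/Balance',
    data={'nonce': str(int(1000*time.time()))},
    secret=dummy_secret,
)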
class InvalidKey(ValueError):
'''
EAPI:Invalid key
This error is returned when the API key used for the call is
either expired or disabled, please review the API key in your
Settings -> API tab of account management or generate a new one
and update your application.
'''
class Client:
# global symbol normalization table
_ntable: dict[str, str] = {}
_atable: bidict[str, str] = bidict()
def __init__(
self,
config: dict[str, str],
name: str = '',
api_key: str = '',
secret: str = ''
) -> None:
self._sesh = asks.Session(connections=4)
self._sesh.base_location = _url
self._sesh.headers.update({
'User-Agent':
'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
})
self.conf: dict[str, str] = config
self._pairs: list[str] = []
self._name = name
self._api_key = api_key
self._secret = secret
@property
def pairs(self) -> dict[str, Any]:
if self._pairs is None:
raise RuntimeError(
"Make sure to run `cache_symbols()` on startup!"
)
# retrieve and cache all symbols
return self._pairs
async def _public(
self,
method: str,
data: dict,
) -> dict[str, Any]:
resp = await self._sesh.post(
path=f'/public/{method}',
json=data,
timeout=float('inf')
)
return resproc(resp, log)
async def _private(
self,
method: str,
data: dict,
uri_path: str
) -> dict[str, Any]:
headers = {
'Content-Type':
'application/x-www-form-urlencoded',
'API-Key':
self._api_key,
'API-Sign':
get_kraken_signature(uri_path, data, self._secret)
}
resp = await self._sesh.post(
path=f'/private/{method}',
data=data,
headers=headers,
timeout=float('inf')
)
return resproc(resp, log)
async def endpoint(
self,
method: str,
data: dict[str, Any]
) -> dict[str, Any]:
uri_path = f'/0/private/{method}'
data['nonce'] = str(int(1000*time.time()))
return await self._private(method, data, uri_path)
async def get_balances(
self,
) -> dict[str, float]:
'''
Return the set of asset balances for this account
by symbol.
'''
resp = await self.endpoint(
'Balance',
{},
)
by_bsuid = resp['result']
return {
self._atable[sym].lower(): float(bal)
for sym, bal in by_bsuid.items()
}
async def get_assets(self) -> dict[str, dict]:
resp = await self._public('Assets', {})
return resp['result']
async def cache_assets(self) -> None:
assets = self.assets = await self.get_assets()
for bsuid, info in assets.items():
self._atable[bsuid] = info['altname']
async def get_trades(
self,
fetch_limit: int = 10,
) -> dict[str, Any]:
'''
Get the trades (aka cleared orders) history from the rest endpoint:
https://docs.kraken.com/rest/#operation/getTradeHistory
'''
ofs = 0
trades_by_id: dict[str, Any] = {}
for i in itertools.count():
if i >= fetch_limit:
break
# increment 'ofs' pagination offset
ofs = i*50
resp = await self.endpoint(
'TradesHistory',
{'ofs': ofs},
)
by_id = resp['result']['trades']
trades_by_id.update(by_id)
# we can get up to 50 results per query
if (
len(by_id) < 50
):
err = resp.get('error')
if err:
raise BrokerError(err)
# we know we received the max amount of
# trade results so there may be more history.
# catch the end of the trades
count = resp['result']['count']
break
# sanity check on update
assert count == len(trades_by_id.values())
return trades_by_id
async def get_xfers(
self,
asset: str,
src_asset: str = '',
) -> dict[str, Transaction]:
'''
Get asset balance transfer transactions.
Currently only withdrawals are supported.
'''
xfers: list[dict] = (await self.endpoint(
'WithdrawStatus',
{'asset': asset},
))['result']
# eg. resp schema:
# 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
# 'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
# 'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
# 'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
# 'amount': '0.00300726', 'fee': '0.00001000', 'time':
# 1658347714, 'status': 'Success'}]}
trans: dict[str, Transaction] = {}
for entry in xfers:
# look up the normalized name
asset = self._atable[entry['asset']].lower()
# XXX: this is in the asset units (likely) so it isn't
# quite the same as a commission cost necessarily..)
cost = float(entry['fee'])
tran = Transaction(
fqsn=asset + '.kraken',
tid=entry['txid'],
dt=pendulum.from_timestamp(entry['time']),
bsuid=f'{asset}{src_asset}',
size=-1*(
float(entry['amount'])
+
cost
),
# since this will be treated as a "sell" it
# shouldn't be needed to compute the be price.
price='NaN',
# XXX: see note above
cost=0,
)
trans[tran.tid] = tran
return trans
async def submit_limit(
self,
symbol: str,
price: float,
action: str,
size: float,
reqid: str = None,
validate: bool = False # set True test call without a real submission
) -> dict:
'''
Place an order and return integer request id provided by client.
'''
# Build common data dict for common keys from both endpoints
data = {
"pair": symbol,
"price": str(price),
"validate": validate
}
if reqid is None:
# Build order data for kraken api
data |= {
"ordertype": "limit",
"type": action,
"volume": str(size),
}
return await self.endpoint('AddOrder', data)
else:
# Edit order data for kraken api
data["txid"] = reqid
return await self.endpoint('EditOrder', data)
async def submit_cancel(
self,
reqid: str,
) -> dict:
'''
Send cancel request for order id ``reqid``.
'''
# txid is a transaction id given by kraken
return await self.endpoint('CancelOrder', {"txid": reqid})
async def symbol_info(
self,
pair: Optional[str] = None,
) -> dict[str, dict[str, str]]:
if pair is not None:
pairs = {'pair': pair}
else:
pairs = None # get all pairs
resp = await self._public('AssetPairs', pairs)
err = resp['error']
if err:
symbolname = pairs['pair'] if pair else None
raise SymbolNotFound(f'{symbolname}.kraken')
pairs = resp['result']
if pair is not None:
_, data = next(iter(pairs.items()))
return data
else:
return pairs
async def cache_symbols(
self,
) -> dict:
if not self._pairs:
self._pairs = await self.symbol_info()
ntable = {}
for restapikey, info in self._pairs.items():
ntable[restapikey] = ntable[info['wsname']] = info['altname']
self._ntable.update(ntable)
return self._pairs
async def search_symbols(
self,
pattern: str,
limit: int = None,
) -> dict[str, Any]:
if self._pairs is not None:
data = self._pairs
else:
data = await self.symbol_info()
matches = fuzzy.extractBests(
pattern,
data,
score_cutoff=50,
)
# repack in dict form
return {item[0]['altname']: item[0] for item in matches}
async def bars(
self,
symbol: str = 'XBTUSD',
# UTC 2017-07-02 12:53:20
since: Optional[Union[int, datetime]] = None,
count: int = 720, # <- max allowed per query
as_np: bool = True,
) -> dict:
if since is None:
since = pendulum.now('UTC').start_of('minute').subtract(
minutes=count).timestamp()
elif isinstance(since, int):
since = pendulum.from_timestamp(since).timestamp()
else: # presumably a pendulum datetime
since = since.timestamp()
# UTC 2017-07-02 12:53:20 is oldest seconds value
since = str(max(1499000000, int(since)))
json = await self._public(
'OHLC',
data={
'pair': symbol,
'since': since,
},
)
try:
res = json['result']
res.pop('last')
bars = next(iter(res.values()))
new_bars = []
first = bars[0]
last_nz_vwap = first[-3]
if last_nz_vwap == 0:
# use close if vwap is zero
last_nz_vwap = first[-4]
# convert all fields to native types
for i, bar in enumerate(bars):
# normalize weird zero-ed vwap values..cmon kraken..
# indicates vwap didn't change since last bar
vwap = float(bar.pop(-3))
if vwap != 0:
last_nz_vwap = vwap
if vwap == 0:
vwap = last_nz_vwap
# re-insert vwap as the last of the fields
bar.append(vwap)
new_bars.append(
(i,) + tuple(
ftype(bar[j]) for j, (name, ftype) in enumerate(
_ohlc_dtype[1:]
)
)
)
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
return array
except KeyError:
errmsg = json['error'][0]
if 'not found' in errmsg:
raise SymbolNotFound(errmsg + f': {symbol}')
elif 'Too many requests' in errmsg:
raise DataThrottle(f'{symbol}')
else:
raise BrokerError(errmsg)
@classmethod
def normalize_symbol(
cls,
ticker: str
) -> str:
'''
Normalize symbol names to a 3x3 pair from the global
definition map which we build out from the data retrieved from
the 'AssetPairs' endpoint, see methods above.
'''
ticker = cls._ntable[ticker]
symlen = len(ticker)
if symlen != 6:
raise ValueError(f'Unhandled symbol: {ticker}')
return ticker.lower()
@acm
async def get_client() -> Client:
conf = get_config()
if conf:
client = Client(
conf,
name=conf['key_descr'],
api_key=conf['api_key'],
secret=conf['secret']
)
else:
client = Client({})
# at startup, load all symbols, and asset info in
# batch requests.
async with trio.open_nursery() as nurse:
nurse.start_soon(client.cache_assets)
await client.cache_symbols()
yield client

File diff suppressed because it is too large

View File

@@ -1,499 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Real-time and historical data feed endpoints.
'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
Any,
Optional,
Callable,
)
import time
from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio
from piker._cacheables import open_cached_client
from piker.brokers._util import (
BrokerError,
DataThrottle,
DataUnavailable,
)
from piker.log import get_console_log
from piker.data import ShmArray
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
Client,
)
# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
altname: str # alternate pair name
wsname: str # WebSocket pair name (if available)
aclass_base: str # asset class of base component
base: str # asset id of base component
aclass_quote: str # asset class of quote component
quote: str # asset id of quote component
lot: str # volume lot size
pair_decimals: int # scaling decimal places for pair
lot_decimals: int # scaling decimal places for volume
# amount to multiply lot volume by to get currency volume
lot_multiplier: float
# array of leverage amounts available when buying
leverage_buy: list[int]
# array of leverage amounts available when selling
leverage_sell: list[int]
# fee schedule array in [volume, percent fee] tuples
fees: list[tuple[int, float]]
# maker fee schedule array in [volume, percent fee] tuples (if on
# maker/taker)
fees_maker: list[tuple[int, float]]
fee_volume_currency: str # volume discount currency
margin_call: str # margin call level
margin_stop: str # stop-out/liquidation margin level
ordermin: float # minimum order volume for pair
class OHLC(Struct):
'''
Description of the flattened OHLC quote format.
For schema details see:
https://docs.kraken.com/websockets/#message-ohlc
'''
chan_id: int # internal kraken id
chan_name: str # eg. ohlc-1 (name-interval)
pair: str # fx pair
time: float # Begin time of interval, in seconds since epoch
etime: float # End time of interval, in seconds since epoch
open: float # Open price of interval
high: float # High price within interval
low: float # Low price within interval
close: float # Close price of interval
vwap: float # Volume weighted average price within interval
volume: float # Accumulated volume **within interval**
count: int # Number of trades within interval
# (sampled) generated tick data
ticks: list[Any] = []
async def stream_messages(
ws: NoBsWs,
):
'''
Message stream parser and heartbeat handler.
Deliver ws subscription messages as well as handle heartbeat logic
through a single async generator.
'''
too_slow_count = last_hb = 0
while True:
with trio.move_on_after(5) as cs:
msg = await ws.recv_msg()
# trigger reconnection if heartbeat is laggy
if cs.cancelled_caught:
too_slow_count += 1
if too_slow_count > 20:
log.warning(
"Heartbeat is too slow, resetting ws connection")
await ws._connect()
too_slow_count = 0
continue
match msg:
case {'event': 'heartbeat'}:
now = time.time()
delay = now - last_hb
last_hb = now
# XXX: why tf is this not printing without --tl flag?
log.debug(f"Heartbeat after {delay}")
# print(f"Heartbeat after {delay}")
continue
case _:
# passthrough sub msgs
yield msg
async def process_data_feed_msgs(
ws: NoBsWs,
):
'''
Parse and pack data feed messages.
'''
async for msg in stream_messages(ws):
match msg:
case {
'errorMessage': errmsg
}:
raise BrokerError(errmsg)
case {
'event': 'subscriptionStatus',
} as sub:
log.info(
'WS subscription is active:\n'
f'{sub}'
)
continue
case [
chan_id,
*payload_array,
chan_name,
pair
]:
if 'ohlc' in chan_name:
ohlc = OHLC(
chan_id,
chan_name,
pair,
*payload_array[0]
)
ohlc.typecast()
yield 'ohlc', ohlc
elif 'spread' in chan_name:
bid, ask, ts, bsize, asize = map(
float, payload_array[0])
# TODO: really makes you think IB has a horrible API...
quote = {
'symbol': pair.replace('/', ''),
'ticks': [
{'type': 'bid', 'price': bid, 'size': bsize},
{'type': 'bsize', 'price': bid, 'size': bsize},
{'type': 'ask', 'price': ask, 'size': asize},
{'type': 'asize', 'price': ask, 'size': asize},
],
}
yield 'l1', quote
# elif 'book' in msg[-2]:
# chan_id, *payload_array, chan_name, pair = msg
# print(msg)
case _:
print(f'UNHANDLED MSG: {msg}')
# yield msg
def normalize(
ohlc: OHLC,
) -> dict:
quote = ohlc.to_dict()
quote['broker_ts'] = quote['time']
quote['brokerd_ts'] = time.time()
quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
quote['last'] = quote['close']
quote['bar_wap'] = ohlc.vwap
# seriously eh? what's with this non-symmetry everywhere
# in subscription systems...
# XXX: piker style always lowercases symbols.
topic = quote['pair'].replace('/', '').lower()
# print(quote)
return topic, quote
@acm
async def open_history_client(
symbol: str,
) -> tuple[Callable, int]:
# TODO implement history getter for the new storage layer.
async with open_cached_client('kraken') as client:
# lol, kraken won't send any more than the "last"
# 720 1m bars.. so we have to just ignore further
# requests of this type..
queries: int = 0
async def get_ohlc(
end_dt: Optional[datetime] = None,
start_dt: Optional[datetime] = None,
) -> tuple[
np.ndarray,
datetime, # start
datetime, # end
]:
nonlocal queries
if queries > 0:
raise DataUnavailable
count = 0
while count <= 3:
try:
array = await client.bars(
symbol,
since=end_dt,
)
count += 1
queries += 1
break
except DataThrottle:
log.warning(f'kraken OHLC throttle for {symbol}')
await trio.sleep(1)
start_dt = pendulum.from_timestamp(array[0]['time'])
end_dt = pendulum.from_timestamp(array[-1]['time'])
return array, start_dt, end_dt
yield get_ohlc, {'erlangs': 1, 'rate': 1}
async def backfill_bars(
sym: str,
shm: ShmArray, # type: ignore # noqa
count: int = 10, # NOTE: any more and we'll overrun the underlying buffer
task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
) -> None:
'''
Fill historical bars into shared mem / storage afap.
'''
with trio.CancelScope() as cs:
async with open_cached_client('kraken') as client:
bars = await client.bars(symbol=sym)
shm.push(bars)
task_status.started(cs)
async def stream_quotes(
send_chan: trio.abc.SendChannel,
symbols: list[str],
feed_is_live: trio.Event,
loglevel: str = None,
# backend specific
sub_type: str = 'ohlc',
# startup sync
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
) -> None:
'''
Subscribe for ohlc stream of quotes for ``pairs``.
``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
'''
# XXX: required to propagate ``tractor`` loglevel to piker logging
get_console_log(loglevel or tractor.current_actor().loglevel)
ws_pairs = {}
sym_infos = {}
async with open_cached_client('kraken') as client, send_chan as send_chan:
# keep client cached for real-time section
for sym in symbols:
# transform to upper since piker style is always lower
sym = sym.upper()
si = Pair(**await client.symbol_info(sym)) # validation
syminfo = si.to_dict()
syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
syminfo['asset_type'] = 'crypto'
sym_infos[sym] = syminfo
ws_pairs[sym] = si.wsname
symbol = symbols[0].lower()
init_msgs = {
# pass back token, and bool, signalling if we're the writer
# and that history has been written
symbol: {
'symbol_info': sym_infos[sym],
'shm_write_opts': {'sum_tick_vml': False},
'fqsn': sym,
},
}
@acm
async def subscribe(ws: NoBsWs):
# XXX: setup subs
# https://docs.kraken.com/websockets/#message-subscribe
# specific logic for this in kraken's sync client:
# https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
ohlc_sub = {
'event': 'subscribe',
'pair': list(ws_pairs.values()),
'subscription': {
'name': 'ohlc',
'interval': 1,
},
}
# TODO: we want to eventually allow unsubs which should
# be completely fine to request from a separate task
# since internally the ws methods appear to be FIFO
# locked.
await ws.send_msg(ohlc_sub)
# trade data (aka L1)
l1_sub = {
'event': 'subscribe',
'pair': list(ws_pairs.values()),
'subscription': {
'name': 'spread',
# 'depth': 10}
},
}
# pull a first quote and deliver
await ws.send_msg(l1_sub)
yield
# unsub from all pairs on teardown
await ws.send_msg({
'pair': list(ws_pairs.values()),
'event': 'unsubscribe',
'subscription': ['ohlc', 'spread'],
})
# XXX: do we need to ack the unsub?
# await ws.recv_msg()
# see the tips on reconnection logic:
# https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
ws: NoBsWs
async with (
open_autorecon_ws(
'wss://ws.kraken.com/',
fixture=subscribe,
) as ws,
aclosing(process_data_feed_msgs(ws)) as msg_gen,
):
# pull a first quote and deliver
typ, ohlc_last = await anext(msg_gen)
topic, quote = normalize(ohlc_last)
task_status.started((init_msgs, quote))
# lol, only "closes" when they're margin squeezing clients ;P
feed_is_live.set()
# keep start of last interval for volume tracking
last_interval_start = ohlc_last.etime
# start streaming
async for typ, ohlc in msg_gen:
if typ == 'ohlc':
# TODO: can get rid of all this by using
# ``trades`` subscription...
# generate tick values to match time & sales pane:
# https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
volume = ohlc.volume
# new OHLC sample interval
if ohlc.etime > last_interval_start:
last_interval_start = ohlc.etime
tick_volume = volume
else:
# this is the tick volume *within the interval*
tick_volume = volume - ohlc_last.volume
ohlc_last = ohlc
last = ohlc.close
if tick_volume:
ohlc.ticks.append({
'type': 'trade',
'price': last,
'size': tick_volume,
})
topic, quote = normalize(ohlc)
elif typ == 'l1':
quote = ohlc
topic = quote['symbol'].lower()
await send_chan.send({topic: quote})
@tractor.context
async def open_symbol_search(
ctx: tractor.Context,
) -> Client:
async with open_cached_client('kraken') as client:
# load all symbols locally for fast search
cache = await client.cache_symbols()
await ctx.started(cache)
async with ctx.open_stream() as stream:
async for pattern in stream:
matches = fuzzy.extractBests(
pattern,
cache,
score_cutoff=50,
)
# repack in dict form
await stream.send(
{item[0]['altname']: item[0]
for item in matches}
)

View File

@@ -22,10 +22,54 @@ from enum import Enum
from typing import Optional
from bidict import bidict
from pydantic import BaseModel, validator
from ..data._source import Symbol
from ..data.types import Struct
from ..pp import Position
from ._messages import BrokerdPosition, Status
class Position(BaseModel):
'''
Basic pp (personal position) model with attached fills history.
This type should be IPC wire ready?
'''
symbol: Symbol
# last size and avg entry price
size: float
avg_price: float # TODO: contextual pricing
# ordered record of known constituent trade messages
fills: list[Status] = []
def update_from_msg(
self,
msg: BrokerdPosition,
) -> None:
# XXX: better place to do this?
symbol = self.symbol
lot_size_digits = symbol.lot_size_digits
avg_price, size = (
round(msg['avg_price'], ndigits=symbol.tick_size_digits),
round(msg['size'], ndigits=lot_size_digits),
)
self.avg_price = avg_price
self.size = size
@property
def dsize(self) -> float:
'''
The "dollar" size of the pp, normally in trading (fiat) unit
terms.
'''
return self.avg_price * self.size
_size_units = bidict({
@@ -40,30 +84,33 @@ SizeUnit = Enum(
)
class Allocator(Struct):
class Allocator(BaseModel):
class Config:
validate_assignment = True
copy_on_model_validation = False
arbitrary_types_allowed = True
# required to get the account validator lookup working?
extra = 'allow'
underscore_attrs_are_private = False
symbol: Symbol
account: Optional[str] = 'paper'
_size_units: bidict[str, Optional[str]] = _size_units
# TODO: for enums this clearly doesn't fucking work, you can't set
# a default at startup by passing in a `dict` but yet you can set
# that value through assignment..for wtv cucked reason.. honestly, pure
# unintuitive garbage.
_size_unit: str = 'currency'
size_unit: str = 'currency'
_size_units: dict[str, Optional[str]] = _size_units
@property
def size_unit(self) -> str:
return self._size_unit
@size_unit.setter
def size_unit(self, v: str) -> Optional[str]:
@validator('size_unit', pre=True)
def maybe_lookup_key(cls, v):
# apply the corresponding enum key for the text "description" value
if v not in _size_units:
v = _size_units.inverse[v]
return _size_units.inverse[v]
assert v in _size_units
self._size_unit = v
return v
# TODO: if we ever want ot support non-uniform entry-slot-proportion
@@ -126,7 +173,7 @@ class Allocator(Struct):
l_sub_pp = self.units_limit - abs_live_size
elif size_unit == 'currency':
live_cost_basis = abs_live_size * live_pp.ppu
live_cost_basis = abs_live_size * live_pp.avg_price
slot_size = currency_per_slot / price
l_sub_pp = (self.currency_limit - live_cost_basis) / price
@@ -158,7 +205,7 @@ class Allocator(Struct):
if size_unit == 'currency':
# compute the "projected" limit's worth of units at the
# current pp (weighted) price:
slot_size = currency_per_slot / live_pp.ppu
slot_size = currency_per_slot / live_pp.avg_price
else:
slot_size = u_per_slot
@@ -197,12 +244,7 @@ class Allocator(Struct):
if order_size < slot_size:
# compute a fractional slots size to display
slots_used = self.slots_used(
Position(
symbol=sym,
size=order_size,
ppu=price,
bsuid=sym,
)
Position(symbol=sym, size=order_size, avg_price=price)
)
return {
@@ -229,8 +271,8 @@ class Allocator(Struct):
abs_pp_size = abs(pp.size)
if self.size_unit == 'currency':
# live_currency_size = size or (abs_pp_size * pp.ppu)
live_currency_size = abs_pp_size * pp.ppu
# live_currency_size = size or (abs_pp_size * pp.avg_price)
live_currency_size = abs_pp_size * pp.avg_price
prop = live_currency_size / self.currency_limit
else:
@@ -258,7 +300,7 @@ def mk_allocator(
# default allocation settings
defaults: dict[str, float] = {
'account': None, # select paper by default
# 'size_unit': 'currency',
'size_unit': 'currency',
'units_limit': 400,
'currency_limit': 5e3,
'slots': 4,
@@ -297,13 +339,10 @@ def mk_allocator(
# entry step 1.0
alloc.units_limit = alloc.slots
else:
alloc.size_unit = 'currency'
# if the current position is already greater then the limit
# settings, increase the limit to the current position
if alloc.size_unit == 'currency':
startup_size = startup_pp.size * startup_pp.ppu
startup_size = startup_pp.size * startup_pp.avg_price
if startup_size > alloc.currency_limit:
alloc.currency_limit = round(startup_size, ndigits=2)

View File

@@ -31,7 +31,6 @@ from ..log import get_logger
from ._ems import _emsd_main
from .._daemon import maybe_open_emsd
from ._messages import Order, Cancel
from ..brokers import get_brokermod
log = get_logger(__name__)
@@ -59,11 +58,11 @@ class OrderBook:
def send(
self,
msg: Order | dict,
msg: Order,
) -> dict:
self._sent_orders[msg.oid] = msg
self._to_ems.send_nowait(msg)
self._to_ems.send_nowait(msg.dict())
return msg
def update(
@@ -74,8 +73,9 @@ class OrderBook:
) -> dict:
cmd = self._sent_orders[uuid]
msg = cmd.copy(update=data)
self._sent_orders[uuid] = msg
msg = cmd.dict()
msg.update(data)
self._sent_orders[uuid] = Order(**msg)
self._to_ems.send_nowait(msg)
return cmd
@ -83,18 +83,12 @@ class OrderBook:
"""Cancel an order (or alert) in the EMS.
"""
cmd = self._sent_orders.get(uuid)
if not cmd:
log.error(
f'Unknown order {uuid}!?\n'
f'Maybe there is a stale entry or line?\n'
f'You should report this as a bug!'
)
cmd = self._sent_orders[uuid]
msg = Cancel(
oid=uuid,
symbol=cmd.symbol,
)
self._to_ems.send_nowait(msg)
self._to_ems.send_nowait(msg.dict())
_orders: OrderBook = None
@ -155,17 +149,10 @@ async def relay_order_cmds_from_sync_code(
book = get_orders()
async with book._from_order_book.subscribe() as orders_stream:
async for cmd in orders_stream:
sym = cmd.symbol
msg = pformat(cmd)
if sym == symbol_key:
log.info(f'Send order cmd:\n{msg}')
if cmd['symbol'] == symbol_key:
log.info(f'Send order cmd:\n{pformat(cmd)}')
# send msg over IPC / wire
await to_ems_stream.send(cmd)
else:
log.warning(
f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
f'\n{msg}'
)
@acm
@ -217,35 +204,20 @@ async def open_ems(
from ..data._source import unpack_fqsn
broker, symbol, suffix = unpack_fqsn(fqsn)
mode: str = 'live'
async with maybe_open_emsd(broker) as portal:
mod = get_brokermod(broker)
if not getattr(mod, 'trades_dialogue', None):
mode = 'paper'
async with (
# connect to emsd
portal.open_context(
_emsd_main,
fqsn=fqsn,
exec_mode=mode,
) as (
ctx,
(
positions,
accounts,
dialogs,
)
),
) as (ctx, (positions, accounts)),
# open 2-way trade command stream
ctx.open_stream() as trades_stream,
):
# start sync code order msg delivery task
async with trio.open_nursery() as n:
n.start_soon(
relay_order_cmds_from_sync_code,
@ -253,10 +225,4 @@ async def open_ems(
trades_stream
)
yield (
book,
trades_stream,
positions,
accounts,
dialogs,
)
yield book, trades_stream, positions, accounts
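A minimal sketch of the dict-roundtrip update style the pydantic variant of ``OrderBook.update()`` above uses (the msgspec variant instead calls the custom ``Struct.copy(update=...)`` defined in ``piker.data.types`` later in this diff); ``Msg`` and its fields are hypothetical:

from msgspec import Struct

class Msg(Struct):
    oid: str
    price: float

cmd = Msg(oid='a1', price=100.0)

# dump to a plain dict, mutate, then re-validate by reconstructing
# the model from the merged data.
d = {'oid': cmd.oid, 'price': cmd.price}
d.update({'price': 101.0})
updated = Msg(**d)
assert updated.price == 101.0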

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -15,95 +15,22 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Clearing sub-system message and protocols.
Clearing system messaging types and protocols.
"""
# from collections import (
# ChainMap,
# deque,
# )
from typing import (
Optional,
Literal,
)
from typing import Optional, Union
# TODO: try out just encoding/send direction for now?
# import msgspec
from pydantic import BaseModel
from ..data._source import Symbol
from ..data.types import Struct
# TODO: a composite for tracking msg flow on 2-legged
# dialogs.
# class Dialog(ChainMap):
# '''
# Msg collection abstraction to easily track the state changes of
# a msg flow in one high level, query-able and immutable construct.
# The main use case is to query data from a (long-running)
# msg-transaction-sequence
# '''
# def update(
# self,
# msg,
# ) -> None:
# self.maps.insert(0, msg.to_dict())
# def flatten(self) -> dict:
# return dict(self)
# TODO: ``msgspec`` stuff worth paying attention to:
# - schema evolution:
# https://jcristharif.com/msgspec/usage.html#schema-evolution
# - for eg. ``BrokerdStatus``, instead just have separate messages?
# - use literals for a common msg determined by diff keys?
# - https://jcristharif.com/msgspec/usage.html#literal
# --------------
# Client -> emsd
# --------------
class Order(Struct):
# TODO: ideally we can combine these 2 fields into
# 1 and just use the size polarity to determine a buy/sell.
# i would like to see this become more like
# https://jcristharif.com/msgspec/usage.html#literal
# action: Literal[
# 'live',
# 'dark',
# 'alert',
# ]
action: Literal[
'buy',
'sell',
'alert',
]
# determines whether the create execution
# will be submitted to the ems or directly to
# the backend broker
exec_mode: Literal[
'dark',
'live',
# 'paper', no right?
]
# internal ``emdsd`` unique "order id"
oid: str # uuid4
symbol: str | Symbol
account: str # should we set a default as '' ?
price: float
size: float # -ve is "sell", +ve is "buy"
brokers: Optional[list[str]] = []
class Cancel(Struct):
'''
Cancel msg for removing a dark (ems triggered) or
class Cancel(BaseModel):
'''Cancel msg for removing a dark (ems triggered) or
broker-submitted (live) trigger/order.
'''
@ -112,57 +39,82 @@ class Cancel(Struct):
symbol: str
# --------------
class Order(BaseModel):
action: str # {'buy', 'sell', 'alert'}
# internal ``emdsd`` unique "order id"
oid: str # uuid4
symbol: Union[str, Symbol]
account: str # should we set a default as '' ?
price: float
size: float
brokers: list[str]
# Assigned once initial ack is received
# ack_time_ns: Optional[int] = None
# determines whether the create execution
# will be submitted to the ems or directly to
# the backend broker
exec_mode: str # {'dark', 'live', 'paper'}
class Config:
# just for pre-loading a ``Symbol`` when used
# in the order mode staging process
arbitrary_types_allowed = True
# don't copy this model instance when used in
# a recursive model
copy_on_model_validation = False
# Client <- emsd
# --------------
# update msgs from ems which relay state change info
# from the active clearing engine.
class Status(Struct):
class Status(BaseModel):
name: str = 'status'
oid: str # uuid4
time_ns: int
oid: str # uuid4 ems-order dialog id
resp: Literal[
'pending', # acked by broker but not yet open
'open',
'dark_open', # dark/algo triggered order is open in ems clearing loop
'triggered', # above triggered order sent to brokerd, or an alert closed
'closed', # fully cleared all size/units
'fill', # partial execution
'canceled',
'error',
]
# {
# 'dark_submitted',
# 'dark_cancelled',
# 'dark_triggered',
# 'broker_submitted',
# 'broker_cancelled',
# 'broker_executed',
# 'broker_filled',
# 'broker_errored',
# 'alert_submitted',
# 'alert_triggered',
# }
resp: str # "response", see above
# symbol: str
# trigger info
trigger_price: Optional[float] = None
# price: float
# broker: Optional[str] = None
# this maps normally to the ``BrokerdOrder.reqid`` below, an id
# normally allocated internally by the backend broker routing system
reqid: Optional[int | str] = None
broker_reqid: Optional[Union[int, str]] = None
# the (last) source order/request msg if provided
# (eg. the Order/Cancel which causes this msg) and
# acts as a back-reference to the corresponding
# request message which was the source of this msg.
req: Optional[Order | Cancel] = None
# XXX: better design/name here?
# flag that can be set to indicate a message for an order
# event that wasn't originated by piker's emsd (eg. some external
# trading system which does its own order control but that you
# might want to "track" using piker UIs/systems).
src: Optional[str] = None
# for relaying a boxed brokerd-dialog-side msg data "through" the
# ems layer to clients.
# for relaying backend msg data "through" the ems layer
brokerd_msg: dict = {}
# ---------------
# emsd -> brokerd
# ---------------
# requests *sent* from ems to respective backend broker daemon
class BrokerdCancel(Struct):
class BrokerdCancel(BaseModel):
action: str = 'cancel'
oid: str # piker emsd order id
@ -175,38 +127,34 @@ class BrokerdCancel(Struct):
# for setting a unique order id then this value will be relayed back
# on the emsd order request stream as the ``BrokerdOrderAck.reqid``
# field
reqid: Optional[int | str] = None
reqid: Optional[Union[int, str]] = None
class BrokerdOrder(Struct):
class BrokerdOrder(BaseModel):
action: str # {buy, sell}
oid: str
account: str
time_ns: int
# TODO: if we instead rely on a +ve/-ve size to determine
# the action we more or less don't need this field right?
action: str = '' # {buy, sell}
# "broker request id": broker specific/internal order id if this is
# None, creates a new order otherwise if the id is valid the backend
# api must modify the existing matching order. If the broker allows
# for setting a unique order id then this value will be relayed back
# on the emsd order request stream as the ``BrokerdOrderAck.reqid``
# field
reqid: Optional[int | str] = None
reqid: Optional[Union[int, str]] = None
symbol: str # fqsn
symbol: str # symbol.<providername> ?
price: float
size: float
# ---------------
# emsd <- brokerd
# ---------------
# requests *received* to ems from broker backend
class BrokerdOrderAck(Struct):
class BrokerdOrderAck(BaseModel):
'''
Immediate response to a brokerd order request providing the broker
specific unique order id so that the EMS can associate this
@ -217,32 +165,39 @@ class BrokerdOrderAck(Struct):
name: str = 'ack'
# defined and provided by backend
reqid: int | str
reqid: Union[int, str]
# emsd id originally sent in matching request msg
oid: str
account: str = ''
class BrokerdStatus(Struct):
class BrokerdStatus(BaseModel):
name: str = 'status'
reqid: int | str
reqid: Union[int, str]
time_ns: int
status: Literal[
'open',
'canceled',
'fill',
'pending',
'error',
]
account: str
# XXX: should be best effort set for every update
account: str = ''
# {
# 'submitted',
# 'cancelled',
# 'filled',
# }
status: str
filled: float = 0.0
reason: str = ''
remaining: float = 0.0
# external: bool = False
# XXX: better design/name here?
# flag that can be set to indicate a message for an order
# event that wasn't originated by piker's emsd (eg. some external
# trading system which does its own order control but that you
# might want to "track" using piker UIs/systems).
external: bool = False
# XXX: not required schema as of yet
broker_details: dict = {
@ -250,14 +205,14 @@ class BrokerdStatus(Struct):
}
class BrokerdFill(Struct):
class BrokerdFill(BaseModel):
'''
A single message indicating a "fill-details" event from the broker
if available.
'''
name: str = 'fill'
reqid: int | str
reqid: Union[int, str]
time_ns: int
# order execution related
@ -275,7 +230,7 @@ class BrokerdFill(Struct):
broker_time: float
class BrokerdError(Struct):
class BrokerdError(BaseModel):
'''
Optional error type that can be relayed to emsd for error handling.
@ -287,14 +242,14 @@ class BrokerdError(Struct):
# if no brokerd order request was actually submitted (eg. we errored
# at the ``pikerd`` layer) then there will be ``reqid`` allocated.
reqid: Optional[int | str] = None
reqid: Optional[Union[int, str]] = None
symbol: str
reason: str
broker_details: dict = {}
class BrokerdPosition(Struct):
class BrokerdPosition(BaseModel):
'''Position update event from brokerd.
'''
@ -303,6 +258,6 @@ class BrokerdPosition(Struct):
broker: str
account: str
symbol: str
currency: str
size: float
avg_price: float
currency: str = ''
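A sketch contrasting the two schema styles this diff flips between: ``msgspec.Struct`` msgs are encoded as-is and only validate on decode, while ``pydantic.BaseModel`` msgs validate on construction and were shipped as ``msg.dict()``. The ``CancelDemo`` type is hypothetical:

import msgspec
from msgspec import Struct

class CancelDemo(Struct):
    oid: str
    symbol: str

c = CancelDemo(oid='deadbeef', symbol='xbtusdt.kraken')
wire = msgspec.msgpack.Encoder().encode(c)  # sent directly, no dict dump
back = msgspec.msgpack.Decoder(type=CancelDemo).decode(wire)  # validates here
assert back == c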

View File

@ -22,43 +22,29 @@ from contextlib import asynccontextmanager
from datetime import datetime
from operator import itemgetter
import time
from typing import (
Any,
Optional,
Callable,
)
from typing import Tuple, Optional, Callable
import uuid
from bidict import bidict
import pendulum
import trio
import tractor
from dataclasses import dataclass
from .. import data
from ..data._source import Symbol
from ..data.types import Struct
from ..pp import (
Position,
Transaction,
)
from ..data._normalize import iterticks
from ..data._source import unpack_fqsn
from ..log import get_logger
from ._messages import (
BrokerdCancel,
BrokerdOrder,
BrokerdOrderAck,
BrokerdStatus,
BrokerdFill,
BrokerdPosition,
BrokerdError,
BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
BrokerdFill, BrokerdPosition, BrokerdError
)
log = get_logger(__name__)
class PaperBoi(Struct):
@dataclass
class PaperBoi:
"""
Emulates a broker order client providing the same API and
delivering an order-event response stream but with methods for
@ -72,15 +58,14 @@ class PaperBoi(Struct):
# map of paper "live" orders which be used
# to simulate fills based on paper engine settings
_buys: dict
_sells: dict
_buys: bidict
_sells: bidict
_reqids: bidict
_positions: dict[str, Position]
_trade_ledger: dict[str, Any]
_positions: dict[str, BrokerdPosition]
# init edge case L1 spread
last_ask: tuple[float, float] = (float('inf'), 0) # price, size
last_bid: tuple[float, float] = (0, 0)
last_ask: Tuple[float, float] = (float('inf'), 0) # price, size
last_bid: Tuple[float, float] = (0, 0)
async def submit_limit(
self,
@ -90,28 +75,27 @@ class PaperBoi(Struct):
action: str,
size: float,
reqid: Optional[str],
) -> int:
'''
Place an order and return integer request id provided by client.
"""Place an order and return integer request id provided by client.
'''
"""
is_modify: bool = False
if reqid is None:
reqid = str(uuid.uuid4())
else:
# order is already existing, this is a modify
(oid, symbol, action, old_price) = self._reqids[reqid]
assert old_price != price
is_modify = True
# register order internally
self._reqids[reqid] = (oid, symbol, action, price)
if action == 'alert':
# bypass all fill simulation
return reqid
entry = self._reqids.get(reqid)
if entry:
# order is already existing, this is a modify
(oid, symbol, action, old_price) = entry
assert old_price != price
is_modify = True
else:
# register order internally
self._reqids[reqid] = (oid, symbol, action, price)
# TODO: net latency model
# we checkpoint here quickly particularly
# for dark orders since we want the dark_executed
@ -123,18 +107,15 @@ class PaperBoi(Struct):
size = -size
msg = BrokerdStatus(
status='open',
# account=f'paper_{self.broker}',
account='paper',
status='submitted',
reqid=reqid,
broker=self.broker,
time_ns=time.time_ns(),
filled=0.0,
reason='paper_trigger',
remaining=size,
broker_details={'name': 'paperboi'},
)
await self.ems_trades_stream.send(msg)
await self.ems_trades_stream.send(msg.dict())
# if we're already a clearing price simulate an immediate fill
if (
@ -142,14 +123,7 @@ class PaperBoi(Struct):
) or (
action == 'sell' and (clear_price := self.last_bid[0]) >= price
):
await self.fake_fill(
symbol,
clear_price,
size,
action,
reqid,
oid,
)
await self.fake_fill(symbol, clear_price, size, action, reqid, oid)
else:
# register this submissions as a paper live order
@ -191,14 +165,13 @@ class PaperBoi(Struct):
await trio.sleep(0.05)
msg = BrokerdStatus(
status='canceled',
# account=f'paper_{self.broker}',
account='paper',
status='cancelled',
oid=oid,
reqid=reqid,
broker=self.broker,
time_ns=time.time_ns(),
broker_details={'name': 'paperboi'},
)
await self.ems_trades_stream.send(msg)
await self.ems_trades_stream.send(msg.dict())
async def fake_fill(
self,
@ -222,15 +195,16 @@ class PaperBoi(Struct):
"""
# TODO: net latency model
await trio.sleep(0.05)
fill_time_ns = time.time_ns()
fill_time_s = time.time()
fill_msg = BrokerdFill(
msg = BrokerdFill(
reqid=reqid,
time_ns=fill_time_ns,
time_ns=time.time_ns(),
action=action,
size=size,
price=price,
broker_time=datetime.now().timestamp(),
broker_details={
'paper_info': {
@ -240,66 +214,79 @@ class PaperBoi(Struct):
'name': self.broker + '_paper',
},
)
await self.ems_trades_stream.send(fill_msg)
self._trade_ledger.update(fill_msg.to_dict())
await self.ems_trades_stream.send(msg.dict())
if order_complete:
msg = BrokerdStatus(
reqid=reqid,
time_ns=time.time_ns(),
# account=f'paper_{self.broker}',
account='paper',
status='closed',
status='filled',
filled=size,
remaining=0 if order_complete else remaining,
action=action,
size=size,
price=price,
broker_details={
'paper_info': {
'oid': oid,
},
'name': self.broker,
},
)
await self.ems_trades_stream.send(msg)
await self.ems_trades_stream.send(msg.dict())
# lookup any existing position
token = f'{symbol}.{self.broker}'
pp = self._positions.setdefault(
pp_msg = self._positions.setdefault(
token,
Position(
Symbol(
key=symbol,
broker_info={self.broker: {}},
),
size=size,
ppu=price,
bsuid=symbol,
BrokerdPosition(
broker=self.broker,
account='paper',
symbol=symbol,
# TODO: we need to look up the asset currency from
# broker info. i guess for crypto this can be
# inferred from the pair?
currency='',
size=0.0,
avg_price=0,
)
)
t = Transaction(
fqsn=symbol,
tid=oid,
size=size,
price=price,
cost=0, # TODO: cost model
dt=pendulum.from_timestamp(fill_time_s),
bsuid=symbol,
)
pp.add_clear(t)
pp_msg = BrokerdPosition(
broker=self.broker,
account='paper',
symbol=symbol,
# TODO: we need to look up the asset currency from
# broker info. i guess for crypto this can be
# inferred from the pair?
currency='',
size=pp.size,
avg_price=pp.ppu,
)
# "avg position price" calcs
# TODO: eventually it'd be nice to have a small set of routines
# to do this stuff from a sequence of cleared orders to enable
# so called "contextual positions".
new_size = size + pp_msg.size
await self.ems_trades_stream.send(pp_msg)
# new size minus the old size gives us the size differential with
# +ve -> increase in pp size
# -ve -> decrease in pp size
size_diff = abs(new_size) - abs(pp_msg.size)
if new_size == 0:
pp_msg.avg_price = 0
elif size_diff > 0:
# only update the "average position price" when the position
# size increases not when it decreases (i.e. the position is
# being made smaller)
pp_msg.avg_price = (
abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
) / abs(new_size)
pp_msg.size = new_size
await self.ems_trades_stream.send(pp_msg.dict())
async def simulate_fills(
quote_stream: 'tractor.ReceiveStream', # noqa
client: PaperBoi,
) -> None:
# TODO: more machinery to better simulate real-world market things:
@ -319,54 +306,50 @@ async def simulate_fills(
# this stream may eventually contain multiple symbols
async for quotes in quote_stream:
for sym, quote in quotes.items():
for tick in iterticks(
quote,
# dark order price filter(s)
types=('ask', 'bid', 'trade', 'last')
):
# print(tick)
match tick:
case {
'price': tick_price,
'type': 'ask',
}:
client.last_ask = (
tick_price,
tick.get('size', client.last_ask[1]),
)
tick_price = tick.get('price')
ttype = tick['type']
orders = client._buys.get(sym, {})
book_sequence = reversed(
sorted(orders.keys(), key=itemgetter(1)))
if ttype in ('ask',):
def pred(our_price):
return tick_price <= our_price
client.last_ask = (
tick_price,
tick.get('size', client.last_ask[1]),
)
case {
'price': tick_price,
'type': 'bid',
}:
client.last_bid = (
tick_price,
tick.get('size', client.last_bid[1]),
)
orders = client._sells.get(sym, {})
book_sequence = sorted(
orders.keys(),
key=itemgetter(1)
)
orders = client._buys.get(sym, {})
def pred(our_price):
return tick_price >= our_price
book_sequence = reversed(
sorted(orders.keys(), key=itemgetter(1)))
case {
'price': tick_price,
'type': ('trade' | 'last'),
}:
# TODO: simulate actual book queues and our orders
# place in it, might require full L2 data?
continue
def pred(our_price):
return tick_price < our_price
elif ttype in ('bid',):
client.last_bid = (
tick_price,
tick.get('size', client.last_bid[1]),
)
orders = client._sells.get(sym, {})
book_sequence = sorted(orders.keys(), key=itemgetter(1))
def pred(our_price):
return tick_price > our_price
elif ttype in ('trade', 'last'):
# TODO: simulate actual book queues and our orders
# place in it, might require full L2 data?
continue
# iterate book prices descending
for oid, our_price in book_sequence:
@ -397,75 +380,66 @@ async def handle_order_requests(
) -> None:
request_msg: dict
# order_request: dict
async for request_msg in ems_order_stream:
match request_msg:
case {'action': ('buy' | 'sell')}:
order = BrokerdOrder(**request_msg)
account = order.account
if account != 'paper':
log.error(
'This is a paper account,'
' only a `paper` selection is valid'
)
await ems_order_stream.send(BrokerdError(
oid=order.oid,
symbol=order.symbol,
reason=f'Paper only. No account found: `{account}` ?',
))
continue
reqid = order.reqid or str(uuid.uuid4())
action = request_msg['action']
# deliver ack that order has been submitted to broker routing
await ems_order_stream.send(
BrokerdOrderAck(
oid=order.oid,
reqid=reqid,
)
if action in {'buy', 'sell'}:
account = request_msg['account']
if account != 'paper':
log.error(
'This is a paper account, only a `paper` selection is valid'
)
await ems_order_stream.send(BrokerdError(
oid=request_msg['oid'],
symbol=request_msg['symbol'],
reason=f'Paper only. No account found: `{account}` ?',
).dict())
continue
# call our client api to submit the order
reqid = await client.submit_limit(
# validate
order = BrokerdOrder(**request_msg)
# call our client api to submit the order
reqid = await client.submit_limit(
oid=order.oid,
symbol=order.symbol,
price=order.price,
action=order.action,
size=order.size,
# XXX: by default 0 tells ``ib_insync`` methods that
# there is no existing order so ask the client to create
# a new one (which it seems to do by allocating an int
# counter - collision prone..)
reqid=order.reqid,
)
# deliver ack that order has been submitted to broker routing
await ems_order_stream.send(
BrokerdOrderAck(
# ems order request id
oid=order.oid,
symbol=order.symbol,
price=order.price,
action=order.action,
size=order.size,
# XXX: by default 0 tells ``ib_insync`` methods that
# there is no existing order so ask the client to create
# a new one (which it seems to do by allocating an int
# counter - collision prone..)
# broker specific request id
reqid=reqid,
)
# elif action == 'cancel':
case {'action': 'cancel'}:
msg = BrokerdCancel(**request_msg)
await client.submit_cancel(
reqid=msg.reqid
)
).dict()
)
case _:
log.error(f'Unknown order command: {request_msg}')
elif action == 'cancel':
msg = BrokerdCancel(**request_msg)
await client.submit_cancel(
reqid=msg.reqid
)
_reqids: bidict[str, tuple] = {}
_buys: dict[
str,
dict[
tuple[str, float],
tuple[float, str, str],
]
] = {}
_sells: dict[
str,
dict[
tuple[str, float],
tuple[float, str, str],
]
] = {}
_positions: dict[str, Position] = {}
else:
log.error(f'Unknown order command: {request_msg}')
@tractor.context
@ -477,57 +451,39 @@ async def trades_dialogue(
loglevel: str = None,
) -> None:
tractor.log.get_console_log(loglevel)
async with (
data.open_feed(
[fqsn],
loglevel=loglevel,
) as feed,
):
pp_msgs: list[BrokerdPosition] = []
pos: Position
token: str # f'{symbol}.{self.broker}'
for token, pos in _positions.items():
pp_msgs.append(BrokerdPosition(
broker=broker,
account='paper',
symbol=pos.symbol.front_fqsn(),
size=pos.size,
avg_price=pos.ppu,
))
# TODO: load paper positions per broker from .toml config file
# and pass as symbol to position data mapping: ``dict[str, dict]``
# await ctx.started(all_positions)
await ctx.started((pp_msgs, ['paper']))
await ctx.started(({}, {'paper',}))
async with (
ctx.open_stream() as ems_stream,
trio.open_nursery() as n,
):
client = PaperBoi(
broker,
ems_stream,
_buys=_buys,
_sells=_sells,
_buys={},
_sells={},
_reqids=_reqids,
_reqids={},
# TODO: load paper positions from ``positions.toml``
_positions=_positions,
# TODO: load postions from ledger file
_trade_ledger={},
_positions={},
)
n.start_soon(
handle_order_requests,
client,
ems_stream,
)
n.start_soon(handle_order_requests, client, ems_stream)
# paper engine simulator clearing task
await simulate_fills(feed.stream, client)
@ -555,17 +511,17 @@ async def open_paperboi(
# (we likely don't need more then one proc for basic
# simulated order clearing)
if portal is None:
log.info('Starting new paper-engine actor')
portal = await tn.start_actor(
service_name,
enable_modules=[__name__]
)
async with portal.open_context(
trades_dialogue,
broker=broker,
fqsn=fqsn,
loglevel=loglevel,
trades_dialogue,
broker=broker,
fqsn=fqsn,
loglevel=loglevel,
) as (ctx, first):
yield ctx, first
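A worked sketch of the "average position price" update from ``fake_fill()`` above: the average only moves when the position size increases. The helper name and numbers are hypothetical:

def update_avg_price(
    old_size: float,
    old_avg: float,
    fill_size: float,
    fill_price: float,
) -> tuple[float, float]:
    new_size = fill_size + old_size
    size_diff = abs(new_size) - abs(old_size)
    if new_size == 0:
        new_avg = 0.0
    elif size_diff > 0:
        # position grew: weight the fill into the running average
        new_avg = (
            abs(fill_size) * fill_price + old_avg * abs(old_size)
        ) / abs(new_size)
    else:
        # position shrank: average price stays put
        new_avg = old_avg
    return new_size, new_avg

# long 100 units @ 10.0, buy 50 more @ 13.0 -> 150 units @ 11.0
assert update_avg_price(100, 10.0, 50, 13.0) == (150, 11.0)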

View File

@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
)
log.info(
f'`marketstored` up!\n'
f'pid: {pid}\n'
f'container id: {cid[:12]}\n'
f'`marketstore` up!\n'
f'`marketstored` pid: {pid}\n'
f'docker container id: {cid}\n'
f'config: {pformat(config)}'
)

View File

@ -21,7 +21,6 @@ Broker configuration mgmt.
import platform
import sys
import os
from os import path
from os.path import dirname
import shutil
from typing import Optional
@ -112,7 +111,6 @@ if _parent_user:
_conf_names: set[str] = {
'brokers',
'pps',
'trades',
'watchlists',
}
@ -149,21 +147,19 @@ def get_conf_path(
conf_name: str = 'brokers',
) -> str:
'''
Return the top-level default config path normally under
``~/.config/piker`` on linux for a given ``conf_name``, the config
name.
"""Return the default config path normally under
``~/.config/piker`` on linux.
Contains files such as:
- brokers.toml
- pp.toml
- watchlists.toml
- trades.toml
# maybe coming soon ;)
- signals.toml
- strats.toml
'''
"""
assert conf_name in _conf_names
fn = _conf_fn_w_ext(conf_name)
return os.path.join(
@ -177,7 +173,7 @@ def repodir():
Return the abspath to the repo directory.
'''
dirpath = path.abspath(
dirpath = os.path.abspath(
# we're 3 levels down in **this** module file
dirname(dirname(os.path.realpath(__file__)))
)
@ -186,9 +182,7 @@ def repodir():
def load(
conf_name: str = 'brokers',
path: str = None,
**tomlkws,
path: str = None
) -> (dict, str):
'''
@ -196,7 +190,6 @@ def load(
'''
path = path or get_conf_path(conf_name)
if not os.path.isfile(path):
fn = _conf_fn_w_ext(conf_name)
@ -209,11 +202,8 @@ def load(
# if one exists.
if os.path.isfile(template):
shutil.copyfile(template, path)
else:
with open(path, 'w'):
pass # touch
config = toml.load(path, **tomlkws)
config = toml.load(path)
log.debug(f"Read config file {path}")
return config, path
@ -222,7 +212,6 @@ def write(
config: dict, # toml config as dict
name: str = 'brokers',
path: str = None,
**toml_kwargs,
) -> None:
'''
@ -246,14 +235,11 @@ def write(
f"{path}"
)
with open(path, 'w') as cf:
return toml.dump(
config,
cf,
**toml_kwargs,
)
return toml.dump(config, cf)
def load_accounts(
providers: Optional[list[str]] = None
) -> bidict[str, Optional[str]]:
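A sketch of the idempotent load pattern from ``load()`` above: copy a repo template config if one exists, else touch an empty file, then parse. The helper name and the simplified ``template`` handling are assumptions:

import os
import shutil

import toml

def load_or_create(path: str, template: str = '') -> dict:
    if not os.path.isfile(path):
        if template and os.path.isfile(template):
            shutil.copyfile(template, path)
        else:
            with open(path, 'w'):
                pass  # touch
    return toml.load(path)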

View File

@ -37,13 +37,8 @@ from docker.models.containers import Container as DockerContainer
from docker.errors import (
DockerException,
APIError,
# ContainerError,
)
import requests
from requests.exceptions import (
ConnectionError,
ReadTimeout,
)
from requests.exceptions import ConnectionError, ReadTimeout
from ..log import get_logger, get_console_log
from .. import config
@ -55,8 +50,8 @@ class DockerNotStarted(Exception):
'Prolly you dint start da daemon bruh'
class ApplicationLogError(Exception):
'App in container reported an error in logs'
class ContainerError(RuntimeError):
'Error reported via app-container logging level'
@acm
@ -101,9 +96,9 @@ async def open_docker(
# not perms?
raise
# finally:
# if client:
# client.close()
finally:
if client:
client.close()
class Container:
@ -161,7 +156,7 @@ class Container:
# print(f'level: {level}')
if level in ('error', 'fatal'):
raise ApplicationLogError(msg)
raise ContainerError(msg)
if patt in msg:
return True
@ -190,29 +185,12 @@ class Container:
if 'is not running' in err.explanation:
return False
def hard_kill(self, start: float) -> None:
delay = time.time() - start
# get out the big guns, bc apparently marketstore
# doesn't actually know how to terminate gracefully
# :eyeroll:...
log.error(
f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
)
self.try_signal('SIGKILL')
self.cntr.wait(
timeout=3,
condition='not-running',
)
async def cancel(
self,
stop_msg: str,
hard_kill: bool = False,
) -> None:
cid = self.cntr.id
# first try a graceful cancel
log.cancel(
f'SIGINT cancelling container: {cid}\n'
@ -221,25 +199,15 @@ class Container:
self.try_signal('SIGINT')
start = time.time()
for _ in range(6):
for _ in range(30):
with trio.move_on_after(0.5) as cs:
log.cancel('polling for CNTR logs...')
cs.shield = True
await self.process_logs_until(stop_msg)
try:
await self.process_logs_until(stop_msg)
except ApplicationLogError:
hard_kill = True
else:
# if we aren't cancelled on above checkpoint then we
# assume we read the expected stop msg and
# terminated.
break
if cs.cancelled_caught:
# on timeout just try a hard kill after
# a quick container sync-wait.
hard_kill = True
# if we aren't cancelled on above checkpoint then we
# assume we read the expected stop msg and terminated.
break
try:
log.info(f'Polling for container shutdown:\n{cid}')
@ -250,7 +218,6 @@ class Container:
condition='not-running',
)
# graceful exit if we didn't time out
break
except (
@ -262,23 +229,25 @@ class Container:
except (
docker.errors.APIError,
ConnectionError,
requests.exceptions.ConnectionError,
trio.Cancelled,
):
log.exception('Docker connection failure')
self.hard_kill(start)
raise
except trio.Cancelled:
log.exception('trio cancelled...')
self.hard_kill(start)
break
else:
hard_kill = True
delay = time.time() - start
log.error(
f'Failed to kill container {cid} after {delay}s\n'
'sending SIGKILL..'
)
# get out the big guns, bc apparently marketstore
# doesn't actually know how to terminate gracefully
# :eyeroll:...
self.try_signal('SIGKILL')
self.cntr.wait(
timeout=3,
condition='not-running',
)
if hard_kill:
self.hard_kill(start)
else:
log.cancel(f'Container stopped: {cid}')
log.cancel(f'Container stopped: {cid}')
@tractor.context
@ -320,13 +289,15 @@ async def open_ahabd(
))
try:
# TODO: we might eventually want a proxy-style msg-prot here
# to allow remote control of containers without needing
# callers to have root perms?
await trio.sleep_forever()
finally:
await cntr.cancel(stop_msg)
with trio.CancelScope(shield=True):
await cntr.cancel(stop_msg)
async def start_ahab(
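A sketch of the supervision pattern in ``Container.cancel()`` above: poll for the expected stop msg after a SIGINT, then fall back to SIGKILL. ``cntr`` is assumed to be a docker-py style container object and ``saw_stop_msg`` a log-polling callback; both are illustrative, not part of the changeset:

import time

def cancel_container(cntr, saw_stop_msg, attempts: int = 6) -> None:
    cntr.kill('SIGINT')  # first try a graceful cancel
    for _ in range(attempts):
        if saw_stop_msg():
            break  # expected stop msg seen in app logs
        time.sleep(0.5)
    else:
        # no graceful exit: get out the big guns
        cntr.kill('SIGKILL')
    cntr.wait(timeout=3, condition='not-running')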

View File

@ -56,7 +56,7 @@ def iterticks(
sig = (
time,
tick['price'],
tick.get('size')
tick['size']
)
if ttype == 'dark_trade':

View File

@ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
if _USE_POSIX:
from _posixshmem import shm_unlink
# import msgspec
import numpy as np
from numpy.lib import recfunctions as rfn
import tractor
import numpy as np
from pydantic import BaseModel
from numpy.lib import recfunctions as rfn
from ..log import get_logger
from ._source import base_iohlc_dtype
from .types import Struct
log = get_logger(__name__)
@ -108,12 +107,15 @@ class SharedInt:
log.warning(f'Shm for {name} already unlinked?')
class _Token(Struct, frozen=True):
class _Token(BaseModel):
'''
Internal representation of a shared memory "token"
which can be used to key a system wide post shm entry.
'''
class Config:
frozen = True
shm_name: str # this serves as a "key" value
shm_first_index_name: str
shm_last_index_name: str
@ -124,22 +126,17 @@ class _Token(Struct, frozen=True):
return np.dtype(list(map(tuple, self.dtype_descr))).descr
def as_msg(self):
return self.to_dict()
return self.dict()
@classmethod
def from_msg(cls, msg: dict) -> _Token:
if isinstance(msg, _Token):
return msg
# TODO: native struct decoding
# return _token_dec.decode(msg)
msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
return _Token(**msg)
# _token_dec = msgspec.msgpack.Decoder(_Token)
# TODO: this api?
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
# _known_tokens = tractor.ContextStack('_known_tokens', )
@ -170,7 +167,7 @@ def _make_token(
shm_name=key,
shm_first_index_name=key + "_first",
shm_last_index_name=key + "_last",
dtype_descr=tuple(np.dtype(dtype).descr)
dtype_descr=np.dtype(dtype).descr
)

View File

@ -23,7 +23,7 @@ import decimal
from bidict import bidict
import numpy as np
from msgspec import Struct
from pydantic import BaseModel
# from numba import from_dtype
@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
)
class Symbol(Struct):
class Symbol(BaseModel):
'''
I guess this is some kinda container thing for dealing with
all the different meta-data formats from brokers?
@ -152,7 +152,9 @@ class Symbol(Struct):
info: dict[str, Any],
suffix: str = '',
) -> Symbol:
# XXX: like wtf..
# ) -> 'Symbol':
) -> None:
tick_size = info.get('price_tick_size', 0.01)
lot_tick_size = info.get('lot_tick_size', 0.0)
@ -173,7 +175,9 @@ class Symbol(Struct):
fqsn: str,
info: dict[str, Any],
) -> Symbol:
# XXX: like wtf..
# ) -> 'Symbol':
) -> None:
broker, key, suffix = unpack_fqsn(fqsn)
return cls.from_broker_info(
broker,
@ -236,7 +240,7 @@ class Symbol(Struct):
'''
tokens = self.tokens()
fqsn = '.'.join(map(str.lower, tokens))
fqsn = '.'.join(tokens)
return fqsn
def iterfqsns(self) -> list[str]:

View File

@ -53,11 +53,13 @@ class NoBsWs:
def __init__(
self,
url: str,
token: str,
stack: AsyncExitStack,
fixture: Callable,
serializer: ModuleType = json,
):
self.url = url
self.token = token
self.fixture = fixture
self._stack = stack
self._ws: 'WebSocketConnection' = None # noqa
@ -81,9 +83,14 @@ class NoBsWs:
trio_websocket.open_websocket_url(self.url)
)
# rerun user code fixture
ret = await self._stack.enter_async_context(
self.fixture(self)
)
if self.token == '':
ret = await self._stack.enter_async_context(
self.fixture(self)
)
else:
ret = await self._stack.enter_async_context(
self.fixture(self, self.token)
)
assert ret is None
@ -128,13 +135,14 @@ async def open_autorecon_ws(
# TODO: proper type annot smh
fixture: Callable,
# used for authenticated websockets
token: str = '',
) -> AsyncGenerator[tuple[...], NoBsWs]:
"""Apparently we can QoS for all sorts of reasons..so catch em.
"""
async with AsyncExitStack() as stack:
ws = NoBsWs(url, stack, fixture=fixture)
ws = NoBsWs(url, token, stack, fixture=fixture)
await ws._connect()
try:
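A sketch of a user "fixture" as consumed by ``NoBsWs`` above; it re-runs on every (re)connect to restore subscriptions. The payload shape and the ``send_msg()`` helper are assumptions, not part of the changeset:

from contextlib import asynccontextmanager as acm

@acm
async def subscribe_fixture(ws, token: str = ''):
    msg = {'op': 'subscribe', 'channels': ['l1']}
    if token:
        msg['token'] = token  # authenticated feeds only
    await ws.send_msg(msg)
    yield  # must yield ``None``, see the assert above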

View File

@ -42,6 +42,7 @@ from trio_typing import TaskStatus
import trimeter
import tractor
from tractor.trionics import maybe_open_context
from pydantic import BaseModel
import pendulum
import numpy as np
@ -58,7 +59,6 @@ from ._sharedmem import (
ShmArray,
)
from .ingest import get_ingestormod
from .types import Struct
from ._source import (
base_iohlc_dtype,
Symbol,
@ -84,7 +84,7 @@ if TYPE_CHECKING:
log = get_logger(__name__)
class _FeedsBus(Struct):
class _FeedsBus(BaseModel):
'''
Data feeds broadcaster and persistence management.
@ -100,6 +100,10 @@ class _FeedsBus(Struct):
a dedicated cancel scope.
'''
class Config:
arbitrary_types_allowed = True
underscore_attrs_are_private = False
brokername: str
nursery: trio.Nursery
feeds: dict[str, tuple[dict, dict]] = {}
@ -309,7 +313,7 @@ async def start_backfill(
# when no tsdb "last datum" is provided, we just load
# some near-term history.
periods = {
1: {'seconds': 4000},
1: {'days': 1},
60: {'days': 14},
}

View File

@ -37,7 +37,7 @@ import time
from math import isnan
from bidict import bidict
from msgspec.msgpack import encode, decode
import msgpack
import pyqtgraph as pg
import numpy as np
import tractor
@ -774,13 +774,12 @@ async def stream_quotes(
async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
# send subs topics to server
resp = await ws.send_message(
encode({'streams': list(tbks.values())})
msgpack.dumps({'streams': list(tbks.values())})
)
log.info(resp)
async def recv() -> dict[str, Any]:
return decode((await ws.get_message()), encoding='utf-8')
return msgpack.loads((await ws.get_message()), encoding='utf-8')
streams = (await recv())['streams']
log.info(f"Subscribed to {streams}")

View File

@ -1,84 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Built-in (extension) types.
"""
import sys
from typing import Optional
from pprint import pformat
import msgspec
class Struct(
msgspec.Struct,
# https://jcristharif.com/msgspec/structs.html#tagged-unions
# tag='pikerstruct',
# tag=True,
):
'''
A "human friendlier" (aka repl buddy) struct subtype.
'''
def to_dict(self) -> dict:
return {
f: getattr(self, f)
for f in self.__struct_fields__
}
def __repr__(self):
# only turn on pprint when we detect a python REPL
# at runtime B)
if (
hasattr(sys, 'ps1')
# TODO: check if we're in pdb
):
return f'Struct({pformat(self.to_dict())})'
return super().__repr__()
def copy(
self,
update: Optional[dict] = None,
) -> msgspec.Struct:
'''
Validate-typecast all self defined fields, return a copy of us
with all such fields.
This is kinda like the default behaviour in `pydantic.BaseModel`.
'''
if update:
for k, v in update.items():
setattr(self, k, v)
# roundtrip serialize to validate
return msgspec.msgpack.Decoder(
type=type(self)
).decode(
msgspec.msgpack.Encoder().encode(self)
)
def typecast(
self,
# fields: Optional[list[str]] = None,
) -> None:
for fname, ftype in self.__annotations__.items():
setattr(self, fname, ftype(getattr(self, fname)))
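A usage sketch for the ``Struct`` extension above; note that ``.copy(update=...)`` as written mutates ``self`` before the validating msgpack roundtrip. ``PointDemo`` is hypothetical:

class PointDemo(Struct):
    x: int
    y: int

p = PointDemo(x=1, y=2)
p2 = p.copy(update={'x': 3})  # roundtrip re-validates all fields
assert (p2.x, p2.y) == (3, 2)
assert p.to_dict() == {'x': 3, 'y': 2}  # the update hit ``p`` as well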

View File

@ -78,8 +78,7 @@ class Fsp:
# + the consuming fsp *to* the consumers output
# shm flow.
_flow_registry: dict[
tuple[_Token, str],
tuple[_Token, Optional[ShmArray]],
tuple[_Token, str], _Token,
] = {}
def __init__(
@ -121,6 +120,7 @@ class Fsp:
):
return self.func(*args, **kwargs)
# TODO: lru_cache this? prettty sure it'll work?
def get_shm(
self,
src_shm: ShmArray,
@ -131,27 +131,12 @@ class Fsp:
for this "instance" of a signal processor for
the given ``key``.
The destination shm "token" and array are cached if possible to
minimize multiple stdlib/system calls.
'''
dst_token, maybe_array = self._flow_registry[
dst_token = self._flow_registry[
(src_shm._token, self.name)
]
if maybe_array is None:
self._flow_registry[
(src_shm._token, self.name)
] = (
dst_token,
# "cache" the ``ShmArray`` such that
# we call the underlying "attach" code as few
# times as possible as per:
# - https://github.com/pikers/piker/issues/359
# - https://github.com/pikers/piker/issues/332
maybe_array := attach_shm_array(dst_token)
)
return maybe_array
shm = attach_shm_array(dst_token)
return shm
def fsp(

View File

@ -114,7 +114,7 @@ async def fsp_compute(
dict[str, np.ndarray], # multi-output case
np.ndarray, # single output case
]
history_output = await anext(out_stream)
history_output = await out_stream.__anext__()
func_name = func.__name__
profiler(f'{func_name} generated history')
@ -284,10 +284,9 @@ async def cascade(
# TODO: ugh i hate this wind/unwind to list over the wire
# but not sure how else to do it.
for (token, fsp_name, dst_token) in shm_registry:
Fsp._flow_registry[(
_Token.from_msg(token),
fsp_name,
)] = _Token.from_msg(dst_token), None
Fsp._flow_registry[
(_Token.from_msg(token), fsp_name)
] = _Token.from_msg(dst_token)
fsp: Fsp = reg.get(
NamespacePath(ns_path)
@ -375,8 +374,7 @@ async def cascade(
'key': dst_shm_token,
'first': dst._first.value,
'last': dst._last.value,
}
})
}})
return tracker, index
def is_synced(

View File

@ -1,975 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..
(looking at you `ib` and dirt-bird friends)
'''
from contextlib import contextmanager as cm
from pprint import pformat
import os
from os import path
from math import copysign
import re
import time
from typing import (
Any,
Optional,
Union,
)
import pendulum
from pendulum import datetime, now
import tomli
import toml
from . import config
from .brokers import get_brokermod
from .clearing._messages import BrokerdPosition, Status
from .data._source import Symbol
from .log import get_logger
from .data.types import Struct
log = get_logger(__name__)
@cm
def open_trade_ledger(
broker: str,
account: str,
) -> str:
'''
Idempotently create and read in a trade log file from the
``<configuration_dir>/ledgers/`` directory.
Files are named per broker account of the form
``<brokername>_<accountname>.toml``. The ``accountname`` here is the
name as defined in the user's ``brokers.toml`` config.
'''
ldir = path.join(config._config_dir, 'ledgers')
if not path.isdir(ldir):
os.makedirs(ldir)
fname = f'trades_{broker}_{account}.toml'
tradesfile = path.join(ldir, fname)
if not path.isfile(tradesfile):
log.info(
f'Creating new local trades ledger: {tradesfile}'
)
with open(tradesfile, 'w') as cf:
pass # touch
with open(tradesfile, 'rb') as cf:
start = time.time()
ledger = tomli.load(cf)
print(f'Ledger load took {time.time() - start}s')
cpy = ledger.copy()
try:
yield cpy
finally:
if cpy != ledger:
# TODO: show diff output?
# https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
print(f'Updating ledger for {tradesfile}:\n')
ledger.update(cpy)
# we write on close the mutated ledger data
with open(tradesfile, 'w') as cf:
toml.dump(ledger, cf)
class Transaction(Struct, frozen=True):
# TODO: should this be ``.to`` (see below)?
fqsn: str
tid: Union[str, int] # unique transaction id
size: float
price: float
cost: float # commisions or other additional costs
dt: datetime
expiry: Optional[datetime] = None
# optional key normally derived from the broker
# backend which ensures the instrument-symbol this record
# is for is truly unique.
bsuid: Optional[Union[str, int]] = None
# optional fqsn for the source "asset"/money symbol?
# from: Optional[str] = None
class Position(Struct):
'''
Basic pp (personal/piker position) model with attached clearing
transaction history.
'''
symbol: Symbol
# can be +ve or -ve for long/short
size: float
# "breakeven price" above or below which pnl moves above and below
# zero for the entirety of the current "trade state".
ppu: float
# unique backend symbol id
bsuid: str
split_ratio: Optional[int] = None
# ordered record of known constituent trade messages
clears: dict[
Union[str, int, Status], # trade id
dict[str, Any], # transaction history summaries
] = {}
first_clear_dt: Optional[datetime] = None
expiry: Optional[datetime] = None
def to_dict(self) -> dict:
return {
f: getattr(self, f)
for f in self.__struct_fields__
}
def to_pretoml(self) -> tuple[str, dict]:
'''
Prep this position's data contents for export to toml including
re-structuring of the ``.clears`` table to an array of
inline-subtables for better ``pps.toml`` compactness.
'''
d = self.to_dict()
clears = d.pop('clears')
expiry = d.pop('expiry')
if self.split_ratio is None:
d.pop('split_ratio')
# should be obvious from clears/event table
d.pop('first_clear_dt')
# TODO: we need to figure out how to have one top level
# listing venue here even when the backend isn't providing
# it via the trades ledger..
# drop symbol obj in serialized form
s = d.pop('symbol')
fqsn = s.front_fqsn()
if self.expiry is None:
d.pop('expiry', None)
elif expiry:
d['expiry'] = str(expiry)
toml_clears_list = []
# reverse sort so latest clears are at top of section?
for tid, data in sorted(
list(clears.items()),
# sort by datetime
key=lambda item: item[1]['dt'],
):
inline_table = toml.TomlDecoder().get_empty_inline_table()
# serialize datetime to parsable `str`
inline_table['dt'] = str(data['dt'])
# insert optional clear fields in column order
for k in ['ppu', 'accum_size']:
val = data.get(k)
if val:
inline_table[k] = val
# insert required fields
for k in ['price', 'size', 'cost']:
inline_table[k] = data[k]
inline_table['tid'] = tid
toml_clears_list.append(inline_table)
d['clears'] = toml_clears_list
return fqsn, d
def ensure_state(self) -> None:
'''
Audit both the `.size` and `.ppu` local instance vars against
the clears table calculations; if they differ, log warnings to
console and update to the calc-ed values.
'''
clears = list(self.clears.values())
self.first_clear_dt = min(list(entry['dt'] for entry in clears))
last_clear = clears[-1]
csize = self.calc_size()
accum = last_clear['accum_size']
if not self.expired():
if (
csize != accum
and csize != round(accum * self.split_ratio or 1)
):
raise ValueError(f'Size mismatch: {csize}')
else:
assert csize == 0, 'Contract is expired but non-zero size?'
if self.size != csize:
log.warning(
'Position state mismatch:\n'
f'{self.size} => {csize}'
)
self.size = csize
cppu = self.calc_ppu()
ppu = last_clear['ppu']
if (
cppu != ppu
and self.split_ratio is not None
# handle any split info entered (for now) manually by user
and cppu != (ppu / self.split_ratio)
):
raise ValueError(f'PPU mismatch: {cppu}')
if self.ppu != cppu:
log.warning(
'Position state mismatch:\n'
f'{self.ppu} => {cppu}'
)
self.ppu = cppu
def update_from_msg(
self,
msg: BrokerdPosition,
) -> None:
# XXX: better place to do this?
symbol = self.symbol
lot_size_digits = symbol.lot_size_digits
ppu, size = (
round(
msg['avg_price'],
ndigits=symbol.tick_size_digits
),
round(
msg['size'],
ndigits=lot_size_digits
),
)
self.ppu = ppu
self.size = size
@property
def dsize(self) -> float:
'''
The "dollar" size of the pp, normally in trading (fiat) unit
terms.
'''
return self.ppu * self.size
# TODO: idea: "real LIFO" dynamic positioning.
# - when a trade takes place where the pnl for
# the (set of) trade(s) is below the breakeven price
# it may be that the trader took a +ve pnl on a short(er)
# term trade in the same account.
# - in this case we could recalc the be price to
# be reverted back to its prior value before the nearest term
# trade was opened.?
# def lifo_price() -> float:
# ...
def calc_ppu(
self,
# include transaction cost in breakeven price
# and presume the worst case of the same cost
# to exit this transaction (even though in reality
# it will be dynamic based on exit strategy).
cost_scalar: float = 2,
) -> float:
'''
Compute the "price-per-unit" price for the given non-zero sized
rolling position.
The recurrence relation which computes this (exponential) mean
per new clear which **increases** the accumulative postiion size
is:
ppu[-1] = (
ppu[-2] * accum_size[-2]
+
ppu[-1] * size
) / accum_size[-1]
where `cost_basis` for the current step is simply the price
* size of the most recent clearing transaction.
'''
asize_h: list[float] = [] # historical accumulative size
ppu_h: list[float] = [] # historical price-per-unit
clears = list(self.clears.items())
for i, (tid, entry) in enumerate(clears):
clear_size = entry['size']
clear_price = entry['price']
last_accum_size = asize_h[-1] if asize_h else 0
accum_size = last_accum_size + clear_size
accum_sign = copysign(1, accum_size)
sign_change: bool = False
if accum_size == 0:
ppu_h.append(0)
asize_h.append(0)
continue
# test if the pp somehow went "past" a net zero size state
# resulting in a change of the "sign" of the size (+ve for
# long, -ve for short).
sign_change = (
copysign(1, last_accum_size) + accum_sign == 0
and last_accum_size != 0
)
# since we passed the net-zero-size state the new size
# after sum should be the remaining size the new
# "direction" (aka, long vs. short) for this clear.
if sign_change:
clear_size = accum_size
abs_diff = abs(accum_size)
asize_h.append(0)
ppu_h.append(0)
else:
# new size minus the old size gives us the size diff with
# +ve -> increase in pp size
# -ve -> decrease in pp size
abs_diff = abs(accum_size) - abs(last_accum_size)
# XXX: LIFO breakeven price update. only an increase in size
# of the position contributes to the breakeven price,
# a decrease does not (i.e. the position is being made
# smaller).
# abs_clear_size = abs(clear_size)
abs_new_size = abs(accum_size)
if abs_diff > 0:
cost_basis = (
# cost basis for this clear
clear_price * abs(clear_size)
+
# transaction cost
accum_sign * cost_scalar * entry['cost']
)
if asize_h:
size_last = abs(asize_h[-1])
cb_last = ppu_h[-1] * size_last
ppu = (cost_basis + cb_last) / abs_new_size
else:
ppu = cost_basis / abs_new_size
ppu_h.append(ppu)
asize_h.append(accum_size)
else:
# on "exit" clears from a given direction,
# only the size changes not the price-per-unit
# need to be updated since the ppu remains constant
# and gets weighted by the new size.
asize_h.append(accum_size)
ppu_h.append(ppu_h[-1])
final_ppu = ppu_h[-1] if ppu_h else 0
# handle any split info entered (for now) manually by user
if self.split_ratio is not None:
final_ppu /= self.split_ratio
return final_ppu
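# NOTE: illustrative trace only (not from the changeset): with
# cost_scalar=2, buy 10 @ 100 (cost 1.0) then 10 @ 110 (cost 1.0):
#   clear 1: accum_size=10, cost_basis = 100*10 + 2*1 = 1002,
#            ppu = 1002 / 10 = 100.2
#   clear 2: accum_size=20, cost_basis = 110*10 + 2*1 = 1102,
#            ppu = (1102 + 100.2*10) / 20 = 105.2
# a later size-decreasing clear appends the same ppu unchanged.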
def expired(self) -> bool:
'''
Predicate which checks if the contract/instrument is past its expiry.
'''
return bool(self.expiry) and self.expiry < now()
def calc_size(self) -> float:
'''
Calculate the unit size of this position in the destination
asset using the clears/trade event table; zero if expired.
'''
size: float = 0
# time-expired pps (normally derivatives) are "closed"
# and have a zero size.
if self.expired():
return 0
for tid, entry in self.clears.items():
size += entry['size']
if self.split_ratio is not None:
size = round(size * self.split_ratio)
return size
def minimize_clears(
self,
) -> dict[str, dict]:
'''
Minimize the position's clears entries by removing
all transactions before the last net zero size to avoid
unnecessary history irrelevant to the current pp state.
'''
size: float = 0
clears_since_zero: list[tuple(str, dict)] = []
# TODO: we might just want to always do this when iterating
# a ledger? keep a state of the last net-zero and only do the
# full iterate when no state was stashed?
# scan for the last "net zero" position by iterating
# transactions until the next net-zero size, rinse, repeat.
for tid, clear in self.clears.items():
size += clear['size']
clears_since_zero.append((tid, clear))
if size == 0:
clears_since_zero.clear()
self.clears = dict(clears_since_zero)
return self.clears
def add_clear(
self,
t: Transaction,
) -> dict:
'''
Update clearing table and populate rolling ppu and accumulative
size in both the clears entry and local attrs state.
'''
clear = self.clears[t.tid] = {
'cost': t.cost,
'price': t.price,
'size': t.size,
'dt': t.dt,
}
# TODO: compute these incrementally instead
# of re-looping through each time resulting in O(n**2)
# behaviour..?
# NOTE: we compute these **after** adding the entry in order to
# make the recurrence relation math work inside
# ``.calc_size()``.
self.size = clear['accum_size'] = self.calc_size()
self.ppu = clear['ppu'] = self.calc_ppu()
return clear
def suggest_split(self) -> float:
...
class PpTable(Struct):
brokername: str
acctid: str
pps: dict[str, Position]
conf: Optional[dict] = {}
def update_from_trans(
self,
trans: dict[str, Transaction],
cost_scalar: float = 2,
) -> dict[str, Position]:
pps = self.pps
updated: dict[str, Position] = {}
# lifo update all pps from records
for tid, t in trans.items():
pp = pps.setdefault(
t.bsuid,
# if no existing pp, allocate fresh one.
Position(
Symbol.from_fqsn(
t.fqsn,
info={},
),
size=0.0,
ppu=0.0,
bsuid=t.bsuid,
expiry=t.expiry,
)
)
clears = pp.clears
if clears:
first_clear_dt = pp.first_clear_dt
# don't do updates for ledger records we already have
# included in the current pps state.
if (
t.tid in clears
or first_clear_dt and t.dt < first_clear_dt
):
# NOTE: likely you'll see repeats of the same
# ``Transaction`` passed in here if/when you are restarting
# a ``brokerd.ib`` where the API will re-report trades from
# the current session, so we need to make sure we don't
# "double count" these in pp calculations.
continue
# update clearing table
pp.add_clear(t)
updated[t.bsuid] = pp
# minimize clears tables and update sizing.
for bsuid, pp in updated.items():
pp.ensure_state()
return updated
def dump_active(
self,
) -> tuple[
dict[str, Position],
dict[str, Position]
]:
'''
Iterate all tabulated positions, render active positions to
a ``dict`` format amenable to serialization (via TOML) and drop
from state (``.pps``) as well as return in a ``dict`` all
``Position``s which have recently closed.
'''
# NOTE: newly closed position are also important to report/return
# since a consumer, like an order mode UI ;), might want to react
# based on the closure (for example removing the breakeven line
# and clearing the entry from any lists/monitors).
closed_pp_objs: dict[str, Position] = {}
open_pp_objs: dict[str, Position] = {}
pp_objs = self.pps
for bsuid in list(pp_objs):
pp = pp_objs[bsuid]
# XXX: debug hook for size mismatches
# qqqbsuid = 320227571
# if bsuid == qqqbsuid:
# breakpoint()
pp.ensure_state()
if (
# "net-zero" is a "closed" position
pp.size == 0
# time-expired pps (normally derivatives) are "closed"
or (pp.expiry and pp.expiry < now())
):
# for expired cases
pp.size = 0
# NOTE: we DO NOT pop the pp here since it can still be
# used to check for duplicate clears that may come in as
# new transaction from some backend API and need to be
# ignored; the closed positions won't be written to the
# ``pps.toml`` since ``pp_active_entries`` above is what's
# written.
closed_pp_objs[bsuid] = pp
else:
open_pp_objs[bsuid] = pp
return open_pp_objs, closed_pp_objs
def to_toml(
self,
) -> dict[str, Any]:
active, closed = self.dump_active()
# ONLY dict-serialize all active positions; those that are closed
# we don't store in the ``pps.toml``.
to_toml_dict = {}
for bsuid, pos in active.items():
# keep the minimal amount of clears that make up this
# position since the last net-zero state.
pos.minimize_clears()
pos.ensure_state()
# serialize to pre-toml form
fqsn, asdict = pos.to_pretoml()
log.info(f'Updating active pp: {fqsn}')
# XXX: ugh, it's cuz we push the section under
# the broker name.. maybe we need to rethink this?
brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
to_toml_dict[brokerless_key] = asdict
return to_toml_dict
def write_config(self) -> None:
'''
Write the current position table to the user's ``pps.toml``.
'''
# TODO: show diff output?
# https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
print(f'Updating ``pps.toml`` for {path}:\n')
# active, closed_pp_objs = table.dump_active()
pp_entries = self.to_toml()
self.conf[self.brokername][self.acctid] = pp_entries
# TODO: why tf haven't they already done this for inline
# tables smh..
enc = PpsEncoder(preserve=True)
# table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
enc.dump_funcs[
toml.decoder.InlineTableDict
] = enc.dump_inline_table
config.write(
self.conf,
'pps',
encoder=enc,
)
def load_pps_from_ledger(
brokername: str,
acctname: str,
# post normalization filter on ledger entries to be processed
filter_by: Optional[list[dict]] = None,
) -> tuple[
dict[str, Transaction],
dict[str, Position],
]:
'''
Open a ledger file by broker name and account and read in and
process any trade records into our normalized ``Transaction`` form
and then update the equivalent ``PpTable`` and deliver the two
bsuid-mapped dict-sets of the transactions and pps.
'''
with (
open_trade_ledger(brokername, acctname) as ledger,
open_pps(brokername, acctname) as table,
):
if not ledger:
# null case, no ledger file with content
return {}
mod = get_brokermod(brokername)
src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)
if filter_by:
records = {}
bsuids = set(filter_by)
for tid, r in src_records.items():
if r.bsuid in bsuids:
records[tid] = r
else:
records = src_records
updated = table.update_from_trans(records)
return records, updated
# TODO: instead see if we can hack tomli and tomli-w to do the same:
# - https://github.com/hukkin/tomli
# - https://github.com/hukkin/tomli-w
class PpsEncoder(toml.TomlEncoder):
'''
Special "styled" encoder that makes a ``pps.toml`` redable and
compact by putting `.clears` tables inline and everything else
flat-ish.
'''
separator = ','
def dump_list(self, v):
'''
Dump an inline list with a newline after every element and
with consideration for denoted inline table types.
'''
retval = "[\n"
for u in v:
if isinstance(u, toml.decoder.InlineTableDict):
out = self.dump_inline_table(u)
else:
out = str(self.dump_value(u))
retval += " " + out + "," + "\n"
retval += "]"
return retval
def dump_inline_table(self, section):
"""Preserve inline table in its compact syntax instead of expanding
into subsection.
https://github.com/toml-lang/toml#user-content-inline-table
"""
val_list = []
for k, v in section.items():
# if isinstance(v, toml.decoder.InlineTableDict):
if isinstance(v, dict):
val = self.dump_inline_table(v)
else:
val = str(self.dump_value(v))
val_list.append(k + " = " + val)
retval = "{ " + ", ".join(val_list) + " }"
return retval
def dump_sections(self, o, sup):
retstr = ""
if sup != "" and sup[-1] != ".":
sup += '.'
retdict = self._dict()
arraystr = ""
for section in o:
qsection = str(section)
value = o[section]
if not re.match(r'^[A-Za-z0-9_-]+$', section):
qsection = toml.encoder._dump_str(section)
# arrayoftables = False
if (
self.preserve
and isinstance(value, toml.decoder.InlineTableDict)
):
retstr += (
qsection
+
" = "
+
self.dump_inline_table(o[section])
+
'\n' # only on the final terminating right brace
)
# XXX: this code i'm pretty sure is just blatantly bad
# and/or wrong..
# if isinstance(o[section], list):
# for a in o[section]:
# if isinstance(a, dict):
# arrayoftables = True
# if arrayoftables:
# for a in o[section]:
# arraytabstr = "\n"
# arraystr += "[[" + sup + qsection + "]]\n"
# s, d = self.dump_sections(a, sup + qsection)
# if s:
# if s[0] == "[":
# arraytabstr += s
# else:
# arraystr += s
# while d:
# newd = self._dict()
# for dsec in d:
# s1, d1 = self.dump_sections(d[dsec], sup +
# qsection + "." +
# dsec)
# if s1:
# arraytabstr += ("[" + sup + qsection +
# "." + dsec + "]\n")
# arraytabstr += s1
# for s1 in d1:
# newd[dsec + "." + s1] = d1[s1]
# d = newd
# arraystr += arraytabstr
elif isinstance(value, dict):
retdict[qsection] = o[section]
elif o[section] is not None:
retstr += (
qsection
+
" = "
+
str(self.dump_value(o[section]))
)
# if not isinstance(value, dict):
if not isinstance(value, toml.decoder.InlineTableDict):
# inline tables should not contain newlines:
# https://toml.io/en/v1.0.0#inline-table
retstr += '\n'
else:
raise ValueError(value)
retstr += arraystr
return (retstr, retdict)
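To make the encoder's behavior concrete, a small hedged example; the clear-event values are hypothetical and the marker subclass mimics how ``toml``'s decoder tags inline tables when loaded with ``preserve=True``:

import toml

class InlineDict(dict, toml.decoder.InlineTableDict):
    # marker type so the encoder dispatches to ``dump_inline_table``
    pass

enc = PpsEncoder(preserve=True)
enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table

clears = [
    InlineDict(price=10.5, size=100.0, tid='abc123'),  # hypothetical
]
print(enc.dump_list(clears))
# [
#  { price = 10.5, size = 100.0, tid = "abc123" },
# ]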
@cm
def open_pps(
brokername: str,
acctid: str,
write_on_exit: bool = True,
) -> PpTable:
'''
Read out broker-specific position entries from
incremental update file: ``pps.toml``.
'''
conf, path = config.load('pps')
brokersection = conf.setdefault(brokername, {})
pps = brokersection.setdefault(acctid, {})
# TODO: ideally we can pass in an existing
# pps state to this right? such that we
# don't have to do a ledger reload all the
# time.. a couple ideas I can think of,
# - mirror this in some client side actor which
# does the actual ledger updates (say the paper
# engine proc if we decide to always spawn it?),
# - do diffs against updates from the ledger writer
# actor and the in-mem state here?
pp_objs = {}
table = PpTable(
brokername,
acctid,
pp_objs,
conf=conf,
)
# unmarshal/load ``pps.toml`` config entries into object form
# and update `PpTable` obj entries.
for fqsn, entry in pps.items():
bsuid = entry['bsuid']
# convert clears sub-tables (only in this form
# for toml re-presentation) back into a master table.
clears_list = entry['clears']
# index clears entries in "object" form by tid in a top
# level dict instead of a list (as is presented in our
# ``pps.toml``).
clears = pp_objs.setdefault(bsuid, {})
# TODO: should we make a ``Struct`` for clear/event entries?
# convert "clear events table" from the toml config (list of
# a dicts) and load it into object form for use in position
# processing of new clear events.
trans: list[Transaction] = []
for clears_table in clears_list:
tid = clears_table.pop('tid')
dtstr = clears_table['dt']
dt = pendulum.parse(dtstr)
clears_table['dt'] = dt
trans.append(Transaction(
fqsn=bsuid,
bsuid=bsuid,
tid=tid,
size=clears_table['size'],
price=clears_table['price'],
cost=clears_table['cost'],
dt=dt,
))
clears[tid] = clears_table
size = entry['size']
# TODO: remove, but handle the old field name for now
ppu = entry.get('ppu', entry.get('be_price', 0))
split_ratio = entry.get('split_ratio')
expiry = entry.get('expiry')
if expiry:
expiry = pendulum.parse(expiry)
pp = pp_objs[bsuid] = Position(
Symbol.from_fqsn(fqsn, info={}),
size=size,
ppu=ppu,
split_ratio=split_ratio,
expiry=expiry,
bsuid=entry['bsuid'],
)
# XXX: super critical, we need to be sure to include
# all pps.toml clears to avoid reusing clears that were
# already included in the current incremental update
# state, since today's records may have already been
# processed!
for t in trans:
pp.add_clear(t)
# audit entries loaded from toml
pp.ensure_state()
try:
yield table
finally:
if write_on_exit:
table.write_config()
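A minimal read-only usage sketch; the broker/account names are hypothetical and ``.pps`` is assumed to be the attribute bound to the third ``PpTable`` field:

with open_pps('binance', 'paper', write_on_exit=False) as table:
    for bsuid, pp in table.pps.items():
        print(f'{bsuid}: size={pp.size} ppu={pp.ppu}')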
if __name__ == '__main__':
import sys
args = sys.argv
assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
args = args[1:]
for acctid in args:
broker, name = acctid.split('.')
trans, updated_pps = load_pps_from_ledger(broker, name)
print(
f'Processing transactions into pps for {broker}:{name}\n'
f'{pformat(trans)}\n\n'
f'{pformat(updated_pps)}'
)

View File

@ -230,26 +230,25 @@ class GodWidget(QWidget):
# - we'll probably want per-instrument/provider state here?
# change the order config form over to the new chart
# XXX: since the pp config is a singleton widget we have to
# also switch it over to the new chart's internal-layout
# self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
chart = linkedsplits.chart
# chart is already in memory so just focus it
linkedsplits.show()
linkedsplits.focus()
linkedsplits.graphics_cycle()
await trio.sleep(0)
# XXX: since the pp config is a singleton widget we have to
# also switch it over to the new chart's internal-layout
# self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
chart = linkedsplits.chart
# resume feeds *after* rendering chart view asap
if chart:
chart.resume_all_feeds()
chart.resume_all_feeds()
# TODO: we need a check to see if the chart
# last had the xlast in view, if so then shift so it's
# still in view, if the user was viewing history then
# do nothing yah?
chart.default_view()
# TODO: we need a check to see if the chart
# last had the xlast in view, if so then shift so it's
# still in view, if the user was viewing history then
# do nothing yah?
chart.default_view()
self.linkedsplits = linkedsplits
symbol = linkedsplits.symbol
@ -453,6 +452,13 @@ class LinkedSplits(QWidget):
# add crosshair graphic
self.chart.addItem(self.cursor)
# axis placement
if (
_xaxis_at == 'bottom' and
'bottom' in self.chart.plotItem.axes
):
self.chart.hideAxis('bottom')
# style?
self.chart.setFrameStyle(
QFrame.StyledPanel |
@ -517,26 +523,21 @@ class LinkedSplits(QWidget):
cpw.hideAxis('left')
cpw.hideAxis('bottom')
if (
_xaxis_at == 'bottom' and (
self.xaxis_chart
or (
not self.subplots
and self.xaxis_chart is None
)
)
):
if self.xaxis_chart:
self.xaxis_chart.hideAxis('bottom')
if self.xaxis_chart:
self.xaxis_chart.hideAxis('bottom')
# presuming we only want it at the true bottom of all charts.
# XXX: uses new api from our ``pyqtgraph`` fork.
# https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
# _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
# assert 'bottom' not in self.xaxis_chart.plotItem.axes
self.xaxis_chart = cpw
cpw.showAxis('bottom')
if self.xaxis_chart is None:
self.xaxis_chart = cpw
qframe.chart = cpw
qframe.hbox.addWidget(cpw)
@ -759,18 +760,9 @@ class ChartPlotWidget(pg.PlotWidget):
self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
# indempotent startup flag for auto-yrange subsys
# to detect the "first time" y-domain graphics begin
# to be shown in the (main) graphics view.
self._on_screen: bool = False
def resume_all_feeds(self):
try:
for feed in self._feeds.values():
self.linked.godwidget._root_n.start_soon(feed.resume)
except RuntimeError:
# TODO: cancel the qtractor runtime here?
raise
for feed in self._feeds.values():
self.linked.godwidget._root_n.start_soon(feed.resume)
def pause_all_feeds(self):
for feed in self._feeds.values():
@ -867,8 +859,7 @@ class ChartPlotWidget(pg.PlotWidget):
def default_view(
self,
bars_from_y: int = 616,
do_ds: bool = True,
bars_from_y: int = 3000,
) -> None:
'''
@ -929,11 +920,8 @@ class ChartPlotWidget(pg.PlotWidget):
max=end,
padding=0,
)
if do_ds:
self.view.maybe_downsample_graphics()
view._set_yrange()
self.view.maybe_downsample_graphics()
view._set_yrange()
try:
self.linked.graphics_cycle()
except IndexError:
@ -1267,6 +1255,7 @@ class ChartPlotWidget(pg.PlotWidget):
If ``bars_range`` is provided use that range.
'''
# print(f'Chart[{self.name}].maxmin()')
profiler = pg.debug.Profiler(
msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
disabled=not pg_profile_enabled(),
@ -1298,18 +1287,11 @@ class ChartPlotWidget(pg.PlotWidget):
key = round(lbar), round(rbar)
res = flow.maxmin(*key)
if (
res is None
):
log.warning(
if res == (None, None):
log.error(
f"{flow_key} no mxmn for bars_range => {key} !?"
)
res = 0, 0
if not self._on_screen:
self.default_view(do_ds=False)
self._on_screen = True
profiler(f'yrange mxmn: {key} -> {res}')
# print(f'{flow_key} yrange mxmn: {key} -> {res}')
return res

View File

@ -223,20 +223,14 @@ def ds_m4(
assert frames >= (xrange / uppx)
# call into ``numba``
(
nb,
x_out,
y_out,
ymn,
ymx,
) = _m4(
nb, i_win, y_out = _m4(
x,
y,
frames,
# TODO: see func below..
# x_out,
# i_win,
# y_out,
# first index in x data to start at
@ -249,11 +243,10 @@ def ds_m4(
# filter out any overshoot in the input allocation arrays by
# removing zero-ed tail entries which should start at a certain
# index.
x_out = x_out[x_out != 0]
y_out = y_out[:x_out.size]
i_win = i_win[i_win != 0]
y_out = y_out[:i_win.size]
# print(f'M4 output ymn, ymx: {ymn},{ymx}')
return nb, x_out, y_out, ymn, ymx
return nb, i_win, y_out
@jit(
@ -267,8 +260,8 @@ def _m4(
frames: int,
# TODO: using this approach, having the ``.zeros()`` alloc lines
# below in pure python, there were seg faults and alloc crashes..
# TODO: using this approach by having the ``.zeros()`` alloc lines
# below, in pure python, was causing seg faults and alloc crashes..
# we might need to see how it behaves with shm arrays and consider
# allocating them once at startup?
@ -281,22 +274,14 @@ def _m4(
x_start: int,
step: float,
) -> tuple[
int,
np.ndarray,
np.ndarray,
float,
float,
]:
'''
Implementation of the m4 algorithm in ``numba``:
http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
) -> int:
# nbins = len(i_win)
# count = len(xs)
'''
# these are pre-allocated and mutated by ``numba``
# code in-place.
y_out = np.zeros((frames, 4), ys.dtype)
x_out = np.zeros(frames, xs.dtype)
i_win = np.zeros(frames, xs.dtype)
bincount = 0
x_left = x_start
@ -310,34 +295,24 @@ def _m4(
# set all bins in the left-most entry to the starting left-most x value
# (aka a row broadcast).
x_out[bincount] = x_left
i_win[bincount] = x_left
# set all y-values to the first value passed in.
y_out[bincount] = ys[0]
# full input y-data mx and mn
mx: float = -np.inf
mn: float = np.inf
# compute OHLC style max / min values per window sized x-frame.
for i in range(len(xs)):
x = xs[i]
y = ys[i]
if x < x_left + step: # the current window "step" is [bin, bin+1)
ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
y_out[bincount, 1] = min(y, y_out[bincount, 1])
y_out[bincount, 2] = max(y, y_out[bincount, 2])
y_out[bincount, 3] = y
mx = max(mx, ymx)
mn = min(mn, ymn)
else:
# Find the next bin
while x >= x_left + step:
x_left += step
bincount += 1
x_out[bincount] = x_left
i_win[bincount] = x_left
y_out[bincount] = y
return bincount, x_out, y_out, mn, mx
return bincount, i_win, y_out
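Since the interleaved hunks above obscure the control flow, here is a hedged, pure-python reference of the core M4 idea (4 retained values per x-window, per the paper linked in the docstring); it is illustrative only and not the ``numba``-jitted implementation:

import numpy as np

def m4_reference(
    xs: np.ndarray,
    ys: np.ndarray,
    step: float,
) -> np.ndarray:
    # per x-window of width ``step`` retain only the first, min,
    # max and last y-values: 4 points per "pixel column".
    rows = []
    x_left = xs[0]
    first = mn = mx = last = ys[0]
    for x, y in zip(xs, ys):
        if x < x_left + step:
            mn, mx, last = min(mn, y), max(mx, y), y
        else:
            rows.append((x_left, first, mn, mx, last))
            # advance to the bin containing this x value
            while x >= x_left + step:
                x_left += step
            first = mn = mx = last = y
    rows.append((x_left, first, mn, mx, last))
    return np.array(rows)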

View File

@ -105,10 +105,6 @@ def chart_maxmin(
mn, mx = out
mx_vlm_in_view = 0
# TODO: we need to NOT call this to avoid a manual
# np.max/min trigger and especially on the vlm_chart
# flows which aren't shown.. like vlm?
if vlm_chart:
out = vlm_chart.maxmin()
if out:
@ -136,16 +132,16 @@ class DisplayState:
# high level chart handles
linked: LinkedSplits
chart: ChartPlotWidget
vlm_chart: ChartPlotWidget
# axis labels
l1: L1Labels
last_price_sticky: YAxisLabel
vlm_sticky: YAxisLabel
# misc state tracking
vars: dict[str, Any]
vlm_chart: Optional[ChartPlotWidget] = None
vlm_sticky: Optional[YAxisLabel] = None
wap_in_history: bool = False
@ -185,6 +181,9 @@ async def graphics_update_loop(
*ohlcv.array[-1][['index', 'close']]
)
if vlm_chart:
vlm_sticky = vlm_chart._ysticks['volume']
maxmin = partial(
chart_maxmin,
chart,
@ -223,9 +222,33 @@ async def graphics_update_loop(
tick_margin = 3 * tick_size
chart.show()
# view = chart.view
last_quote = time.time()
i_last = ohlcv.index
# async def iter_drain_quotes():
# # NOTE: all code below this loop is expected to be synchronous
# # and thus draw instructions are not picked up until the next
# # wait / iteration.
# async for quotes in stream:
# while True:
# try:
# moar = stream.receive_nowait()
# except trio.WouldBlock:
# yield quotes
# break
# else:
# for sym, quote in moar.items():
# ticks_frame = quote.get('ticks')
# if ticks_frame:
# quotes[sym].setdefault(
# 'ticks', []).extend(ticks_frame)
# print('pulled extra')
# yield quotes
# async for quotes in iter_drain_quotes():
ds = linked.display_state = DisplayState(**{
'quotes': {},
'linked': linked,
@ -233,6 +256,8 @@ async def graphics_update_loop(
'ohlcv': ohlcv,
'chart': chart,
'last_price_sticky': last_price_sticky,
'vlm_chart': vlm_chart,
'vlm_sticky': vlm_sticky,
'l1': l1,
'vars': {
@ -245,11 +270,6 @@ async def graphics_update_loop(
}
})
if vlm_chart:
vlm_sticky = vlm_chart._ysticks['volume']
ds.vlm_chart = vlm_chart
ds.vlm_sticky = vlm_sticky
chart.default_view()
# main real-time quotes update loop
@ -273,7 +293,6 @@ async def graphics_update_loop(
# chart isn't active/shown so skip render cycle and pause feed(s)
if chart.linked.isHidden():
print('skipping update')
chart.pause_all_feeds()
continue
@ -322,7 +341,7 @@ def graphics_update_cycle(
for sym, quote in ds.quotes.items():
# compute the first available graphic's x-units-per-pixel
uppx = chart.view.x_uppx()
uppx = vlm_chart.view.x_uppx()
# NOTE: vlm may be written by the ``brokerd`` backend
# even though a tick sample is not emitted.
@ -397,8 +416,10 @@ def graphics_update_cycle(
)
or trigger_all
):
# TODO: we should track and compute whether the last
# pixel in a curve should show new data based on uppx
# and then iff update curves and shift?
chart.increment_view(steps=i_diff)
# chart.increment_view(steps=i_diff + round(append_diff - uppx))
if vlm_chart:
vlm_chart.increment_view(steps=i_diff)
@ -456,6 +477,7 @@ def graphics_update_cycle(
):
chart.update_graphics_from_flow(
chart.name,
# do_append=uppx < update_uppx,
do_append=do_append,
)
@ -786,10 +808,7 @@ async def display_symbol_data(
async with trio.open_nursery() as ln:
# if available load volume related built-in display(s)
if (
not symbol.broker_info[provider].get('no_vlm', False)
and has_vlm(ohlcv)
):
if has_vlm(ohlcv):
vlm_chart = await ln.start(
open_vlm_displays,
linked,
@ -824,9 +843,6 @@ async def display_symbol_data(
order_mode_started
)
):
if not vlm_chart:
chart.default_view()
# let Qt run to render all widgets and make sure the
# sidepanes line up vertically.
await trio.sleep(0)

View File

@ -140,9 +140,9 @@ class LineEditor:
) -> LevelLine:
# staged_line = self._active_staged_line
# if not staged_line:
# raise RuntimeError("No line is currently staged!?")
staged_line = self._active_staged_line
if not staged_line:
raise RuntimeError("No line is currently staged!?")
# for now, until submission response arrives
line.hide_labels()

View File

@ -21,6 +21,7 @@ Qt event proxying and processing using ``trio`` mem chans.
from contextlib import asynccontextmanager, AsyncExitStack
from typing import Callable
from pydantic import BaseModel
import trio
from PyQt5 import QtCore
from PyQt5.QtCore import QEvent, pyqtBoundSignal
@ -29,8 +30,6 @@ from PyQt5.QtWidgets import (
QGraphicsSceneMouseEvent as gs_mouse,
)
from ..data.types import Struct
MOUSE_EVENTS = {
gs_mouse.GraphicsSceneMousePress,
@ -44,10 +43,13 @@ MOUSE_EVENTS = {
# TODO: maybe consider some constrained ints down the road?
# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
class KeyboardMsg(Struct):
class KeyboardMsg(BaseModel):
'''Unpacked Qt keyboard event data.
'''
class Config:
arbitrary_types_allowed = True
event: QEvent
etype: int
key: int
@ -55,13 +57,16 @@ class KeyboardMsg(Struct):
txt: str
def to_tuple(self) -> tuple:
return tuple(self.to_dict().values())
return tuple(self.dict().values())
class MouseMsg(Struct):
class MouseMsg(BaseModel):
'''Unpacked Qt mouse event data.
'''
class Config:
arbitrary_types_allowed = True
event: QEvent
etype: int
button: int
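For context on the ``Struct`` base these msg types now derive from, a hedged sketch of a minimal ``msgspec``-based equivalent (the real ``piker.data.types.Struct`` may differ); ``PointMsg`` is a hypothetical example type:

import msgspec

class Struct(msgspec.Struct):
    # minimal dict conversion backing a ``.to_tuple()`` like above
    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

class PointMsg(Struct):  # hypothetical example type
    x: int
    y: int

assert PointMsg(x=1, y=2).to_dict() == {'x': 1, 'y': 2}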

View File

@ -337,7 +337,6 @@ class Flow(msgspec.Struct): # , frozen=True):
name: str
plot: pg.PlotItem
graphics: Union[Curve, BarItems]
yrange: tuple[float, float] = None
# in some cases a flow may want to change its
# graphical "type" or, "form" when downsampling,
@ -387,11 +386,10 @@ class Flow(msgspec.Struct): # , frozen=True):
lbar: int,
rbar: int,
) -> Optional[tuple[float, float]]:
) -> tuple[float, float]:
'''
Compute the cached max and min y-range values for a given
x-range determined by ``lbar`` and ``rbar`` or ``None``
if no range can be determined (yet).
x-range determined by ``lbar`` and ``rbar``.
'''
rkey = (lbar, rbar)
@ -401,44 +399,40 @@ class Flow(msgspec.Struct): # , frozen=True):
shm = self.shm
if shm is None:
return None
mxmn = None
arr = shm.array
else: # new block for profiling?..
arr = shm.array
# build relative indexes into shm array
# TODO: should we just add/use a method
# on the shm to do this?
ifirst = arr[0]['index']
slice_view = arr[
lbar - ifirst:
(rbar - ifirst) + 1
]
# build relative indexes into shm array
# TODO: should we just add/use a method
# on the shm to do this?
ifirst = arr[0]['index']
slice_view = arr[
lbar - ifirst:
(rbar - ifirst) + 1
]
if not slice_view.size:
return None
elif self.yrange:
mxmn = self.yrange
# print(f'{self.name} M4 maxmin: {mxmn}')
else:
if self.is_ohlc:
ylow = np.min(slice_view['low'])
yhigh = np.max(slice_view['high'])
if not slice_view.size:
mxmn = None
else:
view = slice_view[self.name]
ylow = np.min(view)
yhigh = np.max(view)
if self.is_ohlc:
ylow = np.min(slice_view['low'])
yhigh = np.max(slice_view['high'])
mxmn = ylow, yhigh
# print(f'{self.name} MANUAL maxmin: {mxmin}')
else:
view = slice_view[self.name]
ylow = np.min(view)
yhigh = np.max(view)
# cache result for input range
assert mxmn
self._mxmns[rkey] = mxmn
mxmn = ylow, yhigh
return mxmn
if mxmn is not None:
# cache new mxmn result
self._mxmns[rkey] = mxmn
return mxmn
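The caching pattern above generalizes; a hedged, standalone sketch of keying y-range results by the queried x-range (``data`` here is a plain 1d array for simplicity, unlike the shm-backed struct arrays used above):

import numpy as np
from typing import Optional

class RangeMaxMin:
    # cache (lbar, rbar) -> (ymn, ymx) results, as ``Flow._mxmns``
    # does above
    def __init__(self, data: np.ndarray):
        self.data = data
        self._mxmns: dict[tuple[int, int], tuple[float, float]] = {}

    def maxmin(self, lbar: int, rbar: int) -> Optional[tuple[float, float]]:
        key = (lbar, rbar)
        cached = self._mxmns.get(key)
        if cached is not None:
            return cached
        view = self.data[lbar:rbar + 1]
        if not view.size:
            return None
        mxmn = float(view.min()), float(view.max())
        self._mxmns[key] = mxmn
        return mxmn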
def view_range(self) -> tuple[int, int]:
'''
@ -634,13 +628,10 @@ class Flow(msgspec.Struct): # , frozen=True):
# source data so we clear our path data in prep
# to generate a new one from original source data.
new_sample_rate = True
showing_src_data = True
should_ds = False
should_redraw = True
showing_src_data = True
# reset yrange to be computed from source data
self.yrange = None
# MAIN RENDER LOGIC:
# - determine in view data and redraw on range change
# - determine downsampling ops if needed
@ -666,10 +657,6 @@ class Flow(msgspec.Struct): # , frozen=True):
**rkwargs,
)
if showing_src_data:
# print(f"{self.name} SHOWING SOURCE")
# reset yrange to be computed from source data
self.yrange = None
if not out:
log.warning(f'{self.name} failed to render!?')
@ -677,9 +664,6 @@ class Flow(msgspec.Struct): # , frozen=True):
path, data, reset = out
# if self.yrange:
# print(f'flow {self.name} yrange from m4: {self.yrange}')
# XXX: SUPER UGGGHHH... without this we get stale cache
# graphics that don't update until you downsampler again..
if reset:
@ -1074,7 +1058,6 @@ class Renderer(msgspec.Struct):
# xy-path data transform: convert source data to a format
# able to be passed to a `QPainterPath` rendering routine.
if not len(hist):
# XXX: this might be why the profiler only has exits?
return
x_out, y_out, connect = self.format_xy(
@ -1161,14 +1144,11 @@ class Renderer(msgspec.Struct):
elif should_ds and uppx > 1:
x_out, y_out, ymn, ymx = xy_downsample(
x_out, y_out = xy_downsample(
x_out,
y_out,
uppx,
)
self.flow.yrange = ymn, ymx
# print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')
reset = True
profiler(f'FULL PATH downsample redraw={should_ds}')
self._in_ds = True

View File

@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
# color: #19232D;
# width: 10px;
self.setRange(0, int(slots))
self.setRange(0, slots)
self.setValue(value)

View File

@ -27,13 +27,12 @@ from itertools import cycle
from typing import Optional, AsyncGenerator, Any
import numpy as np
import msgspec
from pydantic import create_model
import tractor
import pyqtgraph as pg
import trio
from trio_typing import TaskStatus
from piker.data.types import Struct
from ._axes import PriceAxis
from .._cacheables import maybe_open_context
from ..calc import humanize
@ -54,7 +53,7 @@ from ._forms import (
from ..fsp._api import maybe_mk_fsp_shm, Fsp
from ..fsp import cascade
from ..fsp._volume import (
# tina_vwap,
tina_vwap,
dolla_vlm,
flow_rates,
)
@ -154,13 +153,12 @@ async def open_fsp_sidepane(
)
# https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
FspConfig = msgspec.defstruct(
"Point",
[('name', name)] + list(params.items()),
bases=(Struct,),
FspConfig = create_model(
'FspConfig',
name=name,
**params,
)
model = FspConfig(name=name, **params)
sidepane.model = model
sidepane.model = FspConfig()
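For reference, ``msgspec.defstruct()`` builds struct types at runtime much like pydantic's ``create_model`` did here; a hedged sketch with hypothetical field names and defaults:

import msgspec

FspConfig = msgspec.defstruct(
    'FspConfig',
    [
        ('name', str),
        ('period', int, 14),  # (name, type, default)
    ],
)
cfg = FspConfig(name='dolla_vlm')
assert cfg.period == 14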
# just a logger for now until we get fsp configs up and running.
async def settings_change(
@ -442,9 +440,7 @@ class FspAdmin:
# if the chart isn't hidden try to update
# the data on screen.
if not self.linked.isHidden():
log.debug(
f'Re-syncing graphics for fsp: {ns_path}'
)
log.debug(f'Re-syncing graphics for fsp: {ns_path}')
self.linked.graphics_cycle(
trigger_all=True,
prepend_update_index=info['first'],
@ -473,10 +469,9 @@ class FspAdmin:
target=target,
readonly=True,
)
self._flow_registry[(
self.src_shm._token,
target.name
)] = dst_shm._token
self._flow_registry[
(self.src_shm._token, target.name)
] = dst_shm._token
# if not opened:
# raise RuntimeError(
@ -644,25 +639,20 @@ async def open_vlm_displays(
names: list[str],
) -> tuple[float, float]:
'''
Flows "group" maxmin loop; assumes all named flows
are in the same co-domain and thus can be sorted
as one set.
Iterates all the named flows and calls the chart
api to find their range values and returns them.
TODO: really we should probably have a more built-in API
for this?
'''
mx = 0
for name in names:
ymn, ymx = chart.maxmin(name=name)
mx = max(mx, ymx)
mxmn = chart.maxmin(name=name)
if mxmn:
ymax = mxmn[1]
if ymax > mx:
mx = ymax
return 0, mx
chart.view.maxmin = partial(multi_maxmin, names=['volume'])
# TODO: fix the x-axis label issue where if you put
# the axis on the left it's totally not lined up...
# show volume units value on LHS (for dinkus)
@ -786,7 +776,6 @@ async def open_vlm_displays(
) -> None:
for name in names:
if 'dark' in name:
color = dark_vlm_color
elif 'rate' in name:

View File

@ -221,7 +221,6 @@ async def handle_viewmode_kb_inputs(
# TODO: show pp config mini-params in status bar widget
# mode.pp_config.show()
trigger_type: str = 'dark'
if (
# 's' for "submit" to activate "live" order
Qt.Key_S in pressed or
@ -229,6 +228,9 @@ async def handle_viewmode_kb_inputs(
):
trigger_type: str = 'live'
else:
trigger_type: str = 'dark'
# order mode trigger "actions"
if Qt.Key_D in pressed: # for "damp eet"
action = 'sell'
@ -395,11 +397,8 @@ class ChartView(ViewBox):
'''
if self._ic is None:
try:
self.chart.pause_all_feeds()
self._ic = trio.Event()
except RuntimeError:
pass
self.chart.pause_all_feeds()
self._ic = trio.Event()
def signal_ic(
self,
@ -412,12 +411,9 @@ class ChartView(ViewBox):
'''
if self._ic:
try:
self._ic.set()
self._ic = None
self.chart.resume_all_feeds()
except RuntimeError:
pass
self._ic.set()
self._ic = None
self.chart.resume_all_feeds()
@asynccontextmanager
async def open_async_input_handler(
@ -673,10 +669,7 @@ class ChartView(ViewBox):
# XXX: WHY
ev.accept()
try:
self.start_ic()
except RuntimeError:
pass
self.start_ic()
# if self._ic is None:
# self.chart.pause_all_feeds()
# self._ic = trio.Event()
@ -930,7 +923,6 @@ class ChartView(ViewBox):
# XXX: super important to be aware of this.
# or not flow.graphics.isVisible()
):
# print(f'skipping {flow.name}')
continue
# pass in no array which will read and render from the last

View File

@ -421,10 +421,6 @@ class LevelLine(pg.InfiniteLine):
return path
@property
def marker(self) -> LevelMarker:
return self._marker
def hoverEvent(self, ev):
'''
Mouse hover callback.

View File

@ -22,9 +22,12 @@ from __future__ import annotations
from typing import (
Optional, Generic,
TypeVar, Callable,
Literal,
)
import enum
import sys
# from pydantic import BaseModel, validator
from pydantic import BaseModel, validator
from pydantic.generics import GenericModel
from PyQt5.QtWidgets import (
QWidget,
@ -35,7 +38,6 @@ from ._forms import (
# FontScaledDelegate,
Edit,
)
from ..data.types import Struct
DataType = TypeVar('DataType')
@ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]):
options: dict[str, DataType]
# value: DataType = None
# @validator('value') # , always=True)
@validator('value') # , always=True)
def set_value_first(
cls,
@ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]):
widget_factory = Edit
class AllocatorPane(Struct):
class AllocatorPane(BaseModel):
account = Selection[str](
options=dict.fromkeys(

View File

@ -49,17 +49,12 @@ def xy_downsample(
x_spacer: float = 0.5,
) -> tuple[
np.ndarray,
np.ndarray,
float,
float,
]:
) -> tuple[np.ndarray, np.ndarray]:
# downsample whenever more then 1 pixels per datum can be shown.
# always refresh data bounds until we get diffing
# working properly, see above..
bins, x, y, ymn, ymx = ds_m4(
bins, x, y = ds_m4(
x,
y,
uppx,
@ -72,7 +67,7 @@ def xy_downsample(
)).flatten()
y = y.flatten()
return x, y, ymn, ymx
return x, y
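A hedged usage sketch against the two-array return signature shown above; the input curve and uppx value are arbitrary:

import numpy as np

x = np.arange(10_000, dtype=float)
y = np.sin(x / 50)

# downsample at ~4 source datums per pixel-column
x_ds, y_ds = xy_downsample(x, y, uppx=4)
assert x_ds.size < x.size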
@njit(

View File

@ -19,7 +19,6 @@ Position info and display
"""
from __future__ import annotations
from copy import copy
from dataclasses import dataclass
from functools import partial
from math import floor, copysign
@ -106,8 +105,8 @@ async def update_pnl_from_feed(
# compute and display pnl status
order_mode.pane.pnl_label.format(
pnl=copysign(1, size) * pnl(
# live.ppu,
order_mode.current_pp.live_pp.ppu,
# live.avg_price,
order_mode.current_pp.live_pp.avg_price,
tick['price'],
),
)
@ -357,7 +356,7 @@ class SettingsPane:
# last historical close price
last = feed.shm.array[-1][['close']][0]
pnl_value = copysign(1, size) * pnl(
tracker.live_pp.ppu,
tracker.live_pp.avg_price,
last,
)
@ -477,7 +476,7 @@ class PositionTracker:
self.alloc = alloc
self.startup_pp = startup_pp
self.live_pp = copy(startup_pp)
self.live_pp = startup_pp.copy()
view = chart.getViewBox()
@ -557,7 +556,7 @@ class PositionTracker:
pp = position or self.live_pp
self.update_line(
pp.ppu,
pp.avg_price,
pp.size,
self.chart.linked.symbol.lot_size_digits,
)
@ -571,7 +570,7 @@ class PositionTracker:
self.hide()
else:
self._level_marker.level = pp.ppu
self._level_marker.level = pp.avg_price
# these updates are critical to avoid lag on view/scene changes
self._level_marker.update() # trigger paint

View File

@ -27,20 +27,20 @@ import time
from typing import Optional, Dict, Callable, Any
import uuid
from pydantic import BaseModel
import tractor
import trio
from PyQt5.QtCore import Qt
from .. import config
from ..pp import Position
from ..clearing._client import open_ems, OrderBook
from ..clearing._allocate import (
mk_allocator,
Position,
)
from ._style import _font
from ..data._source import Symbol
from ..data.feed import Feed
from ..data.types import Struct
from ..log import get_logger
from ._editors import LineEditor, ArrowEditor
from ._lines import order_line, LevelLine
@ -49,23 +49,17 @@ from ._position import (
SettingsPane,
)
from ._forms import FieldsForm
# from ._label import FormatLabel
from ._window import MultiStatus
from ..clearing._messages import (
Order,
Status,
# BrokerdOrder,
# BrokerdStatus,
BrokerdPosition,
)
from ..clearing._messages import Order, BrokerdPosition
from ._forms import open_form_input_handling
log = get_logger(__name__)
class Dialog(Struct):
'''
Trade dialogue meta-data describing the lifetime
class OrderDialog(BaseModel):
'''Trade dialogue meta-data describing the lifetime
of an order submission to ``emsd`` from a chart.
'''
@ -78,6 +72,41 @@ class Dialog(Struct):
msgs: dict[str, dict] = {}
fills: Dict[str, Any] = {}
class Config:
arbitrary_types_allowed = True
underscore_attrs_are_private = False
def on_level_change_update_next_order_info(
level: float,
# these are all ``partial``-ed in at callback assignment time.
line: LevelLine,
order: Order,
tracker: PositionTracker,
) -> None:
'''A callback applied for each level change to the line
which will recompute the order size based on allocator
settings. This is assigned inside
``OrderMode.line_from_order()``
'''
# NOTE: the ``Order.account`` is set at order stage time
# inside ``OrderMode.line_from_order()``.
order_info = tracker.alloc.next_order_info(
startup_pp=tracker.startup_pp,
live_pp=tracker.live_pp,
price=level,
action=order.action,
)
line.update_labels(order_info)
# update bound-in staged order
order.price = level
order.size = order_info['size']
@dataclass
class OrderMode:
@ -114,7 +143,7 @@ class OrderMode:
current_pp: Optional[PositionTracker] = None
active: bool = False
name: str = 'order'
dialogs: dict[str, Dialog] = field(default_factory=dict)
dialogs: dict[str, OrderDialog] = field(default_factory=dict)
_colors = {
'alert': 'alert_yellow',
@ -123,45 +152,12 @@ class OrderMode:
}
_staged_order: Optional[Order] = None
def on_level_change_update_next_order_info(
self,
level: float,
# these are all ``partial``-ed in at callback assignment time.
line: LevelLine,
order: Order,
tracker: PositionTracker,
) -> None:
'''
A callback applied for each level change to the line
which will recompute the order size based on allocator
settings. This is assigned inside
``OrderMode.line_from_order()``
'''
# NOTE: the ``Order.account`` is set at order stage time inside
# ``OrderMode.line_from_order()`` or is inside ``Order`` msg
# field for loaded orders.
order_info = tracker.alloc.next_order_info(
startup_pp=tracker.startup_pp,
live_pp=tracker.live_pp,
price=level,
action=order.action,
)
line.update_labels(order_info)
# update bound-in staged order
order.price = level
order.size = order_info['size']
# when an order is changed we flip the settings side-pane to
# reflect the corresponding account and pos info.
self.pane.on_ui_settings_change('account', order.account)
def line_from_order(
self,
order: Order,
symbol: Symbol,
**line_kwargs,
) -> LevelLine:
@ -179,8 +175,8 @@ class OrderMode:
color=self._colors[order.action],
dotted=True if (
order.exec_mode == 'dark'
and order.action != 'alert'
order.exec_mode == 'dark' and
order.action != 'alert'
) else False,
**line_kwargs,
@ -190,12 +186,10 @@ class OrderMode:
# immediately
if order.action != 'alert':
line._on_level_change = partial(
self.on_level_change_update_next_order_info,
on_level_change_update_next_order_info,
line=line,
order=order,
# use the corresponding position tracker for the
# order's account.
tracker=self.trackers[order.account],
tracker=self.current_pp,
)
else:
@ -244,6 +238,8 @@ class OrderMode:
line = self.line_from_order(
order,
symbol,
show_markers=True,
# just for the stage line to avoid
# flickering while moving the cursor
@ -255,6 +251,7 @@ class OrderMode:
# prevent flickering of marker while moving/tracking cursor
only_show_markers_on_hover=False,
)
line = self.lines.stage_line(line)
# hide crosshair y-line and label
@ -267,26 +264,28 @@ class OrderMode:
def submit_order(
self,
send_msg: bool = True,
order: Optional[Order] = None,
) -> Dialog:
'''
Send execution order to EMS and return a level line to
) -> OrderDialog:
'''Send execution order to EMS and return a level line to
represent the order on a chart.
'''
if not order:
staged = self._staged_order
# apply order fields for ems
oid = str(uuid.uuid4())
order = staged.copy()
order.oid = oid
staged = self._staged_order
symbol: Symbol = staged.symbol
oid = str(uuid.uuid4())
order.symbol = order.symbol.front_fqsn()
# format order data for ems
fqsn = symbol.front_fqsn()
order = staged.copy(
update={
'symbol': fqsn,
'oid': oid,
}
)
line = self.line_from_order(
order,
symbol,
show_markers=True,
only_show_markers_on_hover=True,
@ -304,17 +303,17 @@ class OrderMode:
# color once the submission ack arrives.
self.lines.submit_line(
line=line,
uuid=order.oid,
uuid=oid,
)
dialog = Dialog(
uuid=order.oid,
dialog = OrderDialog(
uuid=oid,
order=order,
symbol=order.symbol,
symbol=symbol,
line=line,
last_status_close=self.multistatus.open_status(
f'submitting {order.exec_mode}-{order.action}',
final_msg=f'submitted {order.exec_mode}-{order.action}',
f'submitting {self._trigger_type}-{order.action}',
final_msg=f'submitted {self._trigger_type}-{order.action}',
clear_on_next=True,
)
)
@ -324,21 +323,14 @@ class OrderMode:
# enter submission which will be popped once a response
# from the EMS is received to move the order to a different status
self.dialogs[order.oid] = dialog
self.dialogs[oid] = dialog
# hook up mouse drag handlers
line._on_drag_start = self.order_line_modify_start
line._on_drag_end = self.order_line_modify_complete
# send order cmd to ems
if send_msg:
self.book.send(order)
else:
# just register for control over this order
# TODO: some kind of mini-perms system here based on
# an out-of-band tagging/auth sub-sys for multiplayer
# order control?
self.book._sent_orders[order.oid] = order
self.book.send(order)
return dialog
@ -376,7 +368,7 @@ class OrderMode:
self,
uuid: str
) -> Dialog:
) -> OrderDialog:
'''
Order submitted status event handler.
@ -431,7 +423,7 @@ class OrderMode:
self,
uuid: str,
msg: Status,
msg: Dict[str, Any],
) -> None:
@ -455,7 +447,7 @@ class OrderMode:
# TODO: add in standard fill/exec info that maybe we
# pack in a broker independent way?
f'{msg.resp}: {msg.req.price}',
f'{msg["resp"]}: {msg["trigger_price"]}',
],
)
log.runtime(result)
@ -515,7 +507,7 @@ class OrderMode:
oid = dialog.uuid
cancel_status_close = self.multistatus.open_status(
f'cancelling order {oid}',
f'cancelling order {oid[:6]}',
group_key=key,
)
dialog.last_status_close = cancel_status_close
@ -525,44 +517,6 @@ class OrderMode:
return ids
def load_unknown_dialog_from_msg(
self,
msg: Status,
) -> Dialog:
# NOTE: the `.order` attr **must** be set with the
# equivalent order msg in order to be loaded.
order = msg.req
oid = str(msg.oid)
symbol = order.symbol
# TODO: MEGA UGGG ZONEEEE!
src = msg.src
if (
src
and src not in ('dark', 'paperboi')
and src not in symbol
):
fqsn = symbol + '.' + src
brokername = src
else:
fqsn = symbol
*head, brokername = fqsn.rsplit('.')
# fill out complex fields
order.oid = str(order.oid)
order.brokers = [brokername]
order.symbol = Symbol.from_fqsn(
fqsn=fqsn,
info={},
)
dialog = self.submit_order(
send_msg=False,
order=order,
)
assert self.dialogs[oid] == dialog
return dialog
@asynccontextmanager
async def open_order_mode(
@ -600,7 +554,6 @@ async def open_order_mode(
trades_stream,
position_msgs,
brokerd_accounts,
ems_dialog_msgs,
),
trio.open_nursery() as tn,
@ -624,9 +577,9 @@ async def open_order_mode(
providers=symbol.brokers
)
# XXX: ``brokerd`` delivers a set of account names that it
# allows use of but the user also can define the accounts they'd
# like to use, in order, in their `brokers.toml` file.
# XXX: ``brokerd`` delivers a set of account names that it allows
# use of but the user also can define the accounts they'd like
# to use, in order, in their `brokers.toml` file.
accounts = {}
for name in brokerd_accounts:
# ensure name is in ``brokers.toml``
@ -639,21 +592,10 @@ async def open_order_mode(
iter(accounts.keys())
) if accounts else 'paper'
# Pack position messages by account, should only be one-to-one.
# NOTE: requires the backend exactly specifies
# the expected symbol key in its positions msg.
pps_by_account = {}
for (broker, acctid), msgs in position_msgs.items():
for msg in msgs:
sym = msg['symbol']
if (
(sym == symkey) or (
# mega-UGH, i think we need to fix the FQSN
# stuff sooner then later..
sym == symkey.removesuffix(f'.{broker}'))
):
pps_by_account[acctid] = msg
pp_msgs = position_msgs.get(symkey, ())
pps_by_account = {msg['account']: msg for msg in pp_msgs}
# update pp trackers with data relayed from ``brokerd``.
for account_name in accounts:
@ -662,10 +604,7 @@ async def open_order_mode(
startup_pp = Position(
symbol=symbol,
size=0,
ppu=0,
# XXX: BLEH, do we care about this on the client side?
bsuid=symbol,
avg_price=0,
)
msg = pps_by_account.get(account_name)
if msg:
@ -705,7 +644,7 @@ async def open_order_mode(
# setup order mode sidepane widgets
form: FieldsForm = chart.sidepane
form.vbox.setSpacing(
int((1 + 5 / 8) * _font.px_size)
int((1 + 5/8)*_font.px_size)
)
from ._feedstatus import mk_feed_label
@ -755,7 +694,7 @@ async def open_order_mode(
order_pane.order_mode = mode
# select a pp to track
tracker: PositionTracker = trackers[pp_account]
tracker = trackers[pp_account]
mode.current_pp = tracker
tracker.show()
tracker.hide_info()
@ -807,186 +746,144 @@ async def open_order_mode(
# to handle input since the ems connection is ready
started.set()
for oid, msg in ems_dialog_msgs.items():
# HACK ALERT: ensure a resp field is filled out since
# technically the call below expects a ``Status``. TODO:
# parse into proper ``Status`` equivalents ems-side?
# msg.setdefault('resp', msg['broker_details']['resp'])
# msg.setdefault('oid', msg['broker_details']['oid'])
msg['brokerd_msg'] = msg
await process_trade_msg(
mode,
book,
msg,
)
tn.start_soon(
process_trades_and_update_ui,
trades_stream,
tn,
feed,
mode,
trades_stream,
book,
)
yield mode
async def process_trades_and_update_ui(
trades_stream: tractor.MsgStream,
n: trio.Nursery,
feed: Feed,
mode: OrderMode,
trades_stream: tractor.MsgStream,
book: OrderBook,
) -> None:
get_index = mode.chart.get_index
global _pnl_tasks
# this is where we receive **back** messages
# about executions **from** the EMS actor
async for msg in trades_stream:
await process_trade_msg(
mode,
book,
msg,
)
fmsg = pformat(msg)
log.info(f'Received order msg:\n{fmsg}')
async def process_trade_msg(
mode: OrderMode,
book: OrderBook,
msg: dict,
) -> tuple[Dialog, Status]:
get_index = mode.chart.get_index
fmsg = pformat(msg)
log.debug(f'Received order msg:\n{fmsg}')
name = msg['name']
if name in (
'position',
):
sym = mode.chart.linked.symbol
pp_msg_symbol = msg['symbol'].lower()
fqsn = sym.front_fqsn()
broker, key = sym.front_feed()
if (
pp_msg_symbol == fqsn
or pp_msg_symbol == fqsn.removesuffix(f'.{broker}')
name = msg['name']
if name in (
'position',
):
log.info(f'{fqsn} matched pp msg: {fmsg}')
tracker = mode.trackers[msg['account']]
tracker.live_pp.update_from_msg(msg)
# update order pane widgets
tracker.update_from_pp()
mode.pane.update_status_ui(tracker)
sym = mode.chart.linked.symbol
pp_msg_symbol = msg['symbol'].lower()
fqsn = sym.front_fqsn()
broker, key = sym.front_feed()
# print(
# f'pp msg symbol: {pp_msg_symbol}\n',
# f'fqsn: {fqsn}\n',
# f'front key: {key}\n',
# )
if tracker.live_pp.size:
# display pnl
mode.pane.display_pnl(tracker)
if (
pp_msg_symbol == fqsn.replace(f'.{broker}', '')
):
tracker = mode.trackers[msg['account']]
tracker.live_pp.update_from_msg(msg)
# update order pane widgets
tracker.update_from_pp()
mode.pane.update_status_ui(tracker)
# short circuit to next msg to avoid
# unnecessary msg content lookups
return
if tracker.live_pp.size:
# display pnl
mode.pane.display_pnl(tracker)
msg = Status(**msg)
resp = msg.resp
oid = msg.oid
dialog: Dialog = mode.dialogs.get(oid)
# short circuit to next msg to avoid
# unnecessary msg content lookups
continue
match msg:
case Status(resp='dark_open' | 'open'):
resp = msg['resp']
oid = msg['oid']
if dialog is not None:
# show line label once order is live
mode.on_submit(oid)
dialog = mode.dialogs.get(oid)
if dialog is None:
log.warning(f'received msg for untracked dialog:\n{fmsg}')
else:
log.warning(
f'received msg for untracked dialog:\n{fmsg}'
)
assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}'
# TODO: enable pure tracking / mirroring of dialogs
# is desired.
continue
sym = mode.chart.linked.symbol
fqsn = sym.front_fqsn()
order = Order(**msg.req)
if (
((order.symbol + f'.{msg.src}') == fqsn)
# record message to dialog tracking
dialog.msgs[oid] = msg
# an existing dark order for the same symbol
or (
order.symbol == fqsn
and (
msg.src in ('dark', 'paperboi')
or (msg.src in fqsn)
# response to 'action' request (buy/sell)
if resp in (
'dark_submitted',
'broker_submitted'
):
)
)
):
msg.req = order
dialog = mode.load_unknown_dialog_from_msg(msg)
mode.on_submit(oid)
# return dialog, msg
# show line label once order is live
mode.on_submit(oid)
case Status(resp='error'):
# resp to 'cancel' request or error condition
# for action request
elif resp in (
'broker_cancelled',
'broker_inactive',
'broker_errored',
'dark_cancelled'
):
# delete level line from view
mode.on_cancel(oid)
broker_msg = msg.brokerd_msg
log.error(
f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
)
broker_msg = msg['brokerd_msg']
log.warning(f'Order {oid} failed with:\n{pformat(broker_msg)}')
case Status(resp='canceled'):
# delete level line from view
mode.on_cancel(oid)
req = Order(**msg.req)
log.cancel(f'Canceled {req.action}:{oid}')
case Status(
resp='triggered',
# req=Order(exec_mode='dark') # TODO:
req={'exec_mode': 'dark'},
elif resp in (
'dark_triggered'
):
# TODO: UX for a "pending" clear/live order
log.info(f'Dark order triggered for {fmsg}')
case Status(
resp='triggered',
# req=Order(exec_mode='live', action='alert') as req, # TODO
req={'exec_mode': 'live', 'action': 'alert'} as req,
elif resp in (
'alert_triggered'
):
# should only be one "fill" for an alert
# add a triangle and remove the level line
req = Order(**req)
mode.on_fill(
oid,
price=req.price,
price=msg['trigger_price'],
arrow_index=get_index(time.time()),
)
mode.lines.remove_line(uuid=oid)
msg.req = req
await mode.on_exec(oid, msg)
# response to completed 'dialog' for order request
case Status(
resp='closed',
# req=Order() as req, # TODO
req=req,
# response to completed 'action' request for buy/sell
elif resp in (
'broker_executed',
):
msg.req = Order(**req)
# right now this is just triggering a system alert
await mode.on_exec(oid, msg)
mode.lines.remove_line(uuid=oid)
if msg['brokerd_msg']['remaining'] == 0:
mode.lines.remove_line(uuid=oid)
# each clearing tick is responded individually
case Status(resp='fill'):
elif resp in (
'broker_filled',
):
# handle out-of-piker fills reporting?
known_order = book._sent_orders.get(oid)
if not known_order:
log.warning(f'order {oid} is unknown')
return
continue
action = known_order.action
details = msg.brokerd_msg
details = msg['brokerd_msg']
# TODO: some kinda progress system
mode.on_fill(
@ -1001,9 +898,3 @@ async def process_trade_msg(
# TODO: how should we look this up?
# tracker = mode.trackers[msg['account']]
# tracker.live_pp.fills.append(msg)
# record message to dialog tracking
if dialog:
dialog.msgs[oid] = msg
return dialog, msg
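The dispatch above leans on structural pattern matching over msg types; a hedged, self-contained sketch using a pared-down stand-in for the real ``Status`` msg (Python 3.10+):

import msgspec

class Status(msgspec.Struct):  # pared-down stand-in, not the real msg
    resp: str
    oid: str
    req: dict | None = None

def dispatch(msg: Status) -> str:
    match msg:
        case Status(resp='dark_open' | 'open'):
            return 'live'
        case Status(resp='error'):
            return 'errored'
        case Status(resp='triggered', req={'exec_mode': 'dark'}):
            return 'dark-triggered'
        case _:
            return 'unhandled'

assert dispatch(Status(resp='open', oid='1')) == 'live'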

View File

@ -8,6 +8,7 @@
# as more graphics stuff gets hashed out.
-e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
# our async client for ``marketstore`` (the tsdb)
-e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
@ -17,4 +18,4 @@
# ``asyncvnc`` for sending interactions to ib-gw inside docker
-e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
-e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc

View File

@ -41,17 +41,17 @@ setup(
},
install_requires=[
'toml',
'tomli', # fastest pure py reader
'click',
'colorlog',
'attrs',
'pygments',
'colorama', # numba traceback coloring
'msgspec', # performant IPC messaging and structs
'pydantic', # structured data
# async
'trio',
'trio-websocket',
'msgspec', # performant IPC messaging
'async_generator',
# from github currently (see requirements.txt)