Compare commits


No commits in common. "lifo_pps_ib" and "310_plus" have entirely different histories.

22 changed files with 447 additions and 1857 deletions

View File

@@ -38,10 +38,7 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import (
-    trades_dialogue,
-    norm_trade_records,
-)
+from .broker import trades_dialogue

 __all__ = [
     'get_client',

View File

@@ -38,21 +38,15 @@ import time
 from types import SimpleNamespace

-from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-import ib_insync as ibis
 from ib_insync.wrapper import RequestError
 from ib_insync.contract import Contract, ContractDetails
 from ib_insync.order import Order
 from ib_insync.ticker import Ticker
-from ib_insync.objects import (
-    Position,
-    Fill,
-    Execution,
-    CommissionReport,
-)
+from ib_insync.objects import Position
+import ib_insync as ibis
 from ib_insync.wrapper import Wrapper
 from ib_insync.client import Client as ib_Client
 import numpy as np
@@ -161,23 +155,30 @@ class NonShittyIB(ibis.IB):
         self.client.apiEnd += self.disconnectedEvent

+# map of symbols to contract ids
+_adhoc_cmdty_data_map = {
+    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
+
+    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
+    # https://groups.io/g/twsapi/message/44174
+    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
+}

 _futes_venues = (
     'GLOBEX',
     'NYMEX',
     'CME',
     'CMECRYPTO',
-    'COMEX',
-    'CMDTY',  # special name case..
 )

 _adhoc_futes_set = {
     # equities
     'nq.globex',
-    'mnq.globex',  # micro
+    'mnq.globex',
     'es.globex',
-    'mes.globex',  # micro
+    'mes.globex',

     # crypto$
     'brr.cmecrypto',
@@ -194,46 +195,20 @@ _adhoc_futes_set = {

     # metals
     'xauusd.cmdty',  # gold spot
     'gc.nymex',
-    'mgc.nymex',  # micro
-
-    # oil & gas
-    'cl.nymex',
+    'mgc.nymex',

     'xagusd.cmdty',  # silver spot
     'ni.nymex',  # silver futes
     'qi.comex',  # mini-silver futes
 }

-# map of symbols to contract ids
-_adhoc_symbol_map = {
-    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
-
-    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
-    # https://groups.io/g/twsapi/message/44174
-    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
-}
-for qsn in _adhoc_futes_set:
-    sym, venue = qsn.split('.')
-    assert venue.upper() in _futes_venues, f'{venue}'
-
-    _adhoc_symbol_map[sym.upper()] = (
-        {'exchange': venue},
-        {},
-    )

 # exchanges we don't support at the moment due to not knowing
 # how to do symbol-contract lookup correctly likely due
 # to not having the data feeds subscribed.
 _exch_skip_list = {
     'ASX',  # aussie stocks
     'MEXI',  # mexican stocks
-
-    # no idea
-    'VALUE',
-    'FUNDSERV',
-    'SWB2',
+    'VALUE',  # no idea
 }

 # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
@@ -286,29 +261,27 @@ class Client:

     # NOTE: the ib.client here is "throttled" to 45 rps by default

-    async def trades(self) -> dict[str, Any]:
-        '''
-        Return list of trade-fills from current session in ``dict``.
-
-        '''
-        fills: list[Fill] = self.ib.fills()
-        norm_fills: list[dict] = []
+    async def trades(
+        self,
+        # api_only: bool = False,
+
+    ) -> dict[str, Any]:
+
+        # orders = await self.ib.reqCompletedOrdersAsync(
+        #     apiOnly=api_only
+        # )
+        fills = await self.ib.reqExecutionsAsync()
+        norm_fills = []
         for fill in fills:
             fill = fill._asdict()  # namedtuple
-            for key, val in fill.items():
-                match val:
-                    case Contract() | Execution() | CommissionReport():
-                        fill[key] = asdict(val)
+            for key, val in fill.copy().items():
+                if isinstance(val, Contract):
+                    fill[key] = asdict(val)

             norm_fills.append(fill)

         return norm_fills

-    async def orders(self) -> list[Order]:
-        return await self.ib.reqAllOpenOrdersAsync(
-            apiOnly=False,
-        )
-
     async def bars(
         self,
         fqsn: str,
@@ -510,14 +483,6 @@ class Client:

         return con

-    async def get_con(
-        self,
-        conid: int,
-    ) -> Contract:
-        return await self.ib.qualifyContractsAsync(
-            ibis.Contract(conId=conid)
-        )
-
     async def find_contract(
         self,
         pattern: str,
@@ -588,7 +553,7 @@ class Client:

         # commodities
         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
-            con_kwargs, bars_kwargs = _adhoc_symbol_map[sym]
+            con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
             con = ibis.Commodity(**con_kwargs)
             con.bars_kwargs = bars_kwargs
@@ -846,23 +811,10 @@ _scan_ignore: set[tuple[str, int]] = set()

 def get_config() -> dict[str, Any]:

-    conf, path = config.load('brokers')
+    conf, path = config.load()
     section = conf.get('ib')

-    accounts = section.get('accounts')
-    if not accounts:
-        raise ValueError(
-            'brokers.toml -> `ib.accounts` must be defined\n'
-            f'location: {path}'
-        )
-
-    names = list(accounts.keys())
-    accts = section['accounts'] = bidict(accounts)
-    log.info(
-        f'brokers.toml defines {len(accts)} accounts: '
-        f'{pformat(names)}'
-    )
-
     if section is None:
         log.warning(f'No config section found for ib in {path}')
         return {}
@@ -1038,7 +990,7 @@ async def load_aio_clients(

         for acct, client in _accounts2clients.items():
             log.info(f'Disconnecting {acct}@{client}')
             client.ib.disconnect()
-            _client_cache.pop((host, port), None)
+            _client_cache.pop((host, port))


 async def load_clients_for_trio(
@@ -1067,6 +1019,9 @@ async def load_clients_for_trio(
         await asyncio.sleep(float('inf'))


+_proxies: dict[str, MethodProxy] = {}
+
+
 @acm
 async def open_client_proxies() -> tuple[
     dict[str, MethodProxy],
@@ -1089,14 +1044,13 @@ async def open_client_proxies() -> tuple[

         if cache_hit:
             log.info(f'Re-using cached clients: {clients}')

-        proxies = {}
         for acct_name, client in clients.items():
             proxy = await stack.enter_async_context(
                 open_client_proxy(client),
             )
-            proxies[acct_name] = proxy
+            _proxies[acct_name] = proxy

-        yield proxies, clients
+        yield _proxies, clients


 def get_preferred_data_client(
@@ -1245,13 +1199,11 @@ async def open_client_proxy(

     event_table = {}

     async with (
-
         to_asyncio.open_channel_from(
             open_aio_client_method_relay,
             client=client,
             event_consumers=event_table,
         ) as (first, chan),
-
         trio.open_nursery() as relay_n,
     ):
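The `Client.trades()` hunk above swaps a structural-pattern-matching normalization of ib_insync fill objects for a plain `isinstance` check. A minimal, self-contained sketch of the `match`-based version (Python 3.10+); the `Contract` dataclass here is a stand-in for `ib_insync.contract.Contract`, not its real definition:

from dataclasses import dataclass, asdict

@dataclass
class Contract:  # stand-in for ib_insync.contract.Contract
    symbol: str = ''
    conId: int = 0

def normalize(fill: dict) -> dict:
    # replace any embedded dataclass values with plain dicts so the
    # result is serialization/IPC friendly
    for key, val in fill.copy().items():
        match val:
            case Contract():
                fill[key] = asdict(val)
    return fill

print(normalize({'contract': Contract('ES', 1), 'price': 4000.0}))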

File diff suppressed because it is too large

View File

@@ -217,8 +217,8 @@ async def get_bars(
             )

         elif (
-            err.code == 162 and
-            'HMDS query returned no data' in err.message
+            err.code == 162
+            and 'HMDS query returned no data' in err.message
         ):
             # XXX: this is now done in the storage mgmt layer
             # and we shouldn't implicitly decrement the frame dt
@@ -237,13 +237,6 @@ async def get_bars(
                 frame_size=2000,
             )

-        # elif (
-        #     err.code == 162 and
-        #     'Trading TWS session is connected from a different IP address' in err.message
-        # ):
-        #     log.warning("ignoring ip address warning")
-        #     continue
-
         elif _pacing in msg:
             log.warning(
@@ -916,17 +909,17 @@ async def open_symbol_search(
                     # trigger async request
                     await trio.sleep(0)

-                    # # match against our ad-hoc set immediately
-                    # adhoc_matches = fuzzy.extractBests(
-                    #     pattern,
-                    #     list(_adhoc_futes_set),
-                    #     score_cutoff=90,
-                    # )
-                    # log.info(f'fuzzy matched adhocs: {adhoc_matches}')
-                    # adhoc_match_results = {}
-                    # if adhoc_matches:
-                    #     # TODO: do we need to pull contract details?
-                    #     adhoc_match_results = {i[0]: {} for i in adhoc_matches}
+                    # match against our ad-hoc set immediately
+                    adhoc_matches = fuzzy.extractBests(
+                        pattern,
+                        list(_adhoc_futes_set),
+                        score_cutoff=90,
+                    )
+                    log.info(f'fuzzy matched adhocs: {adhoc_matches}')
+                    adhoc_match_results = {}
+                    if adhoc_matches:
+                        # TODO: do we need to pull contract details?
+                        adhoc_match_results = {i[0]: {} for i in adhoc_matches}

                     log.debug(f'fuzzy matching stocks {stock_results}')
                     stock_matches = fuzzy.extractBests(
@@ -935,8 +928,7 @@ async def open_symbol_search(
                         score_cutoff=50,
                     )

-                    # matches = adhoc_match_results | {
-                    matches = {
+                    matches = adhoc_match_results | {
                         item[0]: {} for item in stock_matches
                     }
                     # TODO: we used to deliver contract details
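The re-enabled branch above merges the ad-hoc futures matches with the stock search results via the `|` dict-union operator (Python 3.9+). A toy sketch of the flow, assuming `fuzzy` is `fuzzywuzzy.process` (as piker aliases it elsewhere) and a made-up pattern and stock-search output:

from fuzzywuzzy import process as fuzzy

_adhoc_futes_set = {'nq.globex', 'mnq.globex', 'xauusd.cmdty'}

pattern = 'mnq'
adhoc_matches = fuzzy.extractBests(
    pattern,
    list(_adhoc_futes_set),
    score_cutoff=90,  # only near-exact ad-hoc hits
)
adhoc_match_results = {item[0]: {} for item in adhoc_matches}

stock_matches = [('MNQU2', 95)]  # pretend stock-search output
matches = adhoc_match_results | {
    item[0]: {} for item in stock_matches  # dict union: right side wins
}
print(matches)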

View File

@@ -23,10 +23,53 @@ from typing import Optional

 from bidict import bidict
 from pydantic import BaseModel, validator
-# from msgspec import Struct

 from ..data._source import Symbol
-from ..pp import Position
+from ._messages import BrokerdPosition, Status
+
+
+class Position(BaseModel):
+    '''
+    Basic pp (personal position) model with attached fills history.
+
+    This type should be IPC wire ready?
+
+    '''
+    symbol: Symbol
+
+    # last size and avg entry price
+    size: float
+    avg_price: float  # TODO: contextual pricing
+
+    # ordered record of known constituent trade messages
+    fills: list[Status] = []
+
+    def update_from_msg(
+        self,
+        msg: BrokerdPosition,
+
+    ) -> None:
+
+        # XXX: better place to do this?
+        symbol = self.symbol
+
+        lot_size_digits = symbol.lot_size_digits
+        avg_price, size = (
+            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
+            round(msg['size'], ndigits=lot_size_digits),
+        )
+
+        self.avg_price = avg_price
+        self.size = size
+
+    @property
+    def dsize(self) -> float:
+        '''
+        The "dollar" size of the pp, normally in trading (fiat) unit
+        terms.
+
+        '''
+        return self.avg_price * self.size


 _size_units = bidict({
@@ -130,7 +173,7 @@ class Allocator(BaseModel):
             l_sub_pp = self.units_limit - abs_live_size

         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.be_price
+            live_cost_basis = abs_live_size * live_pp.avg_price
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price
@@ -162,7 +205,7 @@ class Allocator(BaseModel):
             if size_unit == 'currency':
                 # compute the "projected" limit's worth of units at the
                 # current pp (weighted) price:
-                slot_size = currency_per_slot / live_pp.be_price
+                slot_size = currency_per_slot / live_pp.avg_price

             else:
                 slot_size = u_per_slot
@@ -201,12 +244,7 @@ class Allocator(BaseModel):
         if order_size < slot_size:
             # compute a fractional slots size to display
             slots_used = self.slots_used(
-                Position(
-                    symbol=sym,
-                    size=order_size,
-                    be_price=price,
-                    bsuid=sym,
-                )
+                Position(symbol=sym, size=order_size, avg_price=price)
             )

         return {
@@ -233,8 +271,8 @@ class Allocator(BaseModel):
         abs_pp_size = abs(pp.size)

         if self.size_unit == 'currency':
-            # live_currency_size = size or (abs_pp_size * pp.be_price)
-            live_currency_size = abs_pp_size * pp.be_price
+            # live_currency_size = size or (abs_pp_size * pp.avg_price)
+            live_currency_size = abs_pp_size * pp.avg_price
             prop = live_currency_size / self.currency_limit

         else:
@@ -304,7 +342,7 @@ def mk_allocator(
     # if the current position is already greater than the limit
     # settings, increase the limit to the current position
     if alloc.size_unit == 'currency':
-        startup_size = startup_pp.size * startup_pp.be_price
+        startup_size = startup_pp.size * startup_pp.avg_price

         if startup_size > alloc.currency_limit:
             alloc.currency_limit = round(startup_size, ndigits=2)
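The currency branch of the allocator, seen in the hunks above, sizes orders in "slots" of account currency. A worked sketch of that arithmetic; variable names mirror the hunks, the numbers are made up:

# worked example of the currency slot-sizing math (made-up numbers)
currency_limit = 5_000.0   # total currency allocated to this symbol
slots = 4                  # number of order "slots"
price = 100.0              # current market price
live_size = 10.0           # current position size in units
avg_price = 95.0           # position's average entry price

currency_per_slot = currency_limit / slots             # 1250.0 per slot
slot_size = currency_per_slot / price                  # 12.5 units per order
live_cost_basis = abs(live_size) * avg_price           # 950.0 already deployed
l_sub_pp = (currency_limit - live_cost_basis) / price  # 40.5 units of headroom
print(slot_size, l_sub_pp)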

View File

@@ -258,6 +258,6 @@ class BrokerdPosition(BaseModel):
     broker: str
     account: str
     symbol: str
-    currency: str
     size: float
     avg_price: float
+    currency: str = ''

View File

@@ -31,8 +31,6 @@ import tractor
 from dataclasses import dataclass

 from .. import data
-from ..data._source import Symbol
-from ..pp import Position
 from ..data._normalize import iterticks
 from ..data._source import unpack_fqsn
 from ..log import get_logger
@@ -259,14 +257,29 @@ class PaperBoi:
             )
         )

-        # delegate update to `.pp.Position.lifo_update()`
-        pp = Position(
-            Symbol(key=symbol),
-            size=pp_msg.size,
-            be_price=pp_msg.avg_price,
-            bsuid=symbol,
-        )
-        pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price)
+        # "avg position price" calcs
+        # TODO: eventually it'd be nice to have a small set of routines
+        # to do this stuff from a sequence of cleared orders to enable
+        # so called "contextual positions".
+        new_size = size + pp_msg.size
+
+        # old size minus the new size gives us size differential with
+        # +ve -> increase in pp size
+        # -ve -> decrease in pp size
+        size_diff = abs(new_size) - abs(pp_msg.size)
+
+        if new_size == 0:
+            pp_msg.avg_price = 0
+
+        elif size_diff > 0:
+            # only update the "average position price" when the position
+            # size increases not when it decreases (i.e. the position is
+            # being made smaller)
+            pp_msg.avg_price = (
+                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
+            ) / abs(new_size)
+
+        pp_msg.size = new_size

         await self.ems_trades_stream.send(pp_msg.dict())
@@ -377,8 +390,7 @@ async def handle_order_requests(
             account = request_msg['account']
             if account != 'paper':
                 log.error(
-                    'This is a paper account,'
-                    ' only a `paper` selection is valid'
+                    'This is a paper account, only a `paper` selection is valid'
                 )
                 await ems_order_stream.send(BrokerdError(
                     oid=request_msg['oid'],
@@ -452,7 +464,7 @@ async def trades_dialogue(

     # TODO: load paper positions per broker from .toml config file
     # and pass as symbol to position data mapping: ``dict[str, dict]``
     # await ctx.started(all_positions)
-    await ctx.started(({}, ['paper']))
+    await ctx.started(({}, {'paper',}))

     async with (
         ctx.open_stream() as ems_stream,
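The inlined block in the paper-engine hunk above is the position-averaging math that the other branch delegates to `Position.lifo_update()`: the average entry price only moves when the absolute position grows, as a size-weighted mean of the old average and the new clear's price. A worked sketch with made-up fills:

# worked example of the inlined avg-price update (made-up fills)
def update_avg(pp_size: float, pp_avg: float, size: float, price: float):
    new_size = size + pp_size
    size_diff = abs(new_size) - abs(pp_size)
    if new_size == 0:
        pp_avg = 0
    elif size_diff > 0:
        # weighted mean of the existing avg and the new clear's price
        pp_avg = (abs(size) * price + pp_avg * abs(pp_size)) / abs(new_size)
    return new_size, pp_avg

print(update_avg(0, 0, 10, 100.0))     # open 10 @ 100    -> (10, 100.0)
print(update_avg(10, 100.0, 10, 110))  # add 10 @ 110     -> (20, 105.0)
print(update_avg(20, 105.0, -5, 120))  # reduce: avg kept -> (15, 105.0)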

View File

@@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
         )
         log.info(
-            f'`marketstored` up!\n'
-            f'pid: {pid}\n'
-            f'container id: {cid[:12]}\n'
+            f'`marketstore` up!\n'
+            f'`marketstored` pid: {pid}\n'
+            f'docker container id: {cid}\n'
             f'config: {pformat(config)}'
         )

View File

@@ -21,7 +21,6 @@ Broker configuration mgmt.
 import platform
 import sys
 import os
-from os import path
 from os.path import dirname
 import shutil
 from typing import Optional
@@ -112,7 +111,6 @@ if _parent_user:

 _conf_names: set[str] = {
     'brokers',
-    'pps',
     'trades',
     'watchlists',
 }
@@ -149,21 +147,19 @@ def get_conf_path(
     conf_name: str = 'brokers',

 ) -> str:
-    '''
-    Return the top-level default config path normally under
-    ``~/.config/piker`` on linux for a given ``conf_name``, the config
-    name.
+    """Return the default config path normally under
+    ``~/.config/piker`` on linux.

     Contains files such as:
     - brokers.toml
-    - pp.toml
     - watchlists.toml
+    - trades.toml

     # maybe coming soon ;)
     - signals.toml
     - strats.toml

-    '''
+    """
     assert conf_name in _conf_names

     fn = _conf_fn_w_ext(conf_name)
     return os.path.join(
@@ -177,7 +173,7 @@ def repodir():
     Return the abspath to the repo directory.

     '''
-    dirpath = path.abspath(
+    dirpath = os.path.abspath(
         # we're 3 levels down in **this** module file
         dirname(dirname(os.path.realpath(__file__)))
     )
@@ -186,9 +182,7 @@ def repodir():

 def load(
     conf_name: str = 'brokers',
-    path: str = None,
-
-    **tomlkws,
+    path: str = None

 ) -> (dict, str):
     '''

@@ -196,7 +190,6 @@ def load(

     '''
     path = path or get_conf_path(conf_name)
-
     if not os.path.isfile(path):
         fn = _conf_fn_w_ext(conf_name)
@@ -209,11 +202,8 @@ def load(
         # if one exists.
         if os.path.isfile(template):
             shutil.copyfile(template, path)
-        else:
-            with open(path, 'w'):
-                pass  # touch

-    config = toml.load(path, **tomlkws)
+    config = toml.load(path)
     log.debug(f"Read config file {path}")
     return config, path
@@ -222,7 +212,6 @@ def write(
     config: dict,  # toml config as dict
     name: str = 'brokers',
     path: str = None,
-    **toml_kwargs,

 ) -> None:
     ''''

@@ -246,14 +235,11 @@ def write(
             f"{path}"
         )
     with open(path, 'w') as cf:
-        return toml.dump(
-            config,
-            cf,
-            **toml_kwargs,
-        )
+        return toml.dump(config, cf)


 def load_accounts(
     providers: Optional[list[str]] = None

 ) -> bidict[str, Optional[str]]:
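The `**tomlkws`/`**toml_kwargs` passthrough removed in the hunks above is what lets callers hand a custom encoder to `toml.dump` — this is how `pp.py` (further down) writes compact inline-table `pps.toml` entries via its `PpsEncoder`. A minimal sketch of the pattern, assuming the third-party `toml` package; the path and config values are made up:

# minimal sketch of the **kwargs passthrough, assuming the `toml` package
import toml

def write(config: dict, path: str, **toml_kwargs) -> None:
    with open(path, 'w') as cf:
        # e.g. a custom ``toml.TomlEncoder`` subclass can be threaded
        # through to control table layout on disk
        toml.dump(config, cf, **toml_kwargs)

write(
    {'ib': {'accounts': {'paper': 'XX000000'}}},  # made-up account entry
    '/tmp/brokers.toml',
    encoder=toml.TomlEncoder(),
)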

View File

@@ -23,7 +23,7 @@ import decimal

 from bidict import bidict
 import numpy as np
-from msgspec import Struct
+from pydantic import BaseModel
 # from numba import from_dtype

@@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     )


-class Symbol(Struct):
+class Symbol(BaseModel):
     '''
     I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
@@ -152,7 +152,9 @@ class Symbol(Struct):
         info: dict[str, Any],
         suffix: str = '',

-    ) -> Symbol:
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:

         tick_size = info.get('price_tick_size', 0.01)
         lot_tick_size = info.get('lot_tick_size', 0.0)
@@ -173,7 +175,9 @@ class Symbol(Struct):
         fqsn: str,
         info: dict[str, Any],

-    ) -> Symbol:
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:

         broker, key, suffix = unpack_fqsn(fqsn)
         return cls.from_broker_info(
             broker,
@@ -236,7 +240,7 @@ class Symbol(Struct):

         '''
         tokens = self.tokens()
-        fqsn = '.'.join(map(str.lower, tokens))
+        fqsn = '.'.join(tokens)
         return fqsn

     def iterfqsns(self) -> list[str]:

View File

@@ -114,7 +114,7 @@ async def fsp_compute(
         dict[str, np.ndarray],  # multi-output case
         np.ndarray,  # single output case
     ]
-    history_output = await anext(out_stream)
+    history_output = await out_stream.__anext__()

     func_name = func.__name__
     profiler(f'{func_name} generated history')
@@ -374,8 +374,7 @@ async def cascade(
             'key': dst_shm_token,
             'first': dst._first.value,
             'last': dst._last.value,
-        }
-    })
+        }})
         return tracker, index

     def is_synced(
def is_synced( def is_synced(

View File

@@ -1,781 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..
(looking at you `ib` and dirt-bird friends)
'''
from collections import deque
from contextlib import contextmanager as cm
# from pprint import pformat
import os
from os import path
from math import copysign
import re
import time
from typing import (
Any,
Optional,
Union,
)
from msgspec import Struct
import pendulum
from pendulum import datetime, now
import tomli
import toml
from . import config
from .brokers import get_brokermod
from .clearing._messages import BrokerdPosition, Status
from .data._source import Symbol
from .log import get_logger
log = get_logger(__name__)
@cm
def open_trade_ledger(
broker: str,
account: str,
) -> str:
'''
Idempotently create and read in a trade log file from the
``<configuration_dir>/ledgers/`` directory.
Files are named per broker account of the form
``<brokername>_<accountname>.toml``. The ``accountname`` here is the
name as defined in the user's ``brokers.toml`` config.
'''
ldir = path.join(config._config_dir, 'ledgers')
if not path.isdir(ldir):
os.makedirs(ldir)
fname = f'trades_{broker}_{account}.toml'
tradesfile = path.join(ldir, fname)
if not path.isfile(tradesfile):
log.info(
f'Creating new local trades ledger: {tradesfile}'
)
with open(tradesfile, 'w') as cf:
pass # touch
with open(tradesfile, 'rb') as cf:
start = time.time()
ledger = tomli.load(cf)
print(f'Ledger load took {time.time() - start}s')
cpy = ledger.copy()
try:
yield cpy
finally:
if cpy != ledger:
# TODO: show diff output?
# https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
print(f'Updating ledger for {tradesfile}:\n')
ledger.update(cpy)
# we write on close the mutated ledger data
with open(tradesfile, 'w') as cf:
return toml.dump(ledger, cf)
class Transaction(Struct):
# TODO: should this be ``.to`` (see below)?
fqsn: str
tid: Union[str, int] # unique transaction id
size: float
price: float
cost: float # commissions or other additional costs
dt: datetime
expiry: Optional[datetime] = None
# optional key normally derived from the broker
# backend which ensures the instrument-symbol this record
# is for is truly unique.
bsuid: Optional[Union[str, int]] = None
# optional fqsn for the source "asset"/money symbol?
# from: Optional[str] = None
class Position(Struct):
'''
Basic pp (personal/piker position) model with attached clearing
transaction history.
'''
symbol: Symbol
# can be +ve or -ve for long/short
size: float
# "breakeven price" above or below which pnl moves above and below
# zero for the entirety of the current "trade state".
be_price: float
# unique backend symbol id
bsuid: str
# ordered record of known constituent trade messages
clears: dict[
Union[str, int, Status], # trade id
dict[str, Any], # transaction history summaries
] = {}
expiry: Optional[datetime] = None
def to_dict(self) -> dict:
return {
f: getattr(self, f)
for f in self.__struct_fields__
}
def to_pretoml(self) -> dict:
'''
Prep this position's data contents for export to toml including
re-structuring of the ``.clears`` table to an array of
inline-subtables for better ``pps.toml`` compactness.
'''
d = self.to_dict()
clears = d.pop('clears')
expiry = d.pop('expiry')
if expiry:
d['expiry'] = str(expiry)
clears_list = []
for tid, data in clears.items():
inline_table = toml.TomlDecoder().get_empty_inline_table()
inline_table['tid'] = tid
for k, v in data.items():
inline_table[k] = v
clears_list.append(inline_table)
d['clears'] = clears_list
return d
def update_from_msg(
self,
msg: BrokerdPosition,
) -> None:
# XXX: better place to do this?
symbol = self.symbol
lot_size_digits = symbol.lot_size_digits
be_price, size = (
round(
msg['avg_price'],
ndigits=symbol.tick_size_digits
),
round(
msg['size'],
ndigits=lot_size_digits
),
)
self.be_price = be_price
self.size = size
@property
def dsize(self) -> float:
'''
The "dollar" size of the pp, normally in trading (fiat) unit
terms.
'''
return self.be_price * self.size
def update(
self,
t: Transaction,
) -> None:
self.clears[t.tid] = {
'cost': t.cost,
'price': t.price,
'size': t.size,
'dt': str(t.dt),
}
def lifo_update(
self,
size: float,
price: float,
cost: float = 0,
# TODO: idea: "real LIFO" dynamic positioning.
# - when a trade takes place where the pnl for
# the (set of) trade(s) is below the breakeven price
# it may be that the trader took a +ve pnl on a short(er)
# term trade in the same account.
# - in this case we could recalc the be price to
# be reverted back to its prior value before the nearest term
# trade was opened.?
# dynamic_breakeven_price: bool = False,
) -> (float, float):
'''
Incremental update using a LIFO-style weighted mean.
'''
# "avg position price" calcs
# TODO: eventually it'd be nice to have a small set of routines
# to do this stuff from a sequence of cleared orders to enable
# so called "contextual positions".
new_size = self.size + size
# old size minus the new size gives us size diff with
# +ve -> increase in pp size
# -ve -> decrease in pp size
size_diff = abs(new_size) - abs(self.size)
if new_size == 0:
self.be_price = 0
elif size_diff > 0:
# XXX: LOFI incremental update:
# only update the "average price" when
# the size increases not when it decreases (i.e. the
# position is being made smaller)
self.be_price = (
# weight of current exec = (size * price) + cost
(abs(size) * price)
+
(copysign(1, new_size) * cost) # transaction cost
+
# weight of existing be price
self.be_price * abs(self.size) # weight of previous pp
) / abs(new_size) # normalized by the new size: weighted mean.
self.size = new_size
return new_size, self.be_price
def minimize_clears(
self,
) -> dict[str, dict]:
'''
Minimize the position's clears entries by removing
all transactions before the last net zero size to avoid
unnecessary history irrelevant to the current pp state.
'''
size: float = 0
clears_since_zero: deque[tuple(str, dict)] = deque()
# scan for the last "net zero" position by
# iterating clears in reverse.
for tid, clear in reversed(self.clears.items()):
size += clear['size']
clears_since_zero.appendleft((tid, clear))
if size == 0:
break
self.clears = dict(clears_since_zero)
return self.clears
def update_pps(
records: dict[str, Transaction],
pps: Optional[dict[str, Position]] = None
) -> dict[str, Position]:
'''
Compile a set of positions from a trades ledger.
'''
pps: dict[str, Position] = pps or {}
# lifo update all pps from records
for r in records:
pp = pps.setdefault(
r.bsuid,
# if no existing pp, allocate fresh one.
Position(
Symbol.from_fqsn(
r.fqsn,
info={},
),
size=0.0,
be_price=0.0,
bsuid=r.bsuid,
expiry=r.expiry,
)
)
# don't do updates for ledger records we already have
# included in the current pps state.
if r.tid in pp.clears:
# NOTE: likely you'll see repeats of the same
# ``Transaction`` passed in here if/when you are restarting
# a ``brokerd.ib`` where the API will re-report trades from
# the current session, so we need to make sure we don't
# "double count" these in pp calculations.
continue
# lifo style "breakeven" price calc
pp.lifo_update(
r.size,
r.price,
# include transaction cost in breakeven price
# and presume the worst case of the same cost
# to exit this transaction (even though in reality
# it will be dynamic based on exit strategy).
cost=2*r.cost,
)
# track clearing data
pp.update(r)
assert len(set(pp.clears)) == len(pp.clears)
return pps
def load_pps_from_ledger(
brokername: str,
acctname: str,
# post normalization filter on ledger entries to be processed
filter_by: Optional[list[dict]] = None,
) -> dict[str, Position]:
'''
Open a ledger file by broker name and account and read in and
process any trade records into our normalized ``Transaction``
form and then pass these into the position processing routine
and deliver the two dict-sets of the active and closed pps.
'''
with open_trade_ledger(
brokername,
acctname,
) as ledger:
if not ledger:
# null case, no ledger file with content
return {}
brokermod = get_brokermod(brokername)
src_records = brokermod.norm_trade_records(ledger)
if filter_by:
bsuids = set(filter_by)
records = list(filter(lambda r: r.bsuid in bsuids, src_records))
else:
records = src_records
return update_pps(records)
def get_pps(
brokername: str,
acctids: Optional[set[str]] = set(),
) -> dict[str, dict[str, Position]]:
'''
Read out broker-specific position entries from
incremental update file: ``pps.toml``.
'''
conf, path = config.load(
'pps',
# load dicts as inlines to preserve compactness
# _dict=toml.decoder.InlineTableDict,
)
all_active = {}
all_closed = {}
# try to load any ledgers if no section found
bconf, path = config.load('brokers')
accounts = bconf[brokername]['accounts']
for account in accounts:
# TODO: instead of this filter we could
# always send all known pps but just not audit
# them since an active client might not be up?
if (
acctids and
f'{brokername}.{account}' not in acctids
):
continue
active, closed = update_pps_conf(brokername, account)
all_active.setdefault(account, {}).update(active)
all_closed.setdefault(account, {}).update(closed)
return all_active, all_closed
# TODO: instead see if we can hack tomli and tomli-w to do the same:
# - https://github.com/hukkin/tomli
# - https://github.com/hukkin/tomli-w
class PpsEncoder(toml.TomlEncoder):
'''
Special "styled" encoder that makes a ``pps.toml`` readable and
compact by putting `.clears` tables inline and everything else
flat-ish.
'''
separator = ','
def dump_list(self, v):
'''
Dump an inline list with a newline after every element and
with consideration for denoted inline table types.
'''
retval = "[\n"
for u in v:
if isinstance(u, toml.decoder.InlineTableDict):
out = self.dump_inline_table(u)
else:
out = str(self.dump_value(u))
retval += " " + out + "," + "\n"
retval += "]"
return retval
def dump_inline_table(self, section):
"""Preserve inline table in its compact syntax instead of expanding
into subsection.
https://github.com/toml-lang/toml#user-content-inline-table
"""
val_list = []
for k, v in section.items():
# if isinstance(v, toml.decoder.InlineTableDict):
if isinstance(v, dict):
val = self.dump_inline_table(v)
else:
val = str(self.dump_value(v))
val_list.append(k + " = " + val)
retval = "{ " + ", ".join(val_list) + " }"
return retval
def dump_sections(self, o, sup):
retstr = ""
if sup != "" and sup[-1] != ".":
sup += '.'
retdict = self._dict()
arraystr = ""
for section in o:
qsection = str(section)
value = o[section]
if not re.match(r'^[A-Za-z0-9_-]+$', section):
qsection = toml.encoder._dump_str(section)
# arrayoftables = False
if (
self.preserve
and isinstance(value, toml.decoder.InlineTableDict)
):
retstr += (
qsection
+
" = "
+
self.dump_inline_table(o[section])
+
'\n' # only on the final terminating left brace
)
# XXX: this code i'm pretty sure is just blatantly bad
# and/or wrong..
# if isinstance(o[section], list):
# for a in o[section]:
# if isinstance(a, dict):
# arrayoftables = True
# if arrayoftables:
# for a in o[section]:
# arraytabstr = "\n"
# arraystr += "[[" + sup + qsection + "]]\n"
# s, d = self.dump_sections(a, sup + qsection)
# if s:
# if s[0] == "[":
# arraytabstr += s
# else:
# arraystr += s
# while d:
# newd = self._dict()
# for dsec in d:
# s1, d1 = self.dump_sections(d[dsec], sup +
# qsection + "." +
# dsec)
# if s1:
# arraytabstr += ("[" + sup + qsection +
# "." + dsec + "]\n")
# arraytabstr += s1
# for s1 in d1:
# newd[dsec + "." + s1] = d1[s1]
# d = newd
# arraystr += arraytabstr
elif isinstance(value, dict):
retdict[qsection] = o[section]
elif o[section] is not None:
retstr += (
qsection
+
" = "
+
str(self.dump_value(o[section]))
)
# if not isinstance(value, dict):
if not isinstance(value, toml.decoder.InlineTableDict):
# inline tables should not contain newlines:
# https://toml.io/en/v1.0.0#inline-table
retstr += '\n'
else:
raise ValueError(value)
retstr += arraystr
return (retstr, retdict)
def load_pps_from_toml(
brokername: str,
acctid: str,
# XXX: there is an edge case here where we may want to either audit
# the retrieved ``pps.toml`` output or reprocess it since there was
# an error on write on the last attempt to update the state file
# even though the ledger *was* updated. For this cases we allow the
# caller to pass in a symbol set they'd like to reload from the
# underlying ledger to be reprocessed in computing pps state.
reload_records: Optional[dict[str, str]] = None,
update_from_ledger: bool = False,
) -> tuple[dict, dict[str, Position]]:
'''
Load and marshal to objects all pps from either an existing
``pps.toml`` config, or from scratch from a ledger file when
none yet exists.
'''
conf, path = config.load('pps')
brokersection = conf.setdefault(brokername, {})
pps = brokersection.setdefault(acctid, {})
pp_objs = {}
# no pps entry yet for this broker/account so parse any available
# ledgers to build a brand new pps state.
if not pps or update_from_ledger:
pp_objs = load_pps_from_ledger(
brokername,
acctid,
)
# Reload symbol specific ledger entries if requested by the
# caller **AND** none exist in the current pps state table.
elif (
pps and reload_records
):
# no pps entry yet for this broker/account so parse
# any available ledgers to build a pps state.
pp_objs = load_pps_from_ledger(
brokername,
acctid,
filter_by=reload_records,
)
if not pps:
log.warning(
f'No trade history could be loaded for {brokername}:{acctid}'
)
# unmarshal/load ``pps.toml`` config entries into object form.
for fqsn, entry in pps.items():
bsuid = entry['bsuid']
# convert clears sub-tables (only in this form
# for toml re-presentation) back into a master table.
clears_list = entry['clears']
# index clears entries in "object" form by tid in a top
# level dict instead of a list (as is presented in our
# ``pps.toml``).
clears = {}
for clears_table in clears_list:
tid = clears_table.pop('tid')
clears[tid] = clears_table
size = entry['size']
# TODO: an audit system for existing pps entries?
# if not len(clears) == abs(size):
# pp_objs = load_pps_from_ledger(
# brokername,
# acctid,
# filter_by=reload_records,
# )
# reason = 'size <-> len(clears) mismatch'
# raise ValueError(
# '`pps.toml` entry is invalid:\n'
# f'{fqsn}\n'
# f'{pformat(entry)}'
# )
expiry = entry.get('expiry')
if expiry:
expiry = pendulum.parse(expiry)
pp_objs[bsuid] = Position(
Symbol.from_fqsn(fqsn, info={}),
size=size,
be_price=entry['be_price'],
expiry=expiry,
bsuid=entry['bsuid'],
# XXX: super critical, we need to be sure to include
# all pps.toml clears to avoid reusing clears that were
# already included in the current incremental update
# state, since today's records may have already been
# processed!
clears=clears,
)
return conf, pp_objs
def update_pps_conf(
brokername: str,
acctid: str,
trade_records: Optional[list[Transaction]] = None,
ledger_reload: Optional[dict[str, str]] = None,
) -> tuple[
dict[str, Position],
dict[str, Position],
]:
# this maps `.bsuid` values to positions
pp_objs: dict[Union[str, int], Position]
if trade_records and ledger_reload:
for r in trade_records:
ledger_reload[r.bsuid] = r.fqsn
conf, pp_objs = load_pps_from_toml(
brokername,
acctid,
reload_records=ledger_reload,
)
# update all pp objects from any (new) trade records which
# were passed in (aka incremental update case).
if trade_records:
pp_objs = update_pps(
trade_records,
pps=pp_objs,
)
pp_entries = {} # dict-serialize all active pps
# NOTE: newly closed position are also important to report/return
# since a consumer, like an order mode UI ;), might want to react
# based on the closure.
closed_pp_objs: dict[str, Position] = {}
for bsuid in list(pp_objs):
pp = pp_objs[bsuid]
pp.minimize_clears()
if (
pp.size == 0
# drop time-expired positions (normally derivatives)
or (pp.expiry and pp.expiry < now())
):
# if expired the position is closed
pp.size = 0
# position is already closed aka "net zero"
closed_pp = pp_objs.pop(bsuid, None)
if closed_pp:
closed_pp_objs[bsuid] = closed_pp
else:
# serialize to pre-toml form
asdict = pp.to_pretoml()
if pp.expiry is None:
asdict.pop('expiry', None)
# TODO: we need to figure out how to have one top level
# listing venue here even when the backend isn't providing
# it via the trades ledger..
# drop symbol obj in serialized form
s = asdict.pop('symbol')
fqsn = s.front_fqsn()
print(f'Updating active pp: {fqsn}')
# XXX: ugh, it's cuz we push the section under
# the broker name.. maybe we need to rethink this?
brokerless_key = fqsn.rstrip(f'.{brokername}')
pp_entries[brokerless_key] = asdict
conf[brokername][acctid] = pp_entries
# TODO: why tf haven't they already done this for inline tables smh..
enc = PpsEncoder(preserve=True)
# table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table
config.write(
conf,
'pps',
encoder=enc,
)
# deliver object form of all pps in table to caller
return pp_objs, closed_pp_objs
if __name__ == '__main__':
import sys
args = sys.argv
assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
args = args[1:]
for acctid in args:
broker, name = acctid.split('.')
update_pps_conf(broker, name)
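For reference, the `Position.lifo_update()` method in the deleted `pp.py` above folds per-clear transaction costs into the breakeven price (note the `copysign` so the cost always moves the breakeven against the position's direction). A worked example of that math with made-up fills:

# worked example of the breakeven-price update in `lifo_update()`
from math import copysign

def lifo_update(size, be_price, clear_size, price, cost=0.0):
    new_size = size + clear_size
    size_diff = abs(new_size) - abs(size)
    if new_size == 0:
        be_price = 0.0
    elif size_diff > 0:
        be_price = (
            (abs(clear_size) * price)         # weight of this clear
            + (copysign(1, new_size) * cost)  # transaction cost
            + be_price * abs(size)            # weight of the prior pp
        ) / abs(new_size)                     # normalize: weighted mean
    return new_size, be_price

# buy 10 @ 100 with a $2 round-trip cost, then 10 more @ 110
size, be_price = lifo_update(0.0, 0.0, 10, 100.0, cost=2.0)
print(size, be_price)  # 10.0 100.2
size, be_price = lifo_update(size, be_price, 10, 110.0, cost=2.0)
print(size, be_price)  # 20.0 105.2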

View File

@@ -230,26 +230,25 @@ class GodWidget(QWidget):
         # - we'll probably want per-instrument/provider state here?
         #   change the order config form over to the new chart

-        # XXX: since the pp config is a singleton widget we have to
-        # also switch it over to the new chart's internal-layout
-        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
-        chart = linkedsplits.chart
-
         # chart is already in memory so just focus it
         linkedsplits.show()
         linkedsplits.focus()
         linkedsplits.graphics_cycle()
         await trio.sleep(0)

+        # XXX: since the pp config is a singleton widget we have to
+        # also switch it over to the new chart's internal-layout
+        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
+        chart = linkedsplits.chart
+
         # resume feeds *after* rendering chart view asap
-        if chart:
-            chart.resume_all_feeds()
+        chart.resume_all_feeds()

         # TODO: we need a check to see if the chart
         # last had the xlast in view, if so then shift so it's
         # still in view, if the user was viewing history then
         # do nothing yah?
         chart.default_view()

         self.linkedsplits = linkedsplits
         symbol = linkedsplits.symbol
@@ -761,18 +760,9 @@ class ChartPlotWidget(pg.PlotWidget):

         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)

-        # idempotent startup flag for auto-yrange subsys
-        # to detect the "first time" y-domain graphics begin
-        # to be shown in the (main) graphics view.
-        self._on_screen: bool = False
-
     def resume_all_feeds(self):
-        try:
-            for feed in self._feeds.values():
-                self.linked.godwidget._root_n.start_soon(feed.resume)
-        except RuntimeError:
-            # TODO: cancel the qtractor runtime here?
-            raise
+        for feed in self._feeds.values():
+            self.linked.godwidget._root_n.start_soon(feed.resume)

     def pause_all_feeds(self):
         for feed in self._feeds.values():
@@ -869,8 +859,7 @@ class ChartPlotWidget(pg.PlotWidget):

     def default_view(
         self,
-        bars_from_y: int = 616,
-        do_ds: bool = True,
+        bars_from_y: int = 3000,

     ) -> None:
         '''

@@ -931,11 +920,8 @@ class ChartPlotWidget(pg.PlotWidget):
             max=end,
             padding=0,
         )
-
-        if do_ds:
-            self.view.maybe_downsample_graphics()
-            view._set_yrange()
+        self.view.maybe_downsample_graphics()
+        view._set_yrange()

         try:
             self.linked.graphics_cycle()
         except IndexError:
@@ -1269,6 +1255,7 @@ class ChartPlotWidget(pg.PlotWidget):
         If ``bars_range`` is provided use that range.

         '''
+        # print(f'Chart[{self.name}].maxmin()')
         profiler = pg.debug.Profiler(
             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
             disabled=not pg_profile_enabled(),
@@ -1300,18 +1287,11 @@ class ChartPlotWidget(pg.PlotWidget):
             key = round(lbar), round(rbar)
             res = flow.maxmin(*key)

-            if (
-                res is None
-            ):
-                log.warning(
+            if res == (None, None):
+                log.error(
                     f"{flow_key} no mxmn for bars_range => {key} !?"
                 )
                 res = 0, 0
-                if not self._on_screen:
-                    self.default_view(do_ds=False)
-                    self._on_screen = True

             profiler(f'yrange mxmn: {key} -> {res}')
-            # print(f'{flow_key} yrange mxmn: {key} -> {res}')
             return res

View File

@@ -223,20 +223,14 @@ def ds_m4(
     assert frames >= (xrange / uppx)

     # call into ``numba``
-    (
-        nb,
-        x_out,
-        y_out,
-        ymn,
-        ymx,
-    ) = _m4(
+    nb, i_win, y_out = _m4(
         x,
         y,

         frames,

         # TODO: see func below..
-        # x_out,
+        # i_win,
         # y_out,

         # first index in x data to start at

@@ -249,11 +243,10 @@ def ds_m4(
     # filter out any overshoot in the input allocation arrays by
     # removing zero-ed tail entries which should start at a certain
     # index.
-    x_out = x_out[x_out != 0]
-    y_out = y_out[:x_out.size]
+    i_win = i_win[i_win != 0]
+    y_out = y_out[:i_win.size]

-    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
-    return nb, x_out, y_out, ymn, ymx
+    return nb, i_win, y_out


 @jit(
@@ -267,8 +260,8 @@ def _m4(

     frames: int,

-    # TODO: using this approach, having the ``.zeros()`` alloc lines
-    # below in pure python, there were segs faults and alloc crashes..
+    # TODO: using this approach by having the ``.zeros()`` alloc lines
+    # below, in pure python was causing segs faults and alloc crashes..
     # we might need to see how it behaves with shm arrays and consider
     # allocating them once at startup?
@@ -281,22 +274,14 @@ def _m4(
     x_start: int,
     step: float,

-) -> tuple[
-    int,
-    np.ndarray,
-    np.ndarray,
-    float,
-    float,
-]:
-    '''
-    Implementation of the m4 algorithm in ``numba``:
-    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
-
-    '''
+) -> int:
+    # nbins = len(i_win)
+    # count = len(xs)

     # these are pre-allocated and mutated by ``numba``
     # code in-place.
     y_out = np.zeros((frames, 4), ys.dtype)
-    x_out = np.zeros(frames, xs.dtype)
+    i_win = np.zeros(frames, xs.dtype)

     bincount = 0
     x_left = x_start
@@ -310,34 +295,24 @@ def _m4(

     # set all bins in the left-most entry to the starting left-most x value
     # (aka a row broadcast).
-    x_out[bincount] = x_left
+    i_win[bincount] = x_left
     # set all y-values to the first value passed in.
     y_out[bincount] = ys[0]

-    # full input y-data mx and mn
-    mx: float = -np.inf
-    mn: float = np.inf
-
-    # compute OHLC style max / min values per window sized x-frame.
     for i in range(len(xs)):
         x = xs[i]
         y = ys[i]
         if x < x_left + step:   # the current window "step" is [bin, bin+1)
-            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
-            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            y_out[bincount, 2] = max(y, y_out[bincount, 2])
             y_out[bincount, 3] = y
-            mx = max(mx, ymx)
-            mn = min(mn, ymn)
-
         else:
             # Find the next bin
             while x >= x_left + step:
                 x_left += step

             bincount += 1
-            x_out[bincount] = x_left
+            i_win[bincount] = x_left
             y_out[bincount] = y

-    return bincount, x_out, y_out, mn, mx
+    return bincount, i_win, y_out
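The `_m4` kernel above implements the M4 downsampling algorithm (the old-side docstring cites http://www.vldb.org/pvldb/vol7/p797-jugel.pdf): each pixel-wide x-window keeps only four y-values — first, min, max, last — which is lossless for line rendering. A pure-numpy sketch of the same binning idea (not the numba kernel itself):

import numpy as np

def m4_bin(xs: np.ndarray, ys: np.ndarray, step: float) -> np.ndarray:
    # group samples into windows of width `step` and keep 4 points each:
    # (first, min, max, last) per window, the lossless set for a line plot
    bins = ((xs - xs[0]) // step).astype(int)
    out = []
    for b in np.unique(bins):
        w = ys[bins == b]
        out.append((w[0], w.min(), w.max(), w[-1]))
    return np.array(out)

xs = np.arange(10, dtype=float)
ys = np.array([1, 3, 2, 5, 4, 6, 0, 7, 8, 2], dtype=float)
print(m4_bin(xs, ys, step=5))  # 2 windows x (first, min, max, last)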

View File

@@ -105,10 +105,6 @@ def chart_maxmin(
     mn, mx = out

     mx_vlm_in_view = 0
-
-    # TODO: we need to NOT call this to avoid a manual
-    # np.max/min trigger and especially on the vlm_chart
-    # flows which aren't shown.. like vlm?
     if vlm_chart:
         out = vlm_chart.maxmin()
         if out:
@@ -226,9 +222,33 @@ async def graphics_update_loop(
     tick_margin = 3 * tick_size

     chart.show()
+    # view = chart.view
     last_quote = time.time()
     i_last = ohlcv.index

+    # async def iter_drain_quotes():
+    #     # NOTE: all code below this loop is expected to be synchronous
+    #     # and thus draw instructions are not picked up until the next
+    #     # wait / iteration.
+    #     async for quotes in stream:
+    #         while True:
+    #             try:
+    #                 moar = stream.receive_nowait()
+    #             except trio.WouldBlock:
+    #                 yield quotes
+    #                 break
+    #             else:
+    #                 for sym, quote in moar.items():
+    #                     ticks_frame = quote.get('ticks')
+    #                     if ticks_frame:
+    #                         quotes[sym].setdefault(
+    #                             'ticks', []).extend(ticks_frame)
+    #                     print('pulled extra')
+
+    #         yield quotes
+
+    # async for quotes in iter_drain_quotes():
+
     ds = linked.display_state = DisplayState(**{
         'quotes': {},
         'linked': linked,
@@ -273,7 +293,6 @@ async def graphics_update_loop(

             # chart isn't active/shown so skip render cycle and pause feed(s)
             if chart.linked.isHidden():
-                print('skipping update')
                 chart.pause_all_feeds()
                 continue
@@ -397,8 +416,10 @@ def graphics_update_cycle(
         )
         or trigger_all
     ):
+        # TODO: we should track and compute whether the last
+        # pixel in a curve should show new data based on uppx
+        # and then iff update curves and shift?
         chart.increment_view(steps=i_diff)
+        # chart.increment_view(steps=i_diff + round(append_diff - uppx))

         if vlm_chart:
             vlm_chart.increment_view(steps=i_diff)
@@ -456,6 +477,7 @@ def graphics_update_cycle(
         ):
             chart.update_graphics_from_flow(
                 chart.name,
+                # do_append=uppx < update_uppx,
                 do_append=do_append,
             )

View File

@@ -337,7 +337,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
     name: str
     plot: pg.PlotItem
     graphics: Union[Curve, BarItems]
-    yrange: tuple[float, float] = None

     # in some cases a flow may want to change its
     # graphical "type" or, "form" when downsampling,
@@ -387,11 +386,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
         lbar: int,
         rbar: int,

-    ) -> Optional[tuple[float, float]]:
+    ) -> tuple[float, float]:
         '''
         Compute the cached max and min y-range values for a given
-        x-range determined by ``lbar`` and ``rbar`` or ``None``
-        if no range can be determined (yet).
+        x-range determined by ``lbar`` and ``rbar``.

         '''
         rkey = (lbar, rbar)
@@ -401,44 +399,40 @@ class Flow(msgspec.Struct):  # , frozen=True):

         shm = self.shm
         if shm is None:
-            return None
-
-        arr = shm.array
+            mxmn = None

-        # build relative indexes into shm array
-        # TODO: should we just add/use a method
-        # on the shm to do this?
-        ifirst = arr[0]['index']
-        slice_view = arr[
-            lbar - ifirst:
-            (rbar - ifirst) + 1
-        ]
+        else:  # new block for profiling?..
+            arr = shm.array
+
+            # build relative indexes into shm array
+            # TODO: should we just add/use a method
+            # on the shm to do this?
+            ifirst = arr[0]['index']
+            slice_view = arr[
+                lbar - ifirst:
+                (rbar - ifirst) + 1
+            ]

-        if not slice_view.size:
-            return None
-
-        elif self.yrange:
-            mxmn = self.yrange
-            # print(f'{self.name} M4 maxmin: {mxmn}')
-
-        else:
-            if self.is_ohlc:
-                ylow = np.min(slice_view['low'])
-                yhigh = np.max(slice_view['high'])
+            if not slice_view.size:
+                mxmn = None

-            else:
-                view = slice_view[self.name]
-                ylow = np.min(view)
-                yhigh = np.max(view)
-
-            mxmn = ylow, yhigh
-            # print(f'{self.name} MANUAL maxmin: {mxmin}')
-
-        # cache result for input range
-        assert mxmn
-        self._mxmns[rkey] = mxmn
+            else:
+                if self.is_ohlc:
+                    ylow = np.min(slice_view['low'])
+                    yhigh = np.max(slice_view['high'])

-        return mxmn
+                else:
+                    view = slice_view[self.name]
+                    ylow = np.min(view)
+                    yhigh = np.max(view)
+
+                mxmn = ylow, yhigh
+
+        if mxmn is not None:
+            # cache new mxmn result
+            self._mxmns[rkey] = mxmn
+
+        return mxmn

     def view_range(self) -> tuple[int, int]:
         '''
@@ -634,13 +628,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
             # source data so we clear our path data in prep
             # to generate a new one from original source data.
             new_sample_rate = True
+            showing_src_data = True
             should_ds = False
             should_redraw = True

-            showing_src_data = True
-            # reset yrange to be computed from source data
-            self.yrange = None
-
         # MAIN RENDER LOGIC:
         # - determine in view data and redraw on range change
         # - determine downsampling ops if needed
@@ -666,10 +657,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
                 **rkwargs,
             )

-            if showing_src_data:
-                # print(f"{self.name} SHOWING SOURCE")
-                # reset yrange to be computed from source data
-                self.yrange = None
-
             if not out:
                 log.warning(f'{self.name} failed to render!?')
@@ -677,9 +664,6 @@ class Flow(msgspec.Struct):  # , frozen=True):

             path, data, reset = out

-            # if self.yrange:
-            #     print(f'flow {self.name} yrange from m4: {self.yrange}')
-
             # XXX: SUPER UGGGHHH... without this we get stale cache
             # graphics that don't update until you downsample again..
             if reset:
@@ -1074,7 +1058,6 @@ class Renderer(msgspec.Struct):

         # xy-path data transform: convert source data to a format
         # able to be passed to a `QPainterPath` rendering routine.
         if not len(hist):
-            # XXX: this might be why the profiler only has exits?
             return

         x_out, y_out, connect = self.format_xy(
@@ -1161,14 +1144,11 @@ class Renderer(msgspec.Struct):

         elif should_ds and uppx > 1:

-            x_out, y_out, ymn, ymx = xy_downsample(
+            x_out, y_out = xy_downsample(
                 x_out,
                 y_out,
                 uppx,
             )
-            self.flow.yrange = ymn, ymx
-            # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')
-
             reset = True
             profiler(f'FULL PATH downsample redraw={should_ds}')
             self._in_ds = True
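On the removed side the renderer stashes the downsampler's `(ymn, ymx)` byproduct into `Flow.yrange`, letting `maxmin()` skip a full `np.min`/`np.max` scan while downsampled graphics are showing. A toy sketch of that caching pattern; the class and names here are hypothetical, not piker's actual `Flow` API:

# toy sketch of the yrange-caching pattern (names hypothetical)
import numpy as np

class ToyFlow:
    def __init__(self, data: np.ndarray):
        self.data = data
        self.yrange: tuple[float, float] | None = None  # set by downsampler

    def maxmin(self) -> tuple[float, float]:
        if self.yrange:  # fast path: reuse M4's min/max byproduct
            return self.yrange
        return float(self.data.min()), float(self.data.max())

flow = ToyFlow(np.array([3.0, 1.0, 4.0]))
print(flow.maxmin())      # full scan -> (1.0, 4.0)
flow.yrange = (1.0, 4.0)  # as if set after an M4 downsample pass
print(flow.maxmin())      # cached result, no array scan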

View File

@@ -639,25 +639,20 @@ async def open_vlm_displays(
         names: list[str],

     ) -> tuple[float, float]:
-        '''
-        Flows "group" maxmin loop; assumes all named flows
-        are in the same co-domain and thus can be sorted
-        as one set.
-
-        Iterates all the named flows and calls the chart
-        api to find their range values and return.
-
-        TODO: really we should probably have a more built-in API
-        for this?
-
-        '''
         mx = 0
         for name in names:
-            ymn, ymx = chart.maxmin(name=name)
-            mx = max(mx, ymx)

-        return 0, mx
+            mxmn = chart.maxmin(name=name)
+            if mxmn:
+                ymax = mxmn[1]
+                if ymax > mx:
+                    mx = ymax
+
+        return 0, mx
+
+    chart.view.maxmin = partial(multi_maxmin, names=['volume'])

     # TODO: fix the x-axis label issue where if you put
     # the axis on the left it's totally not lined up...
     # show volume units value on LHS (for dinkus)
@@ -781,7 +776,6 @@ async def open_vlm_displays(
     ) -> None:
         for name in names:
             if 'dark' in name:
-
                 color = dark_vlm_color
             elif 'rate' in name:

View File

@@ -923,7 +923,6 @@ class ChartView(ViewBox):
                 # XXX: super important to be aware of this.
                 # or not flow.graphics.isVisible()
             ):
-                # print(f'skipping {flow.name}')
                 continue

             # pass in no array which will read and render from the last

View File

@@ -49,17 +49,12 @@ def xy_downsample(
     x_spacer: float = 0.5,

-) -> tuple[
-    np.ndarray,
-    np.ndarray,
-    float,
-    float,
-]:
+) -> tuple[np.ndarray, np.ndarray]:

     # downsample whenever more then 1 pixels per datum can be shown.
     # always refresh data bounds until we get diffing
     # working properly, see above..
-    bins, x, y, ymn, ymx = ds_m4(
+    bins, x, y = ds_m4(
         x,
         y,
         uppx,

@@ -72,7 +67,7 @@ def xy_downsample(
     )).flatten()
     y = y.flatten()

-    return x, y, ymn, ymx
+    return x, y


 @njit(

View File

@@ -19,7 +19,6 @@ Position info and display

 """
 from __future__ import annotations
-from copy import copy
 from dataclasses import dataclass
 from functools import partial
 from math import floor, copysign
@@ -106,8 +105,8 @@ async def update_pnl_from_feed(

                     # compute and display pnl status
                     order_mode.pane.pnl_label.format(
                         pnl=copysign(1, size) * pnl(
-                            # live.be_price,
-                            order_mode.current_pp.live_pp.be_price,
+                            # live.avg_price,
+                            order_mode.current_pp.live_pp.avg_price,
                             tick['price'],
                         ),
                     )
@@ -357,7 +356,7 @@ class SettingsPane:
         # last historical close price
         last = feed.shm.array[-1][['close']][0]
         pnl_value = copysign(1, size) * pnl(
-            tracker.live_pp.be_price,
+            tracker.live_pp.avg_price,
             last,
         )
@@ -477,7 +476,7 @@ class PositionTracker:

         self.alloc = alloc
         self.startup_pp = startup_pp
-        self.live_pp = copy(startup_pp)
+        self.live_pp = startup_pp.copy()

         view = chart.getViewBox()
@@ -557,7 +556,7 @@ class PositionTracker:
         pp = position or self.live_pp

         self.update_line(
-            pp.be_price,
+            pp.avg_price,
             pp.size,
             self.chart.linked.symbol.lot_size_digits,
         )
@@ -571,7 +570,7 @@ class PositionTracker:
             self.hide()

         else:
-            self._level_marker.level = pp.be_price
+            self._level_marker.level = pp.avg_price

             # these updates are critical to avoid lag on view/scene changes
             self._level_marker.update()  # trigger paint

View File

@@ -33,10 +33,10 @@ import trio
 from PyQt5.QtCore import Qt

 from .. import config
-from ..pp import Position
 from ..clearing._client import open_ems, OrderBook
 from ..clearing._allocate import (
     mk_allocator,
+    Position,
 )
 from ._style import _font
 from ..data._source import Symbol
@@ -59,8 +59,7 @@ log = get_logger(__name__)


 class OrderDialog(BaseModel):
-    '''
-    Trade dialogue meta-data describing the lifetime
+    '''Trade dialogue meta-data describing the lifetime
     of an order submission to ``emsd`` from a chart.

     '''
tracker: PositionTracker, tracker: PositionTracker,
) -> None: ) -> None:
''' '''A callback applied for each level change to the line
A callback applied for each level change to the line
which will recompute the order size based on allocator which will recompute the order size based on allocator
settings. this is assigned inside settings. this is assigned inside
``OrderMode.line_from_order()`` ``OrderMode.line_from_order()``
@@ -606,10 +604,7 @@ async def open_order_mode(
         startup_pp = Position(
             symbol=symbol,
             size=0,
-            be_price=0,
-
-            # XXX: BLEH, do we care about this on the client side?
-            bsuid=symbol,
+            avg_price=0,
         )
         msg = pps_by_account.get(account_name)
         if msg:

View File

@@ -41,7 +41,6 @@ setup(
     },
     install_requires=[
         'toml',
-        'tomli',  # fastest pure py reader
         'click',
         'colorlog',
         'attrs',