# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..
(looking at you `ib` and dirt-bird friends)

'''
from collections import deque
from contextlib import contextmanager as cm
# from pprint import pformat
import os
from os import path
from math import copysign
import re
import time
from typing import (
    Any,
    Optional,
    Union,
)

import pendulum
from pendulum import datetime, now
import tomli
import toml

from . import config
from .brokers import get_brokermod
from .clearing._messages import BrokerdPosition, Status
from .data._source import Symbol
from .log import get_logger
from .data.types import Struct


log = get_logger(__name__)


@cm
def open_trade_ledger(
    broker: str,
    account: str,

) -> dict:
    '''
    Idempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.

    Files are named per broker account of the form
    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
    name as defined in the user's ``brokers.toml`` config.

    '''
    ldir = path.join(config._config_dir, 'ledgers')
    if not path.isdir(ldir):
        os.makedirs(ldir)

    fname = f'trades_{broker}_{account}.toml'
    tradesfile = path.join(ldir, fname)

    if not path.isfile(tradesfile):
        log.info(
            f'Creating new local trades ledger: {tradesfile}'
        )
        with open(tradesfile, 'w') as cf:
            pass  # touch

    with open(tradesfile, 'rb') as cf:
        start = time.time()
        ledger = tomli.load(cf)
        print(f'Ledger load took {time.time() - start}s')
        cpy = ledger.copy()

    yield cpy

    if cpy != ledger:
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        print(f'Updating ledger for {tradesfile}:\n')
        ledger.update(cpy)

        # write the mutated ledger data on close
        with open(tradesfile, 'w') as cf:
            toml.dump(ledger, cf)
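
# Usage sketch; the broker/account names below are hypothetical and
# would need to match entries in the user's ``brokers.toml``:
#
# with open_trade_ledger('ib', 'algopaper') as ledger:
#     for tid, record in ledger.items():
#         print(tid, record)
#
# any mutation of the yielded copy is detected by diffing against the
# on-disk state and is written back out on context exit.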


class Transaction(Struct, frozen=True):
    # TODO: should this be ``.to`` (see below)?
    fqsn: str

    tid: Union[str, int]  # unique transaction id
    size: float
    price: float
    cost: float  # commissions or other additional costs
    dt: datetime
    expiry: Optional[datetime] = None

    # optional key normally derived from the broker
    # backend which ensures the instrument-symbol this record
    # is for is truly unique.
    bsuid: Optional[Union[str, int]] = None

    # optional fqsn for the source "asset"/money symbol?
    # from: Optional[str] = None
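
# Construction sketch; every literal below (fqsn, tid, price, bsuid)
# is an invented placeholder, not a real contract or trade id:
#
# t = Transaction(
#     fqsn='mnq.globex.20220916',
#     tid='0000e1a7.623f2db9.01.01',
#     size=1.0,
#     price=12350.0,
#     cost=0.57,  # broker commission
#     dt=pendulum.parse('2022-06-10T14:30:21Z'),
#     bsuid=515416577,
# )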


class Position(Struct):
    '''
    Basic pp (personal/piker position) model with attached clearing
    transaction history.

    '''
    symbol: Symbol

    # can be +ve or -ve for long/short
    size: float

    # "breakeven price" above or below which pnl moves above and below
    # zero for the entirety of the current "trade state".
    be_price: float

    # unique backend symbol id
    bsuid: str

    # ordered record of known constituent trade messages
    clears: dict[
        Union[str, int, Status],  # trade id
        dict[str, Any],  # transaction history summaries
    ] = {}

    expiry: Optional[datetime] = None

    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    def to_pretoml(self) -> dict:
        '''
        Prep this position's data contents for export to toml including
        re-structuring of the ``.clears`` table to an array of
        inline-subtables for better ``pps.toml`` compactness.

        '''
        d = self.to_dict()
        clears = d.pop('clears')
        expiry = d.pop('expiry')

        if expiry:
            d['expiry'] = str(expiry)

        clears_list = []

        for tid, data in clears.items():
            inline_table = toml.TomlDecoder().get_empty_inline_table()
            inline_table['tid'] = tid

            for k, v in data.items():
                inline_table[k] = v

            clears_list.append(inline_table)

        d['clears'] = clears_list

        return d
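
    # For reference, a hypothetical rendered ``pps.toml`` entry built
    # from this dict form (all values invented for illustration):
    #
    # [ib.algopaper."mnq.globex.20220916"]
    # size = 1.0
    # be_price = 12423.630434782608
    # bsuid = 515416577
    # clears = [
    #  { tid = "0000e1a7.623f2db9.01.01", cost = 0.57, price = 12423.0, size = 1.0, dt = "2022-06-10T14:30:21+00:00" },
    # ]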

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:

        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        be_price, size = (
            round(
                msg['avg_price'],
                ndigits=symbol.tick_size_digits
            ),
            round(
                msg['size'],
                ndigits=lot_size_digits
            ),
        )

        self.be_price = be_price
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.be_price * self.size
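
    # e.g. a pp of 10 units with a breakeven price of 105.1 has
    # ``dsize == 1051.0`` (numbers illustrative only).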

    def update(
        self,
        t: Transaction,

    ) -> None:
        self.clears[t.tid] = {
            'cost': t.cost,
            'price': t.price,
            'size': t.size,
            'dt': str(t.dt),
        }

    def lifo_update(
        self,
        size: float,
        price: float,
        cost: float = 0,

        # TODO: idea: "real LIFO" dynamic positioning.
        # - when a trade takes place where the pnl for
        # the (set of) trade(s) is below the breakeven price
        # it may be that the trader took a +ve pnl on a short(er)
        # term trade in the same account.
        # - in this case we could recalc the be price to
        # be reverted back to its prior value before the nearest term
        # trade was opened.?
        # dynamic_breakeven_price: bool = False,

    ) -> tuple[float, float]:
        '''
        Incremental update using a LIFO-style weighted mean.

        '''
        # "avg position price" calcs
        # TODO: eventually it'd be nice to have a small set of routines
        # to do this stuff from a sequence of cleared orders to enable
        # so called "contextual positions".
        new_size = self.size + size

        # old size minus the new size gives us size diff with
        # +ve -> increase in pp size
        # -ve -> decrease in pp size
        size_diff = abs(new_size) - abs(self.size)

        if new_size == 0:
            self.be_price = 0

        elif size_diff > 0:
            # XXX: LIFO incremental update:
            # only update the "average price" when the size increases,
            # not when it decreases (i.e. when the position is being
            # made smaller).
            self.be_price = (
                # weight of current exec = (size * price) + cost
                (abs(size) * price)
                +
                (copysign(1, new_size) * cost)  # transaction cost
                +
                # weight of existing be price
                self.be_price * abs(self.size)  # weight of previous pp
            ) / abs(new_size)  # normalized by the new size: weighted mean.

        self.size = new_size

        return new_size, self.be_price
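
    # A worked instance of the weighted mean above (invented numbers):
    # holding 10 units at ``be_price == 100.0``, a new buy of 10 units
    # @ 110.0 with ``cost == 2.0`` yields
    #
    #   be_price = ((10 * 110.0) + (1 * 2.0) + (100.0 * 10)) / 20
    #            = 2102.0 / 20
    #            = 105.1
    #
    # i.e. the breakeven shifts toward the newer clear and absorbs the
    # transaction cost.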

    def minimize_clears(
        self,

    ) -> dict[str, dict]:
        '''
        Minimize the position's clears entries by removing
        all transactions before the last net zero size to avoid
        unnecessary history irrelevant to the current pp state.

        '''
        size: float = self.size
        clears_since_zero: deque[tuple[str, dict]] = deque()

        # scan for the last "net zero" position by
        # iterating clears in reverse.
        for tid, clear in reversed(self.clears.items()):
            size -= clear['size']
            clears_since_zero.appendleft((tid, clear))

            if size == 0:
                break

        self.clears = dict(clears_since_zero)
        return self.clears
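
    # Scan sketch with invented sizes: given ``self.size == 2`` and
    # clears of sizes [+1, -1, +2], the reverse scan subtracts the
    # trailing +2, hits a running size of 0, and keeps only that final
    # clear; the earlier +1/-1 round trip is dropped as pre-net-zero
    # history.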


class PpTable(Struct):

    pps: dict[str, Position]
    conf: Optional[dict] = {}

    def update_from_trans(
        self,
        trans: dict[str, Transaction],
    ) -> dict[str, Position]:

        pps = self.pps

        updated: dict[str, Position] = {}

        # lifo update all pps from records
        for tid, r in trans.items():

            pp = pps.setdefault(
                r.bsuid,

                # if no existing pp, allocate fresh one.
                Position(
                    Symbol.from_fqsn(
                        r.fqsn,
                        info={},
                    ),
                    size=0.0,
                    be_price=0.0,
                    bsuid=r.bsuid,
                    expiry=r.expiry,
                )
            )

            # don't do updates for ledger records we already have
            # included in the current pps state.
            if r.tid in pp.clears:
                # NOTE: likely you'll see repeats of the same
                # ``Transaction`` passed in here if/when you are restarting
                # a ``brokerd.ib`` where the API will re-report trades from
                # the current session, so we need to make sure we don't
                # "double count" these in pp calculations.
                continue

            # lifo style "breakeven" price calc
            pp.lifo_update(
                r.size,
                r.price,

                # include transaction cost in breakeven price
                # and presume the worst case of the same cost
                # to exit this transaction (even though in reality
                # it will be dynamic based on exit strategy).
                cost=2*r.cost,
            )

            # track clearing data
            pp.update(r)

            updated[r.bsuid] = pp

        return updated

    def dump_active(
        self,
        brokername: str,
    ) -> tuple[
        dict[str, Any],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # ONLY dict-serialize all active positions; those that are
        # closed we don't store in the ``pps.toml``.
        # NOTE: newly closed positions are also important to
        # report/return since a consumer, like an order mode UI ;),
        # might want to react based on the closure.
        pp_entries = {}
        closed_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bsuid in list(pp_objs):
            pp = pp_objs[bsuid]

            # XXX: debug hook for size mismatches
            # qqqbsuid = 320227571
            # if bsuid == qqqbsuid:
            #     breakpoint()

            pp.minimize_clears()

            if (
                # "net-zero" is a "closed" position
                pp.size == 0

                # time-expired pps (normally derivatives) are "closed"
                or (pp.expiry and pp.expiry < now())
            ):
                # for expired cases
                pp.size = 0

                # NOTE: we DO NOT pop the pp here since it can still be
                # used to check for duplicate clears that may come in as
                # new transaction from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_entries`` above is what's
                # written.
                # closed_pp = pp_objs.pop(bsuid, None)
                closed_pp = pp_objs.get(bsuid)
                if closed_pp:
                    closed_pp_objs[bsuid] = closed_pp

            else:
                # serialize to pre-toml form
                asdict = pp.to_pretoml()

                if pp.expiry is None:
                    asdict.pop('expiry', None)

                # TODO: we need to figure out how to have one top level
                # listing venue here even when the backend isn't providing
                # it via the trades ledger..
                # drop symbol obj in serialized form
                s = asdict.pop('symbol')
                fqsn = s.front_fqsn()
                log.info(f'Updating active pp: {fqsn}')

                # XXX: ugh, it's cuz we push the section under
                # the broker name.. maybe we need to rethink this?
                brokerless_key = fqsn.removeprefix(f'{brokername}.')

                pp_entries[brokerless_key] = asdict

        return pp_entries, closed_pp_objs
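
    # Table-driven update sketch (broker name and inputs hypothetical):
    #
    # table = PpTable(pps={})
    # table.update_from_trans(transactions)  # tid -> Transaction map
    # active_entries, closed_pps = table.dump_active('ib')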


def update_pps(
    records: dict[str, Transaction],
    pps: Optional[dict[str, Position]] = None

) -> dict[str, Position]:
    '''
    Compile a set of positions from a trades ledger.

    '''
    pps: dict[str, Position] = pps or {}
    table = PpTable(pps)
    table.update_from_trans(records)
    return table.pps


def load_trans_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by: Optional[list[dict]] = None,

) -> dict[str, Transaction]:
    '''
    Open a ledger file by broker name and account, normalize any
    contained trade records into our ``Transaction`` form, and deliver
    them keyed by transaction id (optionally filtered down to a set of
    ``bsuid``s).

    '''
    with open_trade_ledger(
        brokername,
        acctname,
    ) as ledger:
        if not ledger:
            # null case, no ledger file with content
            return {}

    brokermod = get_brokermod(brokername)
    src_records: dict[str, Transaction] = brokermod.norm_trade_records(ledger)

    if filter_by:
        records = {}
        bsuids = set(filter_by)
        for tid, r in src_records.items():
            if r.bsuid in bsuids:
                records[tid] = r
    else:
        records = src_records

    return records
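
# Pull-and-filter sketch (all names and ids hypothetical); the returned
# map can be fed straight into ``PpTable.update_from_trans()``:
#
# trans = load_trans_from_ledger(
#     'ib',
#     'algopaper',
#     filter_by={515416577: 'mnq.globex.20220916'},  # bsuid -> fqsn
# )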


# TODO: instead see if we can hack tomli and tomli-w to do the same:
# - https://github.com/hukkin/tomli
# - https://github.com/hukkin/tomli-w
class PpsEncoder(toml.TomlEncoder):
    '''
    Special "styled" encoder that makes a ``pps.toml`` readable and
    compact by putting `.clears` tables inline and everything else
    flat-ish.

    '''
    separator = ','

    def dump_list(self, v):
        '''
        Dump an inline list with a newline after every element and
        with consideration for denoted inline table types.

        '''
        retval = "[\n"
        for u in v:
            if isinstance(u, toml.decoder.InlineTableDict):
                out = self.dump_inline_table(u)
            else:
                out = str(self.dump_value(u))

            retval += " " + out + "," + "\n"
        retval += "]"
        return retval

    def dump_inline_table(self, section):
        """Preserve inline table in its compact syntax instead of expanding
        into subsection.

        https://github.com/toml-lang/toml#user-content-inline-table
        """
        val_list = []
        for k, v in section.items():
            # if isinstance(v, toml.decoder.InlineTableDict):
            if isinstance(v, dict):
                val = self.dump_inline_table(v)
            else:
                val = str(self.dump_value(v))

            val_list.append(k + " = " + val)

        retval = "{ " + ", ".join(val_list) + " }"
        return retval
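
    # e.g. an input of ``{'cost': 0.57, 'price': 12423.0}`` renders as
    # the single line:
    #
    #   { cost = 0.57, price = 12423.0 }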

    def dump_sections(self, o, sup):
        retstr = ""
        if sup != "" and sup[-1] != ".":
            sup += '.'
        retdict = self._dict()
        arraystr = ""
        for section in o:
            qsection = str(section)
            value = o[section]

            if not re.match(r'^[A-Za-z0-9_-]+$', section):
                qsection = toml.encoder._dump_str(section)

            # arrayoftables = False
            if (
                self.preserve
                and isinstance(value, toml.decoder.InlineTableDict)
            ):
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    self.dump_inline_table(o[section])
                    +
                    '\n'  # only on the final terminating left brace
                )

            # XXX: this code i'm pretty sure is just blatantly bad
            # and/or wrong..
            # if isinstance(o[section], list):
            #     for a in o[section]:
            #         if isinstance(a, dict):
            #             arrayoftables = True
            #     if arrayoftables:
            #         for a in o[section]:
            #             arraytabstr = "\n"
            #             arraystr += "[[" + sup + qsection + "]]\n"
            #             s, d = self.dump_sections(a, sup + qsection)
            #             if s:
            #                 if s[0] == "[":
            #                     arraytabstr += s
            #                 else:
            #                     arraystr += s
            #             while d:
            #                 newd = self._dict()
            #                 for dsec in d:
            #                     s1, d1 = self.dump_sections(d[dsec], sup +
            #                                                 qsection + "." +
            #                                                 dsec)
            #                     if s1:
            #                         arraytabstr += ("[" + sup + qsection +
            #                                         "." + dsec + "]\n")
            #                         arraytabstr += s1
            #                     for s1 in d1:
            #                         newd[dsec + "." + s1] = d1[s1]
            #                 d = newd
            #             arraystr += arraytabstr

            elif isinstance(value, dict):
                retdict[qsection] = o[section]

            elif o[section] is not None:
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    str(self.dump_value(o[section]))
                )

                # if not isinstance(value, dict):
                if not isinstance(value, toml.decoder.InlineTableDict):
                    # inline tables should not contain newlines:
                    # https://toml.io/en/v1.0.0#inline-table
                    retstr += '\n'

            else:
                raise ValueError(value)

        retstr += arraystr
        return (retstr, retdict)


def load_pps_from_toml(
    brokername: str,
    acctid: str,

    # XXX: there is an edge case here where we may want to either audit
    # the retrieved ``pps.toml`` output or reprocess it since there was
    # an error on write on the last attempt to update the state file
    # even though the ledger *was* updated. For these cases we allow the
    # caller to pass in a symbol set they'd like to reload from the
    # underlying ledger to be reprocessed in computing pps state.
    reload_records: Optional[dict[str, str]] = None,

    # XXX: this is a "global" update-from-ledger flag which
    # does a full refresh of pps from the available ledger.
    update_from_ledger: bool = False,

) -> tuple[PpTable, dict[str, str]]:
    '''
    Load and marshal to objects all pps from either an existing
    ``pps.toml`` config, or from scratch from a ledger file when
    none yet exists.

    '''
    with open_pps(
        brokername,
        acctid,
        write_on_exit=False,
    ) as table:
        pp_objs = table.pps

        # no pps entry yet for this broker/account so parse any available
        # ledgers to build a brand new pps state.
        if not pp_objs or update_from_ledger:
            trans = load_trans_from_ledger(
                brokername,
                acctid,
            )
            table.update_from_trans(trans)

        # Reload symbol specific ledger entries if requested by the
        # caller **AND** none exist in the current pps state table.
        elif (
            pp_objs and reload_records
        ):
            # no pps entry yet for this broker/account so parse
            # any available ledgers to build a pps state.
            trans = load_trans_from_ledger(
                brokername,
                acctid,
                filter_by=reload_records,
            )
            table.update_from_trans(trans)

        if not table.pps:
            log.warning(
                f'No `pps.toml` values could be loaded {brokername}:{acctid}'
            )

    return table, table.conf


@cm
def open_pps(
    brokername: str,
    acctid: str,
    write_on_exit: bool = True,

) -> PpTable:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf, path = config.load('pps')
    brokersection = conf.setdefault(brokername, {})
    pps = brokersection.setdefault(acctid, {})

    pp_objs = {}
    table = PpTable(pp_objs, conf=conf)

    # unmarshal/load ``pps.toml`` config entries into object form
    # and update `PpTable` obj entries.
    for fqsn, entry in pps.items():
        bsuid = entry['bsuid']

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        clears_list = entry['clears']

        # index clears entries in "object" form by tid in a top
        # level dict instead of a list (as is presented in our
        # ``pps.toml``).
        pp = pp_objs.get(bsuid)
        if pp:
            clears = pp.clears
        else:
            clears = {}

        for clears_table in clears_list:
            tid = clears_table.pop('tid')
            clears[tid] = clears_table

        size = entry['size']

        # TODO: an audit system for existing pps entries?
        # if not len(clears) == abs(size):
        #     pp_objs = load_pps_from_ledger(
        #         brokername,
        #         acctid,
        #         filter_by=reload_records,
        #     )
        #     reason = 'size <-> len(clears) mismatch'
        #     raise ValueError(
        #         '`pps.toml` entry is invalid:\n'
        #         f'{fqsn}\n'
        #         f'{pformat(entry)}'
        #     )

        expiry = entry.get('expiry')
        if expiry:
            expiry = pendulum.parse(expiry)

        pp_objs[bsuid] = Position(
            Symbol.from_fqsn(fqsn, info={}),
            size=size,
            be_price=entry['be_price'],
            expiry=expiry,
            bsuid=entry['bsuid'],

            # XXX: super critical, we need to be sure to include
            # all pps.toml clears to avoid reusing clears that were
            # already included in the current incremental update
            # state, since today's records may have already been
            # processed!
            clears=clears,
        )

    yield table

    if not write_on_exit:
        return

    # TODO: show diff output?
    # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
    print(f'Updating ``pps.toml`` for {path}:\n')

    pp_entries, closed_pp_objs = table.dump_active(brokername)
    conf[brokername][acctid] = pp_entries

    # TODO: why tf haven't they already done this for inline
    # tables smh..
    enc = PpsEncoder(preserve=True)
    # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
    enc.dump_funcs[
        toml.decoder.InlineTableDict
    ] = enc.dump_inline_table

    config.write(
        conf,
        'pps',
        encoder=enc,
    )
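
# Read-modify-write sketch (broker/account names hypothetical):
#
# with open_pps('ib', 'algopaper') as table:
#     for bsuid, pp in table.pps.items():
#         print(bsuid, pp.size, pp.be_price)
#
# on exit the (possibly mutated) table is re-serialized back out to
# ``pps.toml`` unless ``write_on_exit=False`` was passed.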


def update_pps_conf(
    brokername: str,
    acctid: str,

    trade_records: Optional[dict[str, Transaction]] = None,
    ledger_reload: Optional[dict[str, str]] = None,

) -> tuple[
    dict[str, Position],
    dict[str, Position],
]:
    # TODO: ideally we can pass in an existing
    # pps state to this right? such that we
    # don't have to do a ledger reload all the
    # time.. a couple ideas I can think of,
    # - load pps once after backend ledger state
    #   is loaded and keep maintained in memory
    #   inside a with block,
    # - mirror this in some client side actor which
    #   does the actual ledger updates (say the paper
    #   engine proc if we decide to always spawn it?),
    # - do diffs against updates from the ledger writer
    #   actor and the in-mem state here?

    if trade_records and ledger_reload:
        for tid, r in trade_records.items():
            ledger_reload[r.bsuid] = r.fqsn

    table, conf = load_pps_from_toml(
        brokername,
        acctid,
        reload_records=ledger_reload,
    )

    # update all pp objects from any (new) trade records which
    # were passed in (aka incremental update case).
    if trade_records:
        table.update_from_trans(trade_records)

    # this maps `.bsuid` values to positions
    pp_entries, closed_pp_objs = table.dump_active(brokername)
    pp_objs: dict[Union[str, int], Position] = table.pps

    conf[brokername][acctid] = pp_entries

    # TODO: why tf haven't they already done this for inline tables smh..
    enc = PpsEncoder(preserve=True)
    # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
    enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table

    config.write(
        conf,
        'pps',
        encoder=enc,
    )

    # deliver object form of all pps in table to caller
    return pp_objs, closed_pp_objs


if __name__ == '__main__':
    import sys

    args = sys.argv
    assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
    args = args[1:]
    for acctid in args:
        broker, name = acctid.split('.')
        update_pps_conf(broker, name)