From 450009ff9c0446c7809f8e18904ab60919cd5d45 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 7 Jun 2022 14:53:01 -0400 Subject: [PATCH 01/58] Add `open_trade_ledger()` for writing `/ledgers/trades__.toml` files --- piker/config.py | 60 ++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 55 insertions(+), 5 deletions(-) diff --git a/piker/config.py b/piker/config.py index d1926dec..8a2dd003 100644 --- a/piker/config.py +++ b/piker/config.py @@ -18,9 +18,11 @@ Broker configuration mgmt. """ +from contextlib import contextmanager as cm import platform import sys import os +from os import path from os.path import dirname import shutil from typing import Optional @@ -111,6 +113,7 @@ if _parent_user: _conf_names: set[str] = { 'brokers', + 'pp', 'trades', 'watchlists', } @@ -147,19 +150,21 @@ def get_conf_path( conf_name: str = 'brokers', ) -> str: - """Return the default config path normally under - ``~/.config/piker`` on linux. + ''' + Return the top-level default config path normally under + ``~/.config/piker`` on linux for a given ``conf_name``, the config + name. Contains files such as: - brokers.toml + - pp.toml - watchlists.toml - - trades.toml # maybe coming soon ;) - signals.toml - strats.toml - """ + ''' assert conf_name in _conf_names fn = _conf_fn_w_ext(conf_name) return os.path.join( @@ -168,12 +173,57 @@ def get_conf_path( ) +@cm +def open_trade_ledger( + broker: str, + account: str, + +) -> str: + ''' + Indempotently create and read in a trade log file from the + ``/ledgers/`` directory. + + Files are named per broker account of the form + ``_.toml``. The ``accountname`` here is the + name as defined in the user's ``brokers.toml`` config. 
+ + ''' + ldir = path.join(_config_dir, 'ledgers') + if not path.isdir(ldir): + os.makedirs(ldir) + + fname = f'trades_{broker}_{account}.toml' + tradesfile = path.join(ldir, fname) + + if not path.isfile(tradesfile): + log.info( + f'Creating new local trades ledger: {tradesfile}' + ) + with open(tradesfile, 'w') as cf: + pass # touch + try: + with open(tradesfile, 'r') as cf: + ledger = toml.load(tradesfile) + cpy = ledger.copy() + yield cpy + finally: + if cpy != ledger: + # TODO: show diff output? + # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries + print(f'Updating ledger for {tradesfile}:\n') + ledger.update(cpy) + + # we write on close the mutated ledger data + with open(tradesfile, 'w') as cf: + return toml.dump(ledger, cf) + + def repodir(): ''' Return the abspath to the repo directory. ''' - dirpath = os.path.abspath( + dirpath = path.abspath( # we're 3 levels down in **this** module file dirname(dirname(os.path.realpath(__file__))) ) From 050aa7594cd3aaad77dd5acd3a81f63276b0f4bb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 7 Jun 2022 14:53:31 -0400 Subject: [PATCH 02/58] Simplify trades ledger collection to single pass loop --- piker/brokers/ib/broker.py | 60 +++++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 20 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 721b6da8..bfde22f8 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -555,36 +555,56 @@ def load_flex_trades( report = flexreport.FlexReport(path=path) trade_entries = report.extract('Trade') - trades = { + + # get reverse map to user account names + accounts = conf['accounts'].inverse + trades_by_account = {} + + for t in trade_entries: + # XXX: LOL apparently ``toml`` has a bug # where a section key error will show up in the write # if you leave this as an ``int``? 
- str(t.__dict__['tradeID']): t.__dict__ - for t in trade_entries - } + trade = t.__dict__ + # oddly for some so-called "BookTrade" entries + # this field seems to be blank, no cuckin clue. + # trade['ibExecID'] + tid = str(trade['tradeID']) + date = str(trade['tradeDate']) + acctid = accounts[str(trade['accountId'])] + trades_by_account.setdefault( + acctid, {} + ).setdefault(date, {})[tid] = trade - ln = len(trades) + ln = len(trades_by_account.values()) log.info(f'Loaded {ln} trades from flex query') - trades_by_account = {} - for tid, trade in trades.items(): - trades_by_account.setdefault( - # oddly for some so-called "BookTrade" entries - # this field seems to be blank, no cuckin clue. - # trade['ibExecID'] - str(trade['accountId']), {} - )[tid] = trade + # section = {'ib': trades_by_account} + for acctid, trades_by_id in trades_by_account.items(): + with config.open_trade_ledger('ib', acctid) as ledger: + ledger.update({'ib': trades_by_id}) - section = {'ib': trades_by_account} - pprint(section) + # pprint(section) # TODO: load the config first and append in # the new trades loaded here.. - try: - config.write(section, 'trades') - except KeyError: - import pdbpp; pdbpp.set_trace() # noqa + # try: + # config.write(section, 'trades') + # except KeyError: + # import pdbpp; pdbpp.set_trace() # noqa if __name__ == '__main__': - load_flex_trades() + import sys + import os + + args = sys.argv + if len(args) > 1: + args = args[1:] + for arg in args: + path = os.path.abspath(arg) + load_flex_trades(path=path) + else: + # expect brokers.toml to have an entry and + # pull from the web service. 
+ load_flex_trades() From 725909a94ccf07537f68a32e335422cf6ffb5877 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 7 Jun 2022 16:22:46 -0400 Subject: [PATCH 03/58] Convert accounts table to `bidict` after config load --- piker/brokers/ib/api.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index b12e723b..044415fc 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -38,6 +38,7 @@ import time from types import SimpleNamespace +from bidict import bidict import trio import tractor from tractor import to_asyncio @@ -261,12 +262,11 @@ class Client: # NOTE: the ib.client here is "throttled" to 45 rps by default - async def trades( - self, - # api_only: bool = False, - - ) -> dict[str, Any]: + async def trades(self) -> dict[str, Any]: + ''' + Return list of trades from current session in ``dict``. + ''' # orders = await self.ib.reqCompletedOrdersAsync( # apiOnly=api_only # ) @@ -811,10 +811,23 @@ _scan_ignore: set[tuple[str, int]] = set() def get_config() -> dict[str, Any]: - conf, path = config.load() - + conf, path = config.load('brokers') section = conf.get('ib') + accounts = section.get('accounts') + if not accounts: + raise ValueError( + 'brokers.toml -> `ib.accounts` must be defined\n' + f'location: {path}' + ) + + names = list(accounts.keys()) + accts = section['accounts'] = bidict(accounts) + log.info( + f'brokers.toml defines {len(accts)} accounts: ' + f'{pformat(names)}' + ) + if section is None: log.warning(f'No config section found for ib in {path}') return {} From 1eb7e109e663ce43d50796048be50cb284ff8984 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 8 Jun 2022 11:25:17 -0400 Subject: [PATCH 04/58] Start `piker.pp` module, LIFO pp updates Start a generic "position related" util mod and bring in the `Position` type from the allocator , convert it to a `msgspec.Struct` and add a `.lifo_update()` method. 
Implement a WIP pp parser from a trades ledger and use the new lifo method to gather position entries. --- piker/clearing/_allocate.py | 46 +--------- piker/pp.py | 177 ++++++++++++++++++++++++++++++++++++ piker/ui/_position.py | 3 +- piker/ui/order_mode.py | 6 +- 4 files changed, 185 insertions(+), 47 deletions(-) create mode 100644 piker/pp.py diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index 71d7d9a0..f14728a1 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -23,53 +23,11 @@ from typing import Optional from bidict import bidict from pydantic import BaseModel, validator +from msgspec import Struct from ..data._source import Symbol from ._messages import BrokerdPosition, Status - - -class Position(BaseModel): - ''' - Basic pp (personal position) model with attached fills history. - - This type should be IPC wire ready? - - ''' - symbol: Symbol - - # last size and avg entry price - size: float - avg_price: float # TODO: contextual pricing - - # ordered record of known constituent trade messages - fills: list[Status] = [] - - def update_from_msg( - self, - msg: BrokerdPosition, - - ) -> None: - - # XXX: better place to do this? - symbol = self.symbol - - lot_size_digits = symbol.lot_size_digits - avg_price, size = ( - round(msg['avg_price'], ndigits=symbol.tick_size_digits), - round(msg['size'], ndigits=lot_size_digits), - ) - - self.avg_price = avg_price - self.size = size - - @property - def dsize(self) -> float: - ''' - The "dollar" size of the pp, normally in trading (fiat) unit - terms. 
- - ''' - return self.avg_price * self.size +from ..pp import Position _size_units = bidict({ diff --git a/piker/pp.py b/piker/pp.py new file mode 100644 index 00000000..cdbcd0d5 --- /dev/null +++ b/piker/pp.py @@ -0,0 +1,177 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License + +# along with this program. If not, see . +''' +Personal/Private position parsing, calculmating, summarizing in a way +that doesn't try to cuk most humans who prefer to not lose their moneys.. +(looking at you `ib` and shitzy friends) + +''' +from typing import ( + Any, + Optional, + Union, +) + +from msgspec import Struct + +from . import config +from .clearing._messages import BrokerdPosition, Status +from .data._source import Symbol + + +class Position(Struct): + ''' + Basic pp (personal position) model with attached fills history. + + This type should be IPC wire ready? + + ''' + symbol: Symbol + + # last size and avg entry price + size: float + avg_price: float # TODO: contextual pricing + + # ordered record of known constituent trade messages + fills: list[Status] = [] + + def update_from_msg( + self, + msg: BrokerdPosition, + + ) -> None: + + # XXX: better place to do this? 
+ symbol = self.symbol + + lot_size_digits = symbol.lot_size_digits + avg_price, size = ( + round( + msg['avg_price'], + ndigits=symbol.tick_size_digits + ), + round( + msg['size'], + ndigits=lot_size_digits + ), + ) + + self.avg_price = avg_price + self.size = size + + @property + def dsize(self) -> float: + ''' + The "dollar" size of the pp, normally in trading (fiat) unit + terms. + + ''' + return self.avg_price * self.size + + def lifo_update( + self, + size: float, + price: float, + + ) -> (float, float): + ''' + Incremental update using a LIFO-style weighted mean. + + ''' + # "avg position price" calcs + # TODO: eventually it'd be nice to have a small set of routines + # to do this stuff from a sequence of cleared orders to enable + # so called "contextual positions". + new_size = self.size + size + + # old size minus the new size gives us size diff with + # +ve -> increase in pp size + # -ve -> decrease in pp size + size_diff = abs(new_size) - abs(self.size) + + if new_size == 0: + self.avg_price = 0 + + elif size_diff > 0: + # XXX: LOFI incremental update: + # only update the "average price" when + # the size increases not when it decreases (i.e. 
the + # position is being made smaller) + self.avg_price = ( + abs(size) * price # weight of current exec + + + self.avg_price * abs(self.size) # weight of previous pp + ) / abs(new_size) + + self.size = new_size + + return new_size, self.avg_price + + +def parse_pps( + + brokername: str, + acctname: str, + + ledger: Optional[dict[str, Union[str, float]]] = None, + +) -> dict[str, Any]: + + pps: dict[str, Position] = {} + + if not ledger: + with config.open_trade_ledger( + brokername, + acctname, + ) as ledger: + pass # readonly + + by_date = ledger[brokername] + + for date, by_id in by_date.items(): + for tid, record in by_id.items(): + + # ib specific record parsing + # date, time = record['dateTime'] + # conid = record['condid'] + # cost = record['cost'] + # comms = record['ibCommission'] + symbol = record['symbol'] + price = record['tradePrice'] + # action = record['buySell'] + + # NOTE: can be -ve on sells + size = float(record['quantity']) + + pp = pps.setdefault( + symbol, + Position( + Symbol(key=symbol), + size=0.0, + avg_price=0.0, + ) + ) + + # LOFI style average price calc + pp.lifo_update(size, price) + + from pprint import pprint + pprint(pps) + + +if __name__ == '__main__': + parse_pps('ib', 'algopaper') diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 0abb6459..844869b0 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -19,6 +19,7 @@ Position info and display """ from __future__ import annotations +from copy import copy from dataclasses import dataclass from functools import partial from math import floor, copysign @@ -476,7 +477,7 @@ class PositionTracker: self.alloc = alloc self.startup_pp = startup_pp - self.live_pp = startup_pp.copy() + self.live_pp = copy(startup_pp) view = chart.getViewBox() diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index a86fe816..5ee53bd4 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -59,7 +59,8 @@ log = get_logger(__name__) class 
OrderDialog(BaseModel): - '''Trade dialogue meta-data describing the lifetime + ''' + Trade dialogue meta-data describing the lifetime of an order submission to ``emsd`` from a chart. ''' @@ -87,7 +88,8 @@ def on_level_change_update_next_order_info( tracker: PositionTracker, ) -> None: - '''A callback applied for each level change to the line + ''' + A callback applied for each level change to the line which will recompute the order size based on allocator settings. this is assigned inside ``OrderMode.line_from_order()`` From add0e92335b509c6556c20106ae95514b10158df Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 8 Jun 2022 12:37:12 -0400 Subject: [PATCH 05/58] Drop old trade log config writing code --- piker/brokers/ib/broker.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index bfde22f8..f9fab51d 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -579,20 +579,10 @@ def load_flex_trades( ln = len(trades_by_account.values()) log.info(f'Loaded {ln} trades from flex query') - # section = {'ib': trades_by_account} for acctid, trades_by_id in trades_by_account.items(): with config.open_trade_ledger('ib', acctid) as ledger: ledger.update({'ib': trades_by_id}) - # pprint(section) - - # TODO: load the config first and append in - # the new trades loaded here.. 
- # try: - # config.write(section, 'trades') - # except KeyError: - # import pdbpp; pdbpp.set_trace() # noqa - if __name__ == '__main__': import sys From f76857606092bf687703bbe514c46faad2cf0776 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 8 Jun 2022 12:37:44 -0400 Subject: [PATCH 06/58] Delegate paper engine pp tracking to new type --- piker/clearing/_paper_engine.py | 37 +++++++++++---------------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 99039049..a11e9ea2 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -31,6 +31,8 @@ import tractor from dataclasses import dataclass from .. import data +from ..data._source import Symbol +from ..pp import Position from ..data._normalize import iterticks from ..data._source import unpack_fqsn from ..log import get_logger @@ -257,29 +259,13 @@ class PaperBoi: ) ) - # "avg position price" calcs - # TODO: eventually it'd be nice to have a small set of routines - # to do this stuff from a sequence of cleared orders to enable - # so called "contextual positions". - new_size = size + pp_msg.size - - # old size minus the new size gives us size differential with - # +ve -> increase in pp size - # -ve -> decrease in pp size - size_diff = abs(new_size) - abs(pp_msg.size) - - if new_size == 0: - pp_msg.avg_price = 0 - - elif size_diff > 0: - # only update the "average position price" when the position - # size increases not when it decreases (i.e. 
the position is - # being made smaller) - pp_msg.avg_price = ( - abs(size) * price + pp_msg.avg_price * abs(pp_msg.size) - ) / abs(new_size) - - pp_msg.size = new_size + # delegate update to `.pp.Position.lifo_update()` + pp = Position( + Symbol(key=symbol), + size=pp_msg.size, + avg_price=pp_msg.avg_price, + ) + pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price) await self.ems_trades_stream.send(pp_msg.dict()) @@ -390,7 +376,8 @@ async def handle_order_requests( account = request_msg['account'] if account != 'paper': log.error( - 'This is a paper account, only a `paper` selection is valid' + 'This is a paper account,' + ' only a `paper` selection is valid' ) await ems_order_stream.send(BrokerdError( oid=request_msg['oid'], @@ -464,7 +451,7 @@ async def trades_dialogue( # TODO: load paper positions per broker from .toml config file # and pass as symbol to position data mapping: ``dict[str, dict]`` # await ctx.started(all_positions) - await ctx.started(({}, {'paper',})) + await ctx.started(({}, ['paper'])) async with ( ctx.open_stream() as ems_stream, From eb2bad51389a3a4f439c4b305f82bc0ffc290835 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 13:24:39 -0400 Subject: [PATCH 07/58] Make our `Symbol` a `msgspec.Struct` --- piker/config.py | 1 - piker/data/_source.py | 14 +++++--------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/piker/config.py b/piker/config.py index 8a2dd003..706ada5e 100644 --- a/piker/config.py +++ b/piker/config.py @@ -289,7 +289,6 @@ def write( def load_accounts( - providers: Optional[list[str]] = None ) -> bidict[str, Optional[str]]: diff --git a/piker/data/_source.py b/piker/data/_source.py index 9afcb191..73c218ca 100644 --- a/piker/data/_source.py +++ b/piker/data/_source.py @@ -23,7 +23,7 @@ import decimal from bidict import bidict import numpy as np -from pydantic import BaseModel +from msgspec import Struct # from numba import from_dtype @@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> 
tuple[str, str, str]: ) -class Symbol(BaseModel): +class Symbol(Struct): ''' I guess this is some kinda container thing for dealing with all the different meta-data formats from brokers? @@ -152,9 +152,7 @@ class Symbol(BaseModel): info: dict[str, Any], suffix: str = '', - # XXX: like wtf.. - # ) -> 'Symbol': - ) -> None: + ) -> Symbol: tick_size = info.get('price_tick_size', 0.01) lot_tick_size = info.get('lot_tick_size', 0.0) @@ -175,9 +173,7 @@ class Symbol(BaseModel): fqsn: str, info: dict[str, Any], - # XXX: like wtf.. - # ) -> 'Symbol': - ) -> None: + ) -> Symbol: broker, key, suffix = unpack_fqsn(fqsn) return cls.from_broker_info( broker, @@ -240,7 +236,7 @@ class Symbol(BaseModel): ''' tokens = self.tokens() - fqsn = '.'.join(tokens) + fqsn = '.'.join(map(str.lower, tokens)) return fqsn def iterfqsns(self) -> list[str]: From 88b4ccc768d72f987e1106c6f71fcc073f1852e2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 13:25:08 -0400 Subject: [PATCH 08/58] Add API trade/exec entry parsing and ledger updates Since "flex reports" are only available for the current session's trades the day after, this adds support for also collecting trade execution records for the current session and writing them to the equivalent ledger file. Summary: - add `trades_to_records()` to handle parsing both flex and API event objects into a common record form. - add `norm_trade_records()` to handle converting ledger entries into `TradeRecord` types from the new `piker.pps` mod (coming in next commit). 
--- piker/brokers/ib/__init__.py | 5 +- piker/brokers/ib/api.py | 8 ++ piker/brokers/ib/broker.py | 209 ++++++++++++++++++++++++++++++----- 3 files changed, 193 insertions(+), 29 deletions(-) diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index 3f6504a1..5c329ecc 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -38,7 +38,10 @@ from .feed import ( open_symbol_search, stream_quotes, ) -from .broker import trades_dialogue +from .broker import ( + trades_dialogue, + norm_trade_records, +) __all__ = [ 'get_client', diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 044415fc..a68ee6fe 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -483,6 +483,14 @@ class Client: return con + async def get_con( + self, + conid: int, + ) -> Contract: + return await self.ib.qualifyContractsAsync( + ibis.Contract(conId=conid) + ) + async def find_contract( self, pattern: str, diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index f9fab51d..2792c517 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -28,6 +28,7 @@ from typing import ( AsyncIterator, ) +from bidict import bidict import trio from trio_typing import TaskStatus import tractor @@ -44,8 +45,10 @@ from ib_insync.objects import ( Execution, ) from ib_insync.objects import Position +import pendulum from piker import config +from piker.pp import TradeRecord from piker.log import get_console_log from piker.clearing._messages import ( BrokerdOrder, @@ -56,6 +59,7 @@ from piker.clearing._messages import ( BrokerdFill, BrokerdError, ) +from piker.data._source import Symbol from .api import ( _accounts2clients, _adhoc_futes_set, @@ -64,6 +68,7 @@ from .api import ( open_client_proxies, Client, ) +# from .feed import open_data_client def pack_position( @@ -95,7 +100,6 @@ def pack_position( symkey += f'.{expiry}' # TODO: options contracts into a sane format.. 
- return BrokerdPosition( broker='ib', account=pos.account, @@ -317,11 +321,72 @@ async def trades_dialogue( all_positions.append(msg.dict()) - trades: list[dict] = [] + trades_by_account: dict = {} + conf = get_config() for proxy in proxies.values(): - trades.append(await proxy.trades()) + trade_entries = await proxy.trades() + # { + # 'commissionReport': CommissionReport( + # execId='', + # commission=0.0, + # currency='', + # realizedPNL=0.0, + # yield_=0.0, + # yieldRedemptionDate=0), + # 'contract': { + # 'comboLegs': [], + # 'comboLegsDescrip': '', + # 'conId': 477837024, + # 'currency': 'USD', + # 'deltaNeutralContract': None, + # 'exchange': 'GLOBEX', + # 'includeExpired': False, + # 'lastTradeDateOrContractMonth': '20220617', + # 'localSymbol': 'MNQM2', + # 'multiplier': '2', + # 'primaryExchange': '', + # 'right': '?', + # 'secId': '', + # 'secIdType': '', + # 'secType': 'FUT', + # 'strike': 0.0, + # 'symbol': 'MNQ', + # 'tradingClass': 'MNQ' + # }, + # 'execution': Execution( + # execId='0000e1a7.62a2315f.01.01', + # time=1654801166.0, + # acctNumber='DU5612476', + # exchange='GLOBEX', + # side='BOT', + # shares=1.0, + # price=12443.5, + # permId=778998556, + # clientId=6116, + # orderId=555, + # liquidation=0, + # cumQty=1.0, + # avgPrice=12443.5, + # orderRef='', + # evRule='', + # evMultiplier=0.0, + # modelCode='', + # lastLiquidity=1 + # ), + # 'time': 1654801166.0 + # } + trades_by_account.update( + trades_to_records( + conf['accounts'].inverse, + trade_entries, + ) + ) - log.info(f'Loaded {len(trades)} from this session') + for acctid, trades_by_id in trades_by_account.items(): + with config.open_trade_ledger('ib', acctid) as ledger: + ledger.update(trades_by_id) + + # log.info(f'Loaded {len(trades)} from this session') # TODO: write trades to local ``trades.toml`` # - use above per-session trades data and write to local file # - get the "flex reports" working and pull historical data and @@ -514,12 +579,114 @@ async def deliver_trade_events( 
await ems_stream.send(msg.dict()) +def norm_trade_records( + ledger: dict[str, Any], + +) -> dict[str, list[TradeRecord]]: + ''' + Normalize a flex report or API retrieved executions + ledger into our standard record format. + + ''' + records: list[TradeRecord] = [] + # async with open_data_client() as proxy: + for tid, record in ledger.items(): + # date, time = record['dateTime'] + # cost = record['cost'] + # action = record['buySell'] + conid = record.get('conId') or record['conid'] + comms = record.get('ibCommission', 0) + price = record.get('price') or record['tradePrice'] + size = record.get('shares') or record['quantity'] + + symbol = record['symbol'] + + # special handling of symbol extraction from + # flex records using some ad-hoc schema parsing. + instr = record.get('assetCategory') + if instr == 'FUT': + symbol = record['description'][:3] + + # try to build out piker fqsn from record. + expiry = record.get('lastTradeDateOrContractMonth') or record['expiry'] + exch = record.get('listingExchange') or record['exchange'] + + fqsn = Symbol.from_broker_info( + broker='ib', + symbol=symbol, + suffix=f'{exch}.{expiry}', + info={}, + ).front_fqsn() + + # NOTE: for flex records the normal fields won't be available so + # we have to do a lookup at some point to reverse map the conid + # to a fqsn. + + # con = await proxy.get_con(conid) + + records.append(TradeRecord( + fqsn=fqsn, + tid=tid, + size=size, + price=price, + cost=comms, + symkey=conid, + )) + + return records + + +def trades_to_records( + accounts: bidict, + trade_entries: list[object], + source_type: str = 'api', + +) -> dict: + + trades_by_account = {} + + for t in trade_entries: + if source_type == 'flex': + entry = t.__dict__ + + # oddly for some so-called "BookTrade" entries + # this field seems to be blank, no cuckin clue. + # trade['ibExecID'] + + # XXX: LOL apparently ``toml`` has a bug + # where a section key error will show up in the write + # if you leave this as an ``int``? 
+ tid = str(entry['tradeID']) + # date = str(entry['tradeDate']) + acctid = accounts[str(entry['accountId'])] + + elif source_type == 'api': + entry = {} + for section, obj in t.items(): + match section: + case 'commisionReport' | 'execution': + entry.update(asdict(obj)) + + case 'contract': + entry.update(obj) + + tid = str(entry['execId']) + dt = pendulum.from_timestamp(entry['time']) + entry['date'] = str(dt) + acctid = accounts[entry['acctNumber']] + + trades_by_account.setdefault( + acctid, {} + )[tid] = entry + + return trades_by_account + + def load_flex_trades( path: Optional[str] = None, ) -> dict[str, str]: - from pprint import pprint from ib_insync import flexreport, util conf = get_config() @@ -555,33 +722,19 @@ def load_flex_trades( report = flexreport.FlexReport(path=path) trade_entries = report.extract('Trade') + trades_by_account = trades_to_records( + # get reverse map to user account names + conf['accounts'].inverse, + trade_entries, + source_type='flex', + ) - # get reverse map to user account names - accounts = conf['accounts'].inverse - trades_by_account = {} - - for t in trade_entries: - - # XXX: LOL apparently ``toml`` has a bug - # where a section key error will show up in the write - # if you leave this as an ``int``? - trade = t.__dict__ - # oddly for some so-called "BookTrade" entries - # this field seems to be blank, no cuckin clue. 
- # trade['ibExecID'] - tid = str(trade['tradeID']) - date = str(trade['tradeDate']) - acctid = accounts[str(trade['accountId'])] - trades_by_account.setdefault( - acctid, {} - ).setdefault(date, {})[tid] = trade - - ln = len(trades_by_account.values()) - log.info(f'Loaded {ln} trades from flex query') + # ln = len(trades) + # log.info(f'Loaded {ln} trades from flex query') for acctid, trades_by_id in trades_by_account.items(): with config.open_trade_ledger('ib', acctid) as ledger: - ledger.update({'ib': trades_by_id}) + ledger.update(trades_by_id) if __name__ == '__main__': From f8f7ca350c60d8fe9fb7fed543543733a61c8e3a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 13:28:34 -0400 Subject: [PATCH 09/58] Extend trade-record tools, add ledger to pps extraction Add a `TradeRecord` struct which holds the minimal field set to build out position entries. Add `.update_pps()` to convert a set of records into LIFO position entries, optionally allowing for an update to some existing pp input set. Add `load_pps_from_ledger()` which does a full ledger extraction to pp objects, ready for writing a `pps.toml`. --- piker/pp.py | 118 +++++++++++++++++++++++++++++++++++----------------- 1 file changed, 79 insertions(+), 39 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index cdbcd0d5..719defc3 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -31,6 +31,20 @@ from msgspec import Struct from . import config from .clearing._messages import BrokerdPosition, Status from .data._source import Symbol +from .brokers import get_brokermod + + +class TradeRecord(Struct): + fqsn: str # normally fqsn + tid: Union[str, int] + size: float + price: float + cost: float # commisions or other additional costs + + # optional key normally derived from the broker + # backend which ensures the instrument-symbol this record + # is for is truly unique. 
+ symkey: Optional[Union[str, int]] = None class Position(Struct): @@ -47,7 +61,13 @@ class Position(Struct): avg_price: float # TODO: contextual pricing # ordered record of known constituent trade messages - fills: list[Status] = [] + fills: list[Union[str, int, Status]] = [] + + def to_dict(self): + return { + f: getattr(self, f) + for f in self.__struct_fields__ + } def update_from_msg( self, @@ -122,56 +142,76 @@ class Position(Struct): return new_size, self.avg_price -def parse_pps( +def update_pps( + brokername: str, + ledger: dict[str, Union[str, float]], + pps: Optional[dict[str, TradeRecord]] = None + +) -> dict[str, TradeRecord]: + ''' + Compile a set of positions from a trades ledger. + + ''' + brokermod = get_brokermod(brokername) + + pps: dict[str, Position] = pps or {} + records = brokermod.norm_trade_records(ledger) + for r in records: + key = r.symkey or r.fqsn + pp = pps.setdefault( + key, + Position( + Symbol.from_fqsn(r.fqsn, info={}), + size=0.0, + avg_price=0.0, + ) + ) + + # lifo style average price calc + pp.lifo_update(r.size, r.price) + pp.fills.append(r.tid) + + return pps + + +async def load_pps_from_ledger( brokername: str, acctname: str, - ledger: Optional[dict[str, Union[str, float]]] = None, - ) -> dict[str, Any]: - pps: dict[str, Position] = {} + with config.open_trade_ledger( + brokername, + acctname, + ) as ledger: + pass # readonly - if not ledger: - with config.open_trade_ledger( - brokername, - acctname, - ) as ledger: - pass # readonly + pps = update_pps(brokername, ledger) - by_date = ledger[brokername] + active_pps = {} + for k, pp in pps.items(): - for date, by_id in by_date.items(): - for tid, record in by_id.items(): + if pp.size == 0: + continue - # ib specific record parsing - # date, time = record['dateTime'] - # conid = record['condid'] - # cost = record['cost'] - # comms = record['ibCommission'] - symbol = record['symbol'] - price = record['tradePrice'] - # action = record['buySell'] - - # NOTE: can be -ve on sells - 
size = float(record['quantity']) - - pp = pps.setdefault( - symbol, - Position( - Symbol(key=symbol), - size=0.0, - avg_price=0.0, - ) - ) - - # LOFI style average price calc - pp.lifo_update(size, price) + active_pps[pp.symbol.front_fqsn()] = pp.to_dict() + # pprint({pp.symbol.front_fqsn(): pp.to_dict() for k, pp in pps.items()}) from pprint import pprint - pprint(pps) + pprint(active_pps) + # pprint({pp.symbol.front_fqsn(): pp.to_dict() for k, pp in pps.items()}) + + +def update_pps_conf( + trade_records: list[TradeRecord], +): + conf, path = config.load('pp') + if __name__ == '__main__': - parse_pps('ib', 'algopaper') + import trio + trio.run( + load_pps_from_ledger, 'ib', 'algopaper', + ) From 2a641ab8b48dd723e4d535b9c37d4f543af8ea0a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 17:39:17 -0400 Subject: [PATCH 10/58] Call it `pps.toml`, allows toml passthrough kwargs --- piker/config.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/piker/config.py b/piker/config.py index 706ada5e..72ff8a41 100644 --- a/piker/config.py +++ b/piker/config.py @@ -113,7 +113,7 @@ if _parent_user: _conf_names: set[str] = { 'brokers', - 'pp', + 'pps', 'trades', 'watchlists', } @@ -240,6 +240,7 @@ def load( ''' path = path or get_conf_path(conf_name) + if not os.path.isfile(path): fn = _conf_fn_w_ext(conf_name) @@ -252,6 +253,9 @@ def load( # if one exists. 
if os.path.isfile(template): shutil.copyfile(template, path) + else: + with open(path, 'w'): + pass # touch config = toml.load(path) log.debug(f"Read config file {path}") @@ -262,6 +266,7 @@ def write( config: dict, # toml config as dict name: str = 'brokers', path: str = None, + **toml_kwargs, ) -> None: '''' @@ -285,7 +290,11 @@ def write( f"{path}" ) with open(path, 'w') as cf: - return toml.dump(config, cf) + return toml.dump( + config, + cf, + **toml_kwargs, + ) def load_accounts( From dd05ed13718bd698260d546707efeea54cf9a410 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 17:50:29 -0400 Subject: [PATCH 11/58] Implement updates and write to config: `pps.toml` Begins the position tracking incremental update API which supports both constructing a `pps.toml` both from trade ledgers as well diff-oriented incremental update from an existing config assumed to be previously generated from some prior ledger. New set of routines includes: - `_split_active()` a helper to split a position table into the active and closed positions (aka pps of size 0) for determining entry updates in the `pps.toml`. - `update_pps_conf()` to maybe load a `pps.toml` and update it from an input trades ledger including necessary (de)serialization to and from `Position` object form(s). - `load_pps_from_ledger()` a ledger parser-loader which constructs a table of pps strictly from the broker-account ledger data without any consideration for any existing pps file. Each "entry" in `pps.toml` also contains a `fills: list` attr (name may change) which references the set of trade records which make up its state since the last net-zero position in the instrument. 
--- piker/pp.py | 119 ++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 92 insertions(+), 27 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 719defc3..ed993334 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -144,24 +144,28 @@ class Position(Struct): def update_pps( brokername: str, - ledger: dict[str, Union[str, float]], - pps: Optional[dict[str, TradeRecord]] = None + records: dict[str, TradeRecord], -) -> dict[str, TradeRecord]: + pps: Optional[dict[str, Position]] = None + +) -> dict[str, Position]: ''' Compile a set of positions from a trades ledger. ''' - brokermod = get_brokermod(brokername) pps: dict[str, Position] = pps or {} - records = brokermod.norm_trade_records(ledger) + + # lifo update all pps from records for r in records: key = r.symkey or r.fqsn pp = pps.setdefault( key, Position( - Symbol.from_fqsn(r.fqsn, info={}), + Symbol.from_fqsn( + r.fqsn, + info={}, + ), size=0.0, avg_price=0.0, ) @@ -171,10 +175,30 @@ def update_pps( pp.lifo_update(r.size, r.price) pp.fills.append(r.tid) + assert len(set(pp.fills)) == len(pp.fills) return pps -async def load_pps_from_ledger( +def _split_active( + pps: dict[str, Position], + +) -> tuple[dict, dict]: + + active = {} + closed = {} + + for k, pp in pps.items(): + fqsn = pp.symbol.front_fqsn() + asdict = pp.to_dict() + if pp.size == 0: + closed[fqsn] = asdict + else: + active[fqsn] = asdict + + return active, closed + + +def load_pps_from_ledger( brokername: str, acctname: str, @@ -187,31 +211,72 @@ async def load_pps_from_ledger( ) as ledger: pass # readonly - pps = update_pps(brokername, ledger) - - active_pps = {} - for k, pp in pps.items(): - - if pp.size == 0: - continue - - active_pps[pp.symbol.front_fqsn()] = pp.to_dict() - # pprint({pp.symbol.front_fqsn(): pp.to_dict() for k, pp in pps.items()}) - - from pprint import pprint - pprint(active_pps) - # pprint({pp.symbol.front_fqsn(): pp.to_dict() for k, pp in pps.items()}) + brokermod = get_brokermod(brokername) + records = 
brokermod.norm_trade_records(ledger) + pps = update_pps( + brokername, + records, + ) + return _split_active(pps) def update_pps_conf( - trade_records: list[TradeRecord], + brokername: str, + acctid: str, + trade_records: Optional[list[TradeRecord]] = None, ): - conf, path = config.load('pp') + conf, path = config.load('pps') + brokersection = conf.setdefault(brokername, {}) + entries = brokersection.setdefault(acctid, {}) + if not entries: + + # no pps entry yet for this broker/account + active, closed = load_pps_from_ledger( + brokername, + acctid, + ) + + elif trade_records: + + # table for map-back to object form + pps = {} + + # load ``pps.toml`` config entries back into object form. + for fqsn, entry in entries.items(): + pps[fqsn] = Position( + Symbol.from_fqsn(fqsn, info={}), + size=entry['size'], + avg_price=entry['avg_price'], + ) + + pps = update_pps( + brokername, + trade_records, + pps=pps, + ) + active, closed = _split_active(pps) + + for fqsn in closed: + print(f'removing closed pp: {fqsn}') + entries.pop(fqsn, None) + + for fqsn, pp_dict in active.items(): + print(f'Updating active pp: {fqsn}') + + # normalize to a simpler flat dict format + _ = pp_dict.pop('symbol') + entries[fqsn.rstrip(f'.{brokername}')] = pp_dict + + config.write( + conf, + 'pps', + encoder=config.toml.Encoder(preserve=True), + ) + + from pprint import pprint + pprint(conf) if __name__ == '__main__': - import trio - trio.run( - load_pps_from_ledger, 'ib', 'algopaper', - ) + update_pps_conf('ib', 'algopaper') From 73fa320917f3c609dbef0610f3f752d14e9838ae Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Jun 2022 18:12:27 -0400 Subject: [PATCH 12/58] Cut schema-related comment down to major sections --- piker/brokers/ib/broker.py | 69 +++++++------------------------------- 1 file changed, 13 insertions(+), 56 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 2792c517..c79e87d6 100644 --- a/piker/brokers/ib/broker.py +++ 
b/piker/brokers/ib/broker.py @@ -325,62 +325,11 @@ async def trades_dialogue( conf = get_config() for proxy in proxies.values(): trade_entries = await proxy.trades() - # { - # 'commissionReport': CommissionReport( - # execId='', - # commission=0.0, - # currency='', - # realizedPNL=0.0, - # yield_=0.0, - # yieldRedemptionDate=0), - # 'contract': { - # 'comboLegs': [], - # 'comboLegsDescrip': '', - # 'conId': 477837024, - # 'currency': 'USD', - # 'deltaNeutralContract': None, - # 'exchange': 'GLOBEX', - # 'includeExpired': False, - # 'lastTradeDateOrContractMonth': '20220617', - # 'localSymbol': 'MNQM2', - # 'multiplier': '2', - # 'primaryExchange': '', - # 'right': '?', - # 'secId': '', - # 'secIdType': '', - # 'secType': 'FUT', - # 'strike': 0.0, - # 'symbol': 'MNQ', - # 'tradingClass': 'MNQ' - # }, - # 'execution': Execution( - # execId='0000e1a7.62a2315f.01.01', - # time=1654801166.0, - # acctNumber='DU5612476', - # exchange='GLOBEX', - # side='BOT', - # shares=1.0, - # price=12443.5, - # permId=778998556, - # clientId=6116, - # orderId=555, - # liquidation=0, - # cumQty=1.0, - # avgPrice=12443.5, - # orderRef='', - # evRule='', - # evMultiplier=0.0, - # modelCode='', - # lastLiquidity=1 - # ), - # 'time': 1654801166.0 - # } - trades_by_account.update( - trades_to_records( - conf['accounts'].inverse, - trade_entries, - ) + records = trades_to_records( + conf['accounts'].inverse, + trade_entries, ) + trades_by_account.update(records) for acctid, trades_by_id in trades_by_account.items(): with config.open_trade_ledger('ib', acctid) as ledger: @@ -620,7 +569,7 @@ def norm_trade_records( # NOTE: for flex records the normal fields won't be available so # we have to do a lookup at some point to reverse map the conid - # to a fqsn. + # to a fqsn? # con = await proxy.get_con(conid) @@ -661,6 +610,14 @@ def trades_to_records( acctid = accounts[str(entry['accountId'])] elif source_type == 'api': + # NOTE: example of schema we pull from the API client. 
+ # { + # 'commissionReport': CommissionReport(... + # 'contract': {... + # 'execution': Execution(... + # 'time': 1654801166.0 + # } + entry = {} for section, obj in t.items(): match section: From b629ce177d2fe65b329ede3c389c6a25189643a0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 11 Jun 2022 00:05:02 -0400 Subject: [PATCH 13/58] Ensure `.fills` are filled in during object construct.. --- piker/pp.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index ed993334..b300e7d8 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -158,7 +158,7 @@ def update_pps( # lifo update all pps from records for r in records: - key = r.symkey or r.fqsn + key = r.fqsn or r.symkey pp = pps.setdefault( key, Position( @@ -170,6 +170,10 @@ def update_pps( avg_price=0.0, ) ) + # don't do updates for ledger records we already have + # included in the current pps state. + if r.tid in pp.fills: + continue # lifo style average price calc pp.lifo_update(r.size, r.price) @@ -244,10 +248,17 @@ def update_pps_conf( # load ``pps.toml`` config entries back into object form. for fqsn, entry in entries.items(): - pps[fqsn] = Position( + pps[f'{fqsn}.{brokername}'] = Position( Symbol.from_fqsn(fqsn, info={}), size=entry['size'], avg_price=entry['avg_price'], + + # XXX: super critical, we need to be sure to include + # all pps.toml fills to avoid reusing fills that were + # already included in the current incremental update + # state, since today's records may have already been + # processed! 
+ fills=entry['fills'], ) pps = update_pps( @@ -271,11 +282,13 @@ def update_pps_conf( config.write( conf, 'pps', - encoder=config.toml.Encoder(preserve=True), + # encoder=config.toml.Encoder(preserve=True), ) - from pprint import pprint - pprint(conf) + return active + + # from pprint import pprint + # pprint(conf) if __name__ == '__main__': From ce1eb11b5979453904b499613d855ae23abe6148 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 11 Jun 2022 00:05:30 -0400 Subject: [PATCH 14/58] Use new ledger pps but cross-ref with what ib says --- piker/brokers/ib/broker.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index c79e87d6..263e9868 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -48,6 +48,7 @@ from ib_insync.objects import Position import pendulum from piker import config +from piker.pp import update_pps_conf from piker.pp import TradeRecord from piker.log import get_console_log from piker.clearing._messages import ( @@ -310,17 +311,17 @@ async def trades_dialogue( assert account in accounts_def accounts.add(account) + pp_msgs = {} for client in aioclients.values(): for pos in client.positions(): msg = pack_position(pos) msg.account = accounts_def.inverse[msg.account] + pp_msgs[msg.symbol] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') - all_positions.append(msg.dict()) - trades_by_account: dict = {} conf = get_config() for proxy in proxies.values(): @@ -335,6 +336,33 @@ async def trades_dialogue( with config.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) + records = norm_trade_records(trades_by_id) + active = update_pps_conf('ib', acctid, records) + for fqsn, pp in active.items(): + + ibppmsg = pp_msgs[fqsn.rstrip('.ib')] + msg = BrokerdPosition( + broker='ib', + # account=acctid + '.ib', + account=ibppmsg.account, + # XXX: the `.ib` is stripped..? 
+ symbol=ibppmsg.symbol, + currency=ibppmsg.currency, + size=pp['size'], + avg_price=pp['avg_price'], + ) + assert ibppmsg.size == msg.size + if ibppmsg.avg_price != msg.avg_price: + # TODO: make this a "propoganda" log level? + log.warning( + 'The mega-cucks at IB want you to believe with their ' + '"FIFO" positioning the following:\n' + f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' + f'piker, legitamous-ness, LIFO avg price: {msg.avg_price}' + ) + + all_positions.append(msg.dict()) + # log.info(f'Loaded {len(trades)} from this session') # TODO: write trades to local ``trades.toml`` # - use above per-session trades data and write to local file From de77c7d209a68ac1677757a9cde911225e7fd0eb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 11 Jun 2022 16:18:05 -0400 Subject: [PATCH 15/58] Better doc strings and detailed comments --- piker/brokers/ib/broker.py | 23 +++++++++++++++++++++- piker/pp.py | 40 ++++++++++++++++++++++++++------------ 2 files changed, 50 insertions(+), 13 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 263e9868..c032870f 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -312,9 +312,14 @@ async def trades_dialogue( accounts.add(account) pp_msgs = {} + + # process pp value reported from ib's system. we only use these + # to cross-check sizing since average pricing on their end uses + # the so called (bs) "FIFO" style which more or less results in + # a price that's not useful for traders who want to not lose + # money.. xb for client in aioclients.values(): for pos in client.positions(): - msg = pack_position(pos) msg.account = accounts_def.inverse[msg.account] pp_msgs[msg.symbol] = msg @@ -322,8 +327,14 @@ async def trades_dialogue( assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') + # built-out piker pps from trade ledger, underneath using + # LIFO style breakeven pricing calcs. 
trades_by_account: dict = {} conf = get_config() + + # retreive new trade executions from the last session + # and/or day's worth of trading and convert into trade + # records suitable for a local ledger file. for proxy in proxies.values(): trade_entries = await proxy.trades() records = trades_to_records( @@ -332,18 +343,27 @@ async def trades_dialogue( ) trades_by_account.update(records) + # write recent session's trades to the user's (local) ledger + # file. for acctid, trades_by_id in trades_by_account.items(): with config.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) + # (incrementally) update the user's pps in mem and + # in the `pps.toml`. records = norm_trade_records(trades_by_id) active = update_pps_conf('ib', acctid, records) + + # relay re-formatted pps as msgs to the ems. for fqsn, pp in active.items(): ibppmsg = pp_msgs[fqsn.rstrip('.ib')] msg = BrokerdPosition( broker='ib', # account=acctid + '.ib', + # XXX: ok so this is annoying, we're relaying + # an account name with the backend suffix prefixed + # but when reading accounts from ledgers account=ibppmsg.account, # XXX: the `.ib` is stripped..? symbol=ibppmsg.symbol, @@ -614,6 +634,7 @@ def norm_trade_records( def trades_to_records( + accounts: bidict, trade_entries: list[object], source_type: str = 'api', diff --git a/piker/pp.py b/piker/pp.py index b300e7d8..784192c8 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -21,7 +21,6 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. ''' from typing import ( - Any, Optional, Union, ) @@ -153,14 +152,15 @@ def update_pps( Compile a set of positions from a trades ledger. ''' - pps: dict[str, Position] = pps or {} # lifo update all pps from records for r in records: - key = r.fqsn or r.symkey + pp = pps.setdefault( - key, + r.fqsn or r.symkey, + + # if no existing pp, allocate fresh one. 
Position( Symbol.from_fqsn( r.fqsn, @@ -173,6 +173,11 @@ def update_pps( # don't do updates for ledger records we already have # included in the current pps state. if r.tid in pp.fills: + # NOTE: likely you'll see repeats of the same + # ``TradeRecord`` passed in here if/when you are restarting + # a ``brokerd.ib`` where the API will re-report trades from + # the current session, so we need to make sure we don't + # "double count" these in pp calculations. continue # lifo style average price calc @@ -187,7 +192,16 @@ def _split_active( pps: dict[str, Position], ) -> tuple[dict, dict]: + ''' + Split pps into those that are "active" (non-zero size) and "closed" + (zero size) and return in 2 dicts. + Returning the "closed" set is important for updating the pps state + in any ``pps.toml`` such that we remove entries which are no longer + part of any "VaR" set (well presumably, except of course your liquidity + asset could be full of "risk" XD ). + + ''' active = {} closed = {} @@ -207,8 +221,14 @@ def load_pps_from_ledger( brokername: str, acctname: str, -) -> dict[str, Any]: +) -> tuple[dict, dict]: + ''' + Open a ledger file by broker name and account and read in and + process any trade records into our normalized ``TradeRecord`` + form and then pass these into the position processing routine + and deliver the two dict-sets of the active and closed pps. + ''' with config.open_trade_ledger( brokername, acctname, @@ -217,10 +237,8 @@ def load_pps_from_ledger( brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) - pps = update_pps( - brokername, - records, - ) + pps = update_pps(brokername, records) + return _split_active(pps) @@ -282,14 +300,12 @@ def update_pps_conf( config.write( conf, 'pps', + # TODO: make nested tables and/or inline tables work? 
# encoder=config.toml.Encoder(preserve=True), ) return active - # from pprint import pprint - # pprint(conf) - if __name__ == '__main__': update_pps_conf('ib', 'algopaper') From 5d774bef907b4836346eeab478fc56699556d9a7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 13 Jun 2022 14:11:37 -0400 Subject: [PATCH 16/58] Move `open_trade_ledger()` to pp mod, add `get_pps()` --- piker/config.py | 46 -------------------- piker/pp.py | 113 +++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 106 insertions(+), 53 deletions(-) diff --git a/piker/config.py b/piker/config.py index 72ff8a41..cbe134ea 100644 --- a/piker/config.py +++ b/piker/config.py @@ -18,7 +18,6 @@ Broker configuration mgmt. """ -from contextlib import contextmanager as cm import platform import sys import os @@ -173,51 +172,6 @@ def get_conf_path( ) -@cm -def open_trade_ledger( - broker: str, - account: str, - -) -> str: - ''' - Indempotently create and read in a trade log file from the - ``/ledgers/`` directory. - - Files are named per broker account of the form - ``_.toml``. The ``accountname`` here is the - name as defined in the user's ``brokers.toml`` config. - - ''' - ldir = path.join(_config_dir, 'ledgers') - if not path.isdir(ldir): - os.makedirs(ldir) - - fname = f'trades_{broker}_{account}.toml' - tradesfile = path.join(ldir, fname) - - if not path.isfile(tradesfile): - log.info( - f'Creating new local trades ledger: {tradesfile}' - ) - with open(tradesfile, 'w') as cf: - pass # touch - try: - with open(tradesfile, 'r') as cf: - ledger = toml.load(tradesfile) - cpy = ledger.copy() - yield cpy - finally: - if cpy != ledger: - # TODO: show diff output? - # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries - print(f'Updating ledger for {tradesfile}:\n') - ledger.update(cpy) - - # we write on close the mutated ledger data - with open(tradesfile, 'w') as cf: - return toml.dump(ledger, cf) - - def repodir(): ''' Return the abspath to the repo directory. 
diff --git a/piker/pp.py b/piker/pp.py index 784192c8..7b775c77 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -20,17 +20,70 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. (looking at you `ib` and shitzy friends) ''' +from contextlib import contextmanager as cm +import os +from os import path from typing import ( + Any, Optional, Union, ) from msgspec import Struct +import toml from . import config +from .brokers import get_brokermod from .clearing._messages import BrokerdPosition, Status from .data._source import Symbol -from .brokers import get_brokermod +from .log import get_logger + +log = get_logger(__name__) + + +@cm +def open_trade_ledger( + broker: str, + account: str, + +) -> str: + ''' + Indempotently create and read in a trade log file from the + ``/ledgers/`` directory. + + Files are named per broker account of the form + ``_.toml``. The ``accountname`` here is the + name as defined in the user's ``brokers.toml`` config. + + ''' + ldir = path.join(config._config_dir, 'ledgers') + if not path.isdir(ldir): + os.makedirs(ldir) + + fname = f'trades_{broker}_{account}.toml' + tradesfile = path.join(ldir, fname) + + if not path.isfile(tradesfile): + log.info( + f'Creating new local trades ledger: {tradesfile}' + ) + with open(tradesfile, 'w') as cf: + pass # touch + try: + with open(tradesfile, 'r') as cf: + ledger = toml.load(tradesfile) + cpy = ledger.copy() + yield cpy + finally: + if cpy != ledger: + # TODO: show diff output? 
+ # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries + print(f'Updating ledger for {tradesfile}:\n') + ledger.update(cpy) + + # we write on close the mutated ledger data + with open(tradesfile, 'w') as cf: + return toml.dump(ledger, cf) class TradeRecord(Struct): @@ -40,6 +93,8 @@ class TradeRecord(Struct): price: float cost: float # commisions or other additional costs + # dt: datetime + # optional key normally derived from the broker # backend which ensures the instrument-symbol this record # is for is truly unique. @@ -106,6 +161,16 @@ class Position(Struct): size: float, price: float, + # TODO: idea: "real LIFO" dynamic positioning. + # - when a trade takes place where the pnl for + # the (set of) trade(s) is below the breakeven price + # it may be that the trader took a +ve pnl on a short(er) + # term trade in the same account. + # - in this case we could recalc the be price to + # be reverted back to it's prior value before the nearest term + # trade was opened.? + dynamic_breakeven_price: bool = False, + ) -> (float, float): ''' Incremental update using a LIFO-style weighted mean. @@ -191,7 +256,10 @@ def update_pps( def _split_active( pps: dict[str, Position], -) -> tuple[dict, dict]: +) -> tuple[ + dict[str, Any], + dict[str, Any], +]: ''' Split pps into those that are "active" (non-zero size) and "closed" (zero size) and return in 2 dicts. @@ -229,7 +297,7 @@ def load_pps_from_ledger( and deliver the two dict-sets of the active and closed pps. ''' - with config.open_trade_ledger( + with open_trade_ledger( brokername, acctname, ) as ledger: @@ -242,18 +310,34 @@ def load_pps_from_ledger( return _split_active(pps) +def get_pps( + brokername: str, + +) -> dict[str, Any]: + ''' + Read out broker-specific position entries from + incremental update file: ``pps.toml``. 
+ + ''' + conf, path = config.load('pps') + return conf.setdefault(brokername, {}) + + def update_pps_conf( brokername: str, acctid: str, trade_records: Optional[list[TradeRecord]] = None, -): + +) -> dict[str, Position]: + conf, path = config.load('pps') brokersection = conf.setdefault(brokername, {}) entries = brokersection.setdefault(acctid, {}) if not entries: - # no pps entry yet for this broker/account + # no pps entry yet for this broker/account so parse + # any available ledgers to build a pps state. active, closed = load_pps_from_ledger( brokername, acctid, @@ -286,6 +370,9 @@ def update_pps_conf( ) active, closed = _split_active(pps) + else: + raise ValueError('wut wut') + for fqsn in closed: print(f'removing closed pp: {fqsn}') entries.pop(fqsn, None) @@ -295,11 +382,16 @@ def update_pps_conf( # normalize to a simpler flat dict format _ = pp_dict.pop('symbol') - entries[fqsn.rstrip(f'.{brokername}')] = pp_dict + + # XXX: ugh, it's cuz we push the section under + # the broker name.. maybe we need to rethink this? + brokerless_key = fqsn.rstrip(f'.{brokername}') + entries[brokerless_key] = pp_dict config.write( conf, 'pps', + # TODO: make nested tables and/or inline tables work? 
# encoder=config.toml.Encoder(preserve=True), ) @@ -308,4 +400,11 @@ def update_pps_conf( if __name__ == '__main__': - update_pps_conf('ib', 'algopaper') + import sys + + args = sys.argv + assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`' + args = args[1:] + for acctid in args: + broker, name = acctid.split('.') + update_pps_conf(broker, name) From c1b63f4757fa223680c2dfc5b4eb8e52df0b8725 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Jun 2022 10:45:10 -0400 Subject: [PATCH 17/58] Use `IB.fills()` method for `Client.trades()` --- piker/brokers/ib/api.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index a68ee6fe..4581e200 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -42,12 +42,17 @@ from bidict import bidict import trio import tractor from tractor import to_asyncio +import ib_insync as ibis from ib_insync.wrapper import RequestError from ib_insync.contract import Contract, ContractDetails from ib_insync.order import Order from ib_insync.ticker import Ticker -from ib_insync.objects import Position -import ib_insync as ibis +from ib_insync.objects import ( + Position, + Fill, + Execution, + CommissionReport, +) from ib_insync.wrapper import Wrapper from ib_insync.client import Client as ib_Client import numpy as np @@ -264,24 +269,27 @@ class Client: async def trades(self) -> dict[str, Any]: ''' - Return list of trades from current session in ``dict``. + Return list of trade-fills from current session in ``dict``. 
''' - # orders = await self.ib.reqCompletedOrdersAsync( - # apiOnly=api_only - # ) - fills = await self.ib.reqExecutionsAsync() - norm_fills = [] + fills: list[Fill] = self.ib.fills() + norm_fills: list[dict] = [] for fill in fills: fill = fill._asdict() # namedtuple - for key, val in fill.copy().items(): - if isinstance(val, Contract): - fill[key] = asdict(val) + for key, val in fill.items(): + match val: + case Contract() | Execution() | CommissionReport(): + fill[key] = asdict(val) norm_fills.append(fill) return norm_fills + async def orders(self) -> list[Order]: + return await self.ib.reqAllOpenOrdersAsync( + apiOnly=False, + ) + async def bars( self, fqsn: str, @@ -1011,7 +1019,7 @@ async def load_aio_clients( for acct, client in _accounts2clients.items(): log.info(f'Disconnecting {acct}@{client}') client.ib.disconnect() - _client_cache.pop((host, port)) + _client_cache.pop((host, port), None) async def load_clients_for_trio( From 412138a75bf9189b358fcc9299f3b34a5ad7a777 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Jun 2022 14:58:21 -0400 Subject: [PATCH 18/58] Add transaction costs to "fills" This makes a few major changes but mostly is centered around including transaction (aka trade-clear) costs in the avg breakeven price calculation. TL;DR: - rename `TradeRecord` -> `Transaction`. - make `Position.fills` a `dict[str, float]` which holds each clear's cost value. - change `Transaction.symkey` -> `.bsuid` for "backend symbol unique id". - drop `brokername: str` arg to `update_pps()` - rename `._split_active()` -> `dump_active()` and use input keys verbatim in output map. - in `update_pps_conf()` always incrementally update from trade records even when no `pps.toml` exists yet since it may be both the case that the ledger needs loading **and** the caller is handing new records not yet in the ledger. 
--- piker/pp.py | 128 +++++++++++++++++++++++++++++----------------------- 1 file changed, 72 insertions(+), 56 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 7b775c77..7f363915 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -15,9 +15,9 @@ # along with this program. If not, see . ''' -Personal/Private position parsing, calculmating, summarizing in a way +Personal/Private position parsing, calculating, summarizing in a way that doesn't try to cuk most humans who prefer to not lose their moneys.. -(looking at you `ib` and shitzy friends) +(looking at you `ib` and dirt-bird friends) ''' from contextlib import contextmanager as cm @@ -86,9 +86,9 @@ def open_trade_ledger( return toml.dump(ledger, cf) -class TradeRecord(Struct): +class Transaction(Struct): fqsn: str # normally fqsn - tid: Union[str, int] + tid: Union[str, int] # unique transaction id size: float price: float cost: float # commisions or other additional costs @@ -98,7 +98,7 @@ class TradeRecord(Struct): # optional key normally derived from the broker # backend which ensures the instrument-symbol this record # is for is truly unique. - symkey: Optional[Union[str, int]] = None + bsuid: Optional[Union[str, int]] = None class Position(Struct): @@ -113,9 +113,14 @@ class Position(Struct): # last size and avg entry price size: float avg_price: float # TODO: contextual pricing + bsuid: str # ordered record of known constituent trade messages - fills: list[Union[str, int, Status]] = [] + fills: dict[ + Union[str, int, Status], # trade id + float, # cost + ] = {} + def to_dict(self): return { @@ -160,6 +165,7 @@ class Position(Struct): self, size: float, price: float, + cost: float = 0, # TODO: idea: "real LIFO" dynamic positioning. 
# - when a trade takes place where the pnl for @@ -198,6 +204,8 @@ class Position(Struct): self.avg_price = ( abs(size) * price # weight of current exec + + cost # transaction cost + + self.avg_price * abs(self.size) # weight of previous pp ) / abs(new_size) @@ -207,9 +215,7 @@ class Position(Struct): def update_pps( - brokername: str, - records: dict[str, TradeRecord], - + records: dict[str, Transaction], pps: Optional[dict[str, Position]] = None ) -> dict[str, Position]: @@ -223,7 +229,7 @@ def update_pps( for r in records: pp = pps.setdefault( - r.fqsn or r.symkey, + r.fqsn or r.bsuid, # if no existing pp, allocate fresh one. Position( @@ -233,27 +239,39 @@ def update_pps( ), size=0.0, avg_price=0.0, + bsuid=r.bsuid, ) ) # don't do updates for ledger records we already have # included in the current pps state. if r.tid in pp.fills: # NOTE: likely you'll see repeats of the same - # ``TradeRecord`` passed in here if/when you are restarting + # ``Transaction`` passed in here if/when you are restarting # a ``brokerd.ib`` where the API will re-report trades from # the current session, so we need to make sure we don't # "double count" these in pp calculations. continue - # lifo style average price calc - pp.lifo_update(r.size, r.price) - pp.fills.append(r.tid) + # lifo style "breakeven" price calc + pp.lifo_update( + r.size, + r.price, + + # include transaction cost in breakeven price + # and presume the worst case of the same cost + # to exit this transaction (even though in reality + # it will be dynamic based on exit stratetgy). 
+ cost=2*r.cost, + ) + + # track clearing costs + pp.fills[r.tid] = r.cost assert len(set(pp.fills)) == len(pp.fills) return pps -def _split_active( +def dump_active( pps: dict[str, Position], ) -> tuple[ @@ -277,9 +295,9 @@ def _split_active( fqsn = pp.symbol.front_fqsn() asdict = pp.to_dict() if pp.size == 0: - closed[fqsn] = asdict + closed[k] = asdict else: - active[fqsn] = asdict + active[k] = asdict return active, closed @@ -292,7 +310,7 @@ def load_pps_from_ledger( ) -> tuple[dict, dict]: ''' Open a ledger file by broker name and account and read in and - process any trade records into our normalized ``TradeRecord`` + process any trade records into our normalized ``Transaction`` form and then pass these into the position processing routine and deliver the two dict-sets of the active and closed pps. @@ -305,9 +323,8 @@ def load_pps_from_ledger( brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) - pps = update_pps(brokername, records) - - return _split_active(pps) + pps = update_pps(records) + return dump_active(pps) def get_pps( @@ -326,57 +343,55 @@ def get_pps( def update_pps_conf( brokername: str, acctid: str, - trade_records: Optional[list[TradeRecord]] = None, + trade_records: Optional[list[Transaction]] = None, ) -> dict[str, Position]: conf, path = config.load('pps') brokersection = conf.setdefault(brokername, {}) - entries = brokersection.setdefault(acctid, {}) - - if not entries: + accountsection = pps = brokersection.setdefault(acctid, {}) + if not pps: # no pps entry yet for this broker/account so parse # any available ledgers to build a pps state. - active, closed = load_pps_from_ledger( + pps, closed = load_pps_from_ledger( brokername, acctid, ) - - elif trade_records: - - # table for map-back to object form - pps = {} - - # load ``pps.toml`` config entries back into object form. 
- for fqsn, entry in entries.items(): - pps[f'{fqsn}.{brokername}'] = Position( - Symbol.from_fqsn(fqsn, info={}), - size=entry['size'], - avg_price=entry['avg_price'], - - # XXX: super critical, we need to be sure to include - # all pps.toml fills to avoid reusing fills that were - # already included in the current incremental update - # state, since today's records may have already been - # processed! - fills=entry['fills'], + if not pps: + log.warning( + f'No trade history could be loaded for {brokername}:{acctid}' ) - pps = update_pps( - brokername, - trade_records, - pps=pps, + # unmarshal/load ``pps.toml`` config entries into object form. + pp_objs = {} + for fqsn, entry in pps.items(): + pp_objs[fqsn] = Position( + Symbol.from_fqsn(fqsn, info={}), + size=entry['size'], + avg_price=entry['avg_price'], + bsuid=entry['bsuid'], + + # XXX: super critical, we need to be sure to include + # all pps.toml fills to avoid reusing fills that were + # already included in the current incremental update + # state, since today's records may have already been + # processed! + fills=entry['fills'], ) - active, closed = _split_active(pps) - else: - raise ValueError('wut wut') + # update all pp objects from any (new) trade records which + # were passed in (aka incremental update case). + if trade_records: + pp_objs = update_pps( + trade_records, + pps=pp_objs, + ) - for fqsn in closed: - print(f'removing closed pp: {fqsn}') - entries.pop(fqsn, None) + active, closed = dump_active(pp_objs) + # dict-serialize all active pps + pp_entries = {} for fqsn, pp_dict in active.items(): print(f'Updating active pp: {fqsn}') @@ -386,8 +401,9 @@ def update_pps_conf( # XXX: ugh, it's cuz we push the section under # the broker name.. maybe we need to rethink this? 
brokerless_key = fqsn.rstrip(f'.{brokername}') - entries[brokerless_key] = pp_dict + pp_entries[brokerless_key] = pp_dict + conf[brokername][acctid] = pp_entries config.write( conf, 'pps', From 05a1a4e3d8730e8c7036321b9098731b42389686 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Jun 2022 16:22:30 -0400 Subject: [PATCH 19/58] Use new `Position.bsuid` field throughout --- piker/clearing/_allocate.py | 10 +++++++--- piker/clearing/_paper_engine.py | 1 + piker/ui/order_mode.py | 5 ++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index f14728a1..7ee2b0be 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -23,10 +23,9 @@ from typing import Optional from bidict import bidict from pydantic import BaseModel, validator -from msgspec import Struct +# from msgspec import Struct from ..data._source import Symbol -from ._messages import BrokerdPosition, Status from ..pp import Position @@ -202,7 +201,12 @@ class Allocator(BaseModel): if order_size < slot_size: # compute a fractional slots size to display slots_used = self.slots_used( - Position(symbol=sym, size=order_size, avg_price=price) + Position( + symbol=sym, + size=order_size, + avg_price=price, + bsuid=sym, + ) ) return { diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index a11e9ea2..9e70dce2 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -264,6 +264,7 @@ class PaperBoi: Symbol(key=symbol), size=pp_msg.size, avg_price=pp_msg.avg_price, + uid=symbol.front_fqsn(), ) pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 5ee53bd4..03132b8e 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -33,10 +33,10 @@ import trio from PyQt5.QtCore import Qt from .. 
import config +from ..pp import Position from ..clearing._client import open_ems, OrderBook from ..clearing._allocate import ( mk_allocator, - Position, ) from ._style import _font from ..data._source import Symbol @@ -607,6 +607,9 @@ async def open_order_mode( symbol=symbol, size=0, avg_price=0, + + # XXX: BLEH, do we care about this on the client side? + bsuid=symbol, ) msg = pps_by_account.get(account_name) if msg: From 82b718d5a35e76e5d6082759c0f023ce0e83fde0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Jun 2022 16:23:46 -0400 Subject: [PATCH 20/58] Many, many `ib` trade log schema hackz I don't want to rant too much any more since it's pretty clear `ib` has either zero concern for its (api) user's or a severely terrible data management team and/or general inter-team coordination system, but this patch more or less hacks the flex report records to be similar enough to API "execution" / "fill" records such that they can be similarly normalized and stored as well as processed for position calculations.. Dirty deats, - use the `IB.fills()` method for pulling current session trade events since it's both recommended in the docs and does seem to capture more extensive meta-data. - add a `update_ledger_from_api()` helper which does all the insane work of making sure api trade entries are usable both within piker's global fqsn system but also compatible with incremental updates of positions computed from trade ledgers derived from ib's "flex reports". - add "auditting" of `ib`'s reported positioning API messages by comparison with piker's new "traders first" breakeven price style and complain via logging on mismatches. - handle buy vs. sell arithmetic (via a +ve or -ve multiplier) to make "size" arithmetic work for API trade entries.. - draft out options contract transaction parsing but skip in pps generation for now. - always use the "execution id" as ledger keys both in flex and api trade processing. 
- for whatever weird reason `ib_insync` doesn't include the so called "primary exchange" in contracts reported in fill events, so do manual contract lookups in such cases such that pps entries can be placed in the right fqsn section... Still ToDo: - incremental update on trade clears / position updates - pps audit from ledger depending on user config? --- piker/brokers/ib/broker.py | 286 +++++++++++++++++++++++++------------ 1 file changed, 198 insertions(+), 88 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index c032870f..8f3ec44c 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -26,6 +26,7 @@ from typing import ( Any, Optional, AsyncIterator, + Union, ) from bidict import bidict @@ -48,8 +49,7 @@ from ib_insync.objects import Position import pendulum from piker import config -from piker.pp import update_pps_conf -from piker.pp import TradeRecord +from piker import pp from piker.log import get_console_log from piker.clearing._messages import ( BrokerdOrder, @@ -68,6 +68,7 @@ from .api import ( get_config, open_client_proxies, Client, + MethodProxy, ) # from .feed import open_data_client @@ -87,27 +88,30 @@ def pack_position( symbol = con.symbol.lower() exch = (con.primaryExchange or con.exchange).lower() - symkey = '.'.join((symbol, exch)) + fqsn = '.'.join((symbol, exch)) if not exch: # attempt to lookup the symbol from our # hacked set.. for sym in _adhoc_futes_set: if symbol in sym: - symkey = sym + fqsn = sym break expiry = con.lastTradeDateOrContractMonth if expiry: - symkey += f'.{expiry}' + fqsn += f'.{expiry}' # TODO: options contracts into a sane format.. 
- return BrokerdPosition( - broker='ib', - account=pos.account, - symbol=symkey, - currency=con.currency, - size=float(pos.position), - avg_price=float(pos.avgCost) / float(con.multiplier or 1.0), + return ( + con.conId, + BrokerdPosition( + broker='ib', + account=pos.account, + symbol=fqsn, + currency=con.currency, + size=float(pos.position), + avg_price=float(pos.avgCost) / float(con.multiplier or 1.0), + ), ) @@ -262,6 +266,70 @@ async def recv_trade_updates( await client.ib.disconnectedEvent +async def update_ledger_from_api_trades( + clients: list[Union[Client, MethodProxy]], + ib_pp_msgs: dict[int, BrokerdPosition], # conid -> msg + +) -> dict[str, Any]: + + # construct piker pps from trade ledger, underneath using + # LIFO style breakeven pricing calcs. + conf = get_config() + + # retreive new trade executions from the last session + # and/or day's worth of trading and convert into trade + # records suitable for a local ledger file. + trades_by_account: dict = {} + for client in clients: + + trade_entries = await client.trades() + + # XXX; ERRGGG.. + # pack in the "primary/listing exchange" value from a + # contract lookup since it seems this isn't available by + # default from the `.fills()` method endpoint... + for entry in trade_entries: + condict = entry['contract'] + conid = condict['conId'] + pexch = condict['primaryExchange'] + + if not pexch: + con = (await client.get_con(conid=conid))[0] + pexch = con.primaryExchange + + entry['listingExchange'] = pexch + + records = trades_to_records( + conf['accounts'].inverse, + trade_entries, + ) + trades_by_account.update(records) + + # write recent session's trades to the user's (local) ledger file. + for acctid, trades_by_id in trades_by_account.items(): + + with pp.open_trade_ledger('ib', acctid) as ledger: + ledger.update(trades_by_id) + + # (incrementally) update the user's pps in mem and + # in the `pps.toml`. 
+ records = norm_trade_records(trades_by_id) + + # remap stupid ledger fqsns (which are often + # filled with lesser venue/exchange values) to + # the ones we pull from the API via ib's reported + # positioning messages. + for r in records: + normed_msg = ib_pp_msgs[r.bsuid] + if normed_msg.symbol != r.fqsn: + log.warning( + f'Remapping ledger fqsn: {r.fqsn} -> {normed_msg.symbol}' + ) + r.fqsn = normed_msg.symbol + + pp.update_pps_conf('ib', acctid, records) + + @tractor.context async def trades_dialogue( @@ -311,7 +379,7 @@ async def trades_dialogue( assert account in accounts_def accounts.add(account) - pp_msgs = {} + cids2pps = {} # process pp value reported from ib's system. we only use these # to cross-check sizing since average pricing on their end uses @@ -320,65 +388,65 @@ async def trades_dialogue( # money.. xb for client in aioclients.values(): for pos in client.positions(): - msg = pack_position(pos) - msg.account = accounts_def.inverse[msg.account] - pp_msgs[msg.symbol] = msg + cid, msg = pack_position(pos) + msg.account = accounts_def.inverse[msg.account] + cids2pps[cid] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') - # built-out piker pps from trade ledger, underneath using - # LIFO style breakeven pricing calcs. - trades_by_account: dict = {} - conf = get_config() + # update trades ledgers for all accounts from + # connected api clients. + await update_ledger_from_api_trades( + proxies.values(), + cids2pps, # pass these in to map to correct fqsns.. + ) - # retreive new trade executions from the last session - # and/or day's worth of trading and convert into trade - # records suitable for a local ledger file. - for proxy in proxies.values(): - trade_entries = await proxy.trades() - records = trades_to_records( - conf['accounts'].inverse, - trade_entries, - ) - trades_by_account.update(records) - - # write recent session's trades to the user's (local) ledger - # file. 
- for acctid, trades_by_id in trades_by_account.items(): - with config.open_trade_ledger('ib', acctid) as ledger: - ledger.update(trades_by_id) - - # (incrementally) update the user's pps in mem and - # in the `pps.toml`. - records = norm_trade_records(trades_by_id) - active = update_pps_conf('ib', acctid, records) - - # relay re-formatted pps as msgs to the ems. - for fqsn, pp in active.items(): - - ibppmsg = pp_msgs[fqsn.rstrip('.ib')] + # load all positions from `pps.toml`, cross check with ib's + # positions data, and relay re-formatted pps as msgs to the ems. + for acctid, by_fqsn in pp.get_pps('ib').items(): + for fqsn, posdict in by_fqsn.items(): + ibppmsg = cids2pps[posdict['bsuid']] msg = BrokerdPosition( broker='ib', # account=acctid + '.ib', # XXX: ok so this is annoying, we're relaying # an account name with the backend suffix prefixed - # but when reading accounts from ledgers + # but when reading accounts from ledgers we don't + # need it and/or it's prefixed in the section + # table.. account=ibppmsg.account, # XXX: the `.ib` is stripped..? symbol=ibppmsg.symbol, currency=ibppmsg.currency, - size=pp['size'], - avg_price=pp['avg_price'], + size=posdict['size'], + avg_price=posdict['avg_price'], ) - assert ibppmsg.size == msg.size + print(msg) + ibsize = ibppmsg.size + pikersize = msg.size + diff = pikersize - ibsize + + # if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we + # thought. + if diff: + raise ValueError( + f'POSITION MISMATCH ib <-> piker ledger:\n' + f'ib: {ibsize}\n' + f'piker: {pikersize}\n' + 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' + ) + msg.size = ibsize + if ibppmsg.avg_price != msg.avg_price: + # TODO: make this a "propoganda" log level? 
log.warning( 'The mega-cucks at IB want you to believe with their ' - '"FIFO" positioning the following:\n' + f'"FIFO" positioning for {msg.symbol}:\n' f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' - f'piker, legitamous-ness, LIFO avg price: {msg.avg_price}' + f'piker, LIFO breakeven PnL price: {msg.avg_price}' ) all_positions.append(msg.dict()) @@ -545,7 +613,7 @@ async def deliver_trade_events( continue elif event_name == 'position': - msg = pack_position(item) + cid, msg = pack_position(item) msg.account = accounts_def.inverse[msg.account] elif event_name == 'event': @@ -579,25 +647,49 @@ async def deliver_trade_events( def norm_trade_records( ledger: dict[str, Any], -) -> dict[str, list[TradeRecord]]: +) -> dict[str, list[pp.Transaction]]: ''' Normalize a flex report or API retrieved executions ledger into our standard record format. ''' - records: list[TradeRecord] = [] - # async with open_data_client() as proxy: + records: list[pp.Transaction] = [] + for tid, record in ledger.items(): # date, time = record['dateTime'] # cost = record['cost'] # action = record['buySell'] conid = record.get('conId') or record['conid'] - comms = record.get('ibCommission', 0) + comms = record.get('commission') or -1*record['ibCommission'] price = record.get('price') or record['tradePrice'] - size = record.get('shares') or record['quantity'] + # the api doesn't do the -/+ on the quantity for you but flex + # records do.. are you fucking serious ib...!? + size = record.get('quantity') or record['shares'] * { + 'BOT': 1, + 'SLD': -1, + }[record['side']] + + exch = record['exchange'] + lexch = record.get('listingExchange') + + suffix = lexch or exch symbol = record['symbol'] + # likely an opts contract record from a flex report.. + # TODO: no idea how to parse ^ the strike part from flex.. + # (00010000 any, or 00007500 tsla, ..) + # we probably must do the contract lookup for this? 
+ if ' ' in symbol or '--' in exch: + underlying, _, tail = symbol.partition(' ') + suffix = exch = 'opt' + expiry = tail[:6] + # otype = tail[6] + # strike = tail[7:] + + print(f'skipping opts contract {symbol}') + continue + # special handling of symbol extraction from # flex records using some ad-hoc schema parsing. instr = record.get('assetCategory') @@ -605,15 +697,16 @@ def norm_trade_records( symbol = record['description'][:3] # try to build out piker fqsn from record. - expiry = record.get('lastTradeDateOrContractMonth') or record['expiry'] - exch = record.get('listingExchange') or record['exchange'] + expiry = record.get( + 'lastTradeDateOrContractMonth') or record.get('expiry') + if expiry: + expiry = str(expiry).strip(' ') + suffix = f'{exch}.{expiry}' - fqsn = Symbol.from_broker_info( - broker='ib', - symbol=symbol, - suffix=f'{exch}.{expiry}', + fqsn = Symbol.from_fqsn( + fqsn=f'{symbol}.{suffix}.ib', info={}, - ).front_fqsn() + ).front_fqsn().rstrip('.ib') # NOTE: for flex records the normal fields won't be available so # we have to do a lookup at some point to reverse map the conid @@ -621,41 +714,50 @@ def norm_trade_records( # con = await proxy.get_con(conid) - records.append(TradeRecord( + records.append(pp.Transaction( fqsn=fqsn, tid=tid, size=size, price=price, cost=comms, - symkey=conid, + bsuid=conid, )) return records def trades_to_records( - accounts: bidict, trade_entries: list[object], source_type: str = 'api', ) -> dict: + ''' + Convert either of API execution objects or flex report + entry objects into ``dict`` form, pretty much straight up + without modification. + ''' trades_by_account = {} for t in trade_entries: if source_type == 'flex': entry = t.__dict__ + # XXX: LOL apparently ``toml`` has a bug + # where a section key error will show up in the write + # if you leave a table key as an `int`? So i guess + # cast to strs for all keys.. + # oddly for some so-called "BookTrade" entries # this field seems to be blank, no cuckin clue. 
# trade['ibExecID'] - - # XXX: LOL apparently ``toml`` has a bug - # where a section key error will show up in the write - # if you leave this as an ``int``? - tid = str(entry['tradeID']) + tid = str(entry.get('ibExecID') or entry['tradeID']) # date = str(entry['tradeDate']) + + # XXX: is it going to cause problems if a account name + # get's lost? The user should be able to find it based + # on the actual exec history right? acctid = accounts[str(entry['accountId'])] elif source_type == 'api': @@ -667,17 +769,19 @@ def trades_to_records( # 'time': 1654801166.0 # } + # flatten all sub-dicts and values into one top level entry. entry = {} - for section, obj in t.items(): + for section, val in t.items(): match section: - case 'commisionReport' | 'execution': - entry.update(asdict(obj)) - - case 'contract': - entry.update(obj) + case 'contract' | 'execution' | 'commissionReport': + # sub-dict cases + entry.update(val) + case _: + entry[section] = val tid = str(entry['execId']) dt = pendulum.from_timestamp(entry['time']) + # TODO: why isn't this showing seconds in the str? entry['date'] = str(dt) acctid = accounts[entry['acctNumber']] @@ -691,7 +795,7 @@ def trades_to_records( def load_flex_trades( path: Optional[str] = None, -) -> dict[str, str]: +) -> dict[str, Any]: from ib_insync import flexreport, util @@ -704,10 +808,10 @@ def load_flex_trades( token = conf.get('flex_token') if not token: raise ValueError( - 'You must specify a ``flex_token`` field in your' - '`brokers.toml` in order load your trade log, see our' - 'intructions for how to set this up here:\n' - 'PUT LINK HERE!' + 'You must specify a ``flex_token`` field in your' + '`brokers.toml` in order load your trade log, see our' + 'intructions for how to set this up here:\n' + 'PUT LINK HERE!' 
) qid = conf['flex_trades_query_id'] @@ -728,6 +832,10 @@ def load_flex_trades( report = flexreport.FlexReport(path=path) trade_entries = report.extract('Trade') + ln = len(trade_entries) + # log.info(f'Loaded {ln} trades from flex query') + print(f'Loaded {ln} trades from flex query') + trades_by_account = trades_to_records( # get reverse map to user account names conf['accounts'].inverse, @@ -735,13 +843,15 @@ def load_flex_trades( source_type='flex', ) - # ln = len(trades) - # log.info(f'Loaded {ln} trades from flex query') - + ledgers = {} for acctid, trades_by_id in trades_by_account.items(): - with config.open_trade_ledger('ib', acctid) as ledger: + with pp.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) + ledgers[acctid] = ledger + + return ledgers + if __name__ == '__main__': import sys From cd3bfb1ea42a26a9197a05ea3cc832adbf0be1dd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 15 Jun 2022 09:55:32 -0400 Subject: [PATCH 21/58] Maybe load from ledger in `get_pps()`, allow account filtering --- piker/pp.py | 37 +++++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 7f363915..bcba3ffd 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -121,7 +121,6 @@ class Position(Struct): float, # cost ] = {} - def to_dict(self): return { f: getattr(self, f) @@ -292,7 +291,6 @@ def dump_active( closed = {} for k, pp in pps.items(): - fqsn = pp.symbol.front_fqsn() asdict = pp.to_dict() if pp.size == 0: closed[k] = asdict @@ -321,6 +319,10 @@ def load_pps_from_ledger( ) as ledger: pass # readonly + if not ledger: + # null case, no ledger file with content + return {}, {} + brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) pps = update_pps(records) @@ -329,6 +331,7 @@ def load_pps_from_ledger( def get_pps( brokername: str, + acctids: Optional[set[str]] = set(), ) -> dict[str, Any]: ''' @@ -337,7 +340,33 @@ def get_pps( ''' conf, path = 
config.load('pps') - return conf.setdefault(brokername, {}) + brokersection = conf.setdefault(brokername, {}) + + all_active = {} + + # try to load any ledgers if no section found + if not brokersection: + bconf, path = config.load('brokers') + accounts = bconf[brokername]['accounts'] + for account in accounts: + + # TODO: instead of this filter we could + # always send all known pps but just not audit + # them since an active client might not be up? + if ( + acctids and + f'{brokername}.{account}' not in acctids + ): + continue + + active = update_pps_conf(brokername, account) + all_active.update(active) + + # reload pps after ledger updates + conf, path = config.load('pps') + brokersection = conf.setdefault(brokername, {}) + + return brokersection def update_pps_conf( @@ -349,7 +378,7 @@ def update_pps_conf( conf, path = config.load('pps') brokersection = conf.setdefault(brokername, {}) - accountsection = pps = brokersection.setdefault(acctid, {}) + pps = brokersection.setdefault(acctid, {}) if not pps: # no pps entry yet for this broker/account so parse From cbcbb2b24350ea522ecfcc2c48e8f1a737b7ac18 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 15 Jun 2022 09:56:14 -0400 Subject: [PATCH 22/58] Filter pps loading to client-active accounts set --- piker/brokers/ib/broker.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 8f3ec44c..aa3fa07c 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -380,6 +380,7 @@ async def trades_dialogue( accounts.add(account) cids2pps = {} + used_accounts = set() # process pp value reported from ib's system. 
we only use these # to cross-check sizing since average pricing on their end uses @@ -390,7 +391,8 @@ async def trades_dialogue( for pos in client.positions(): cid, msg = pack_position(pos) - msg.account = accounts_def.inverse[msg.account] + acctid = msg.account = accounts_def.inverse[msg.account] + used_accounts.add(acctid) cids2pps[cid] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') @@ -404,12 +406,14 @@ async def trades_dialogue( # load all positions from `pps.toml`, cross check with ib's # positions data, and relay re-formatted pps as msgs to the ems. - for acctid, by_fqsn in pp.get_pps('ib').items(): + for acctid, by_fqsn in pp.get_pps( + 'ib', acctids=used_accounts, + ).items(): for fqsn, posdict in by_fqsn.items(): ibppmsg = cids2pps[posdict['bsuid']] msg = BrokerdPosition( broker='ib', - # account=acctid + '.ib', + # XXX: ok so this is annoying, we're relaying # an account name with the backend suffix prefixed # but when reading accounts from ledgers we don't @@ -451,6 +455,10 @@ async def trades_dialogue( all_positions.append(msg.dict()) + if not all_positions and cids2pps: + raise RuntimeError( + 'Positions report by ib but not found in `pps.toml` !?') + # log.info(f'Loaded {len(trades)} from this session') # TODO: write trades to local ``trades.toml`` # - use above per-session trades data and write to local file From 7b2e8f1ba51d71af3f3389384b41fe97ae135814 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 15 Jun 2022 11:55:26 -0400 Subject: [PATCH 23/58] Return object form from `update_pps_conf()` --- piker/pp.py | 43 ++++++++++++++++++++----------------------- 1 file changed, 20 insertions(+), 23 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index bcba3ffd..a3aab3d5 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -333,40 +333,33 @@ def get_pps( brokername: str, acctids: Optional[set[str]] = set(), -) -> dict[str, Any]: +) -> dict[str, dict[str, Position]]: ''' Read out broker-specific position entries 
from incremental update file: ``pps.toml``. ''' conf, path = config.load('pps') - brokersection = conf.setdefault(brokername, {}) - all_active = {} # try to load any ledgers if no section found - if not brokersection: - bconf, path = config.load('brokers') - accounts = bconf[brokername]['accounts'] - for account in accounts: + bconf, path = config.load('brokers') + accounts = bconf[brokername]['accounts'] + for account in accounts: - # TODO: instead of this filter we could - # always send all known pps but just not audit - # them since an active client might not be up? - if ( - acctids and - f'{brokername}.{account}' not in acctids - ): - continue + # TODO: instead of this filter we could + # always send all known pps but just not audit + # them since an active client might not be up? + if ( + acctids and + f'{brokername}.{account}' not in acctids + ): + continue - active = update_pps_conf(brokername, account) - all_active.update(active) + active = update_pps_conf(brokername, account) + all_active.setdefault(account, {}).update(active) - # reload pps after ledger updates - conf, path = config.load('pps') - brokersection = conf.setdefault(brokername, {}) - - return brokersection + return all_active def update_pps_conf( @@ -432,6 +425,9 @@ def update_pps_conf( brokerless_key = fqsn.rstrip(f'.{brokername}') pp_entries[brokerless_key] = pp_dict + for fqsn in closed: + pp_objs.pop(fqsn, None) + conf[brokername][acctid] = pp_entries config.write( conf, @@ -441,7 +437,8 @@ def update_pps_conf( # encoder=config.toml.Encoder(preserve=True), ) - return active + # deliver object form of all pps in table to caller + return pp_objs if __name__ == '__main__': From 3991d8f9112ccb8699c8691651bc17122d51934e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 15 Jun 2022 11:56:49 -0400 Subject: [PATCH 24/58] Add `update_and_audit()` in prep for rt per-trade-event pp udpates --- piker/brokers/ib/broker.py | 174 ++++++++++++++++++++++--------------- 1 file changed, 102 
insertions(+), 72 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index aa3fa07c..c2967d1b 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -267,8 +267,9 @@ async def recv_trade_updates( async def update_ledger_from_api_trades( - clients: list[Union[Client, MethodProxy]], + trade_entries: dict[str, Any], ib_pp_msgs: dict[int, BrokerdPosition], # conid -> msg + client: Union[Client, MethodProxy], ) -> dict[str, Any]: @@ -279,34 +280,34 @@ async def update_ledger_from_api_trades( # retreive new trade executions from the last session # and/or day's worth of trading and convert into trade # records suitable for a local ledger file. - trades_by_account: dict = {} - for client in clients: + # trades_by_account: dict = {} + # for client in clients: - trade_entries = await client.trades() + # trade_entries = await client.trades() - # XXX; ERRGGG.. - # pack in the "primary/listing exchange" value from a - # contract lookup since it seems this isn't available by - # default from the `.fills()` method endpoint... - for entry in trade_entries: - condict = entry['contract'] - conid = condict['conId'] - pexch = condict['primaryExchange'] + # XXX; ERRGGG.. + # pack in the "primary/listing exchange" value from a + # contract lookup since it seems this isn't available by + # default from the `.fills()` method endpoint... 
+ for entry in trade_entries: + condict = entry['contract'] + conid = condict['conId'] + pexch = condict['primaryExchange'] - if not pexch: - con = (await client.get_con(conid=conid))[0] - pexch = con.primaryExchange + if not pexch: + con = (await client.get_con(conid=conid))[0] + pexch = con.primaryExchange - entry['listingExchange'] = pexch + entry['listingExchange'] = pexch - records = trades_to_records( - conf['accounts'].inverse, - trade_entries, - ) - trades_by_account.update(records) + records = trades_to_records( + conf['accounts'].inverse, + trade_entries, + ) + # trades_by_account.update(records) # write recent session's trades to the user's (local) ledger file. - for acctid, trades_by_id in trades_by_account.items(): + for acctid, trades_by_id in records.items(): with pp.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) @@ -327,7 +328,76 @@ async def update_ledger_from_api_trades( ) r.fqsn = normed_msg.symbol - pp.update_pps_conf('ib', acctid, records) + active = pp.update_pps_conf('ib', acctid, records) + + return active + + +async def update_and_audit( + by_fqsn: dict[str, pp.Position], + cids2pps: dict[int, BrokerdPosition], + +) -> list[BrokerdPosition]: + + msgs: list[BrokerdPosition] = [] + pps: dict[int, pp.Position] = {} + + for fqsn, p in by_fqsn.items(): + bsuid = p.bsuid + + # build trade-session-actor local table + # of pps from unique symbol ids. + pps[bsuid] = p + + # retreive equivalent ib reported position message + # for comparison/audit versus the piker equivalent + # breakeven pp calcs. + ibppmsg = cids2pps[bsuid] + + msg = BrokerdPosition( + broker='ib', + + # XXX: ok so this is annoying, we're relaying + # an account name with the backend suffix prefixed + # but when reading accounts from ledgers we don't + # need it and/or it's prefixed in the section + # table.. + account=ibppmsg.account, + # XXX: the `.ib` is stripped..? 
+ symbol=ibppmsg.symbol, + currency=ibppmsg.currency, + size=p.size, + avg_price=p.avg_price, + ) + ibsize = ibppmsg.size + pikersize = msg.size + diff = pikersize - ibsize + + # if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we + # thought. + if diff: + raise ValueError( + f'POSITION MISMATCH ib <-> piker ledger:\n' + f'ib: {msg}\n' + f'piker: {ibppmsg}\n' + 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' + ) + msg.size = ibsize + + if ibppmsg.avg_price != msg.avg_price: + + # TODO: make this a "propoganda" log level? + log.warning( + 'The mega-cucks at IB want you to believe with their ' + f'"FIFO" positioning for {msg.symbol}:\n' + f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' + f'piker, LIFO breakeven PnL price: {msg.avg_price}' + ) + + msgs.append(msg) + + return msgs @tractor.context @@ -389,7 +459,6 @@ async def trades_dialogue( # money.. xb for client in aioclients.values(): for pos in client.positions(): - cid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] used_accounts.add(acctid) @@ -399,61 +468,21 @@ async def trades_dialogue( # update trades ledgers for all accounts from # connected api clients. - await update_ledger_from_api_trades( - proxies.values(), - cids2pps, # pass these in to map to correct fqsns.. - ) + for account, proxy in proxies.items(): + await update_ledger_from_api_trades( + await proxy.trades(), + cids2pps, # pass these in to map to correct fqsns.. + proxy, + ) # load all positions from `pps.toml`, cross check with ib's # positions data, and relay re-formatted pps as msgs to the ems. 
for acctid, by_fqsn in pp.get_pps( 'ib', acctids=used_accounts, ).items(): - for fqsn, posdict in by_fqsn.items(): - ibppmsg = cids2pps[posdict['bsuid']] - msg = BrokerdPosition( - broker='ib', - # XXX: ok so this is annoying, we're relaying - # an account name with the backend suffix prefixed - # but when reading accounts from ledgers we don't - # need it and/or it's prefixed in the section - # table.. - account=ibppmsg.account, - # XXX: the `.ib` is stripped..? - symbol=ibppmsg.symbol, - currency=ibppmsg.currency, - size=posdict['size'], - avg_price=posdict['avg_price'], - ) - print(msg) - ibsize = ibppmsg.size - pikersize = msg.size - diff = pikersize - ibsize - - # if ib reports a lesser pp it's not as bad since we can - # presume we're at least not more in the shit then we - # thought. - if diff: - raise ValueError( - f'POSITION MISMATCH ib <-> piker ledger:\n' - f'ib: {ibsize}\n' - f'piker: {pikersize}\n' - 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' - ) - msg.size = ibsize - - if ibppmsg.avg_price != msg.avg_price: - - # TODO: make this a "propoganda" log level? - log.warning( - 'The mega-cucks at IB want you to believe with their ' - f'"FIFO" positioning for {msg.symbol}:\n' - f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' - f'piker, LIFO breakeven PnL price: {msg.avg_price}' - ) - - all_positions.append(msg.dict()) + msgs = await update_and_audit(by_fqsn, cids2pps) + all_positions.extend(msg.dict() for msg in msgs) if not all_positions and cids2pps: raise RuntimeError( @@ -621,6 +650,7 @@ async def deliver_trade_events( continue elif event_name == 'position': + cid, msg = pack_position(item) msg.account = accounts_def.inverse[msg.account] From fbee33b00d7c4a23755d25f2f0780ccbafc83dbb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 16 Jun 2022 10:38:11 -0400 Subject: [PATCH 25/58] Get real-time trade oriented pp updates workin What a nightmare this was.. 
main holdup was that cost (commissions) reports are fired independent from "fills" so you can't really emit a proper full position update until they both arrive. Deatz: - move `push_tradesies()` and relay loop in `deliver_trade_events()` to the new py3.10 `match:` syntax B) - subscribe for, and handle `CommissionReport` events from `ib_insync` and repack as a `cost` event type. - handle cons with no primary/listing exchange (like futes) in `update_ledger_from_api_trades()` by falling back to the plain 'exchange' field. - drop reverse fqsn lookup from ib positions map; just use contract lookup for api trade logs since we're already connected.. - make validation in `update_and_audit()` optional via flag. - pass in the accounts def, ib pp msg table and the proxies table to the trade event relay task-loop. - add `emit_pp_update()` too encapsulate a full api trade entry incremental update which calls into the `piker.pp` apis to, - update the ledger - update the pps.toml - generate a new `BrokerdPosition` msg to send to the ems - adjust trades relay loop to only emit pp updates when a cost report arrives for the fill/execution by maintaining a small table per exec id. --- piker/brokers/ib/broker.py | 462 +++++++++++++++++++++++-------------- 1 file changed, 289 insertions(+), 173 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index c2967d1b..a8484941 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -44,6 +44,7 @@ from ib_insync.order import ( from ib_insync.objects import ( Fill, Execution, + CommissionReport, ) from ib_insync.objects import Position import pendulum @@ -214,19 +215,35 @@ async def recv_trade_updates( # sync with trio task to_trio.send_nowait(None) - def push_tradesies(eventkit_obj, obj, fill=None): - """Push events to trio task. + def push_tradesies( + eventkit_obj, + obj, + fill: Optional[Fill] = None, + report: Optional[CommissionReport] = None, + ): + ''' + Push events to trio task. 
- """ - if fill is not None: - # execution details event - item = ('fill', (obj, fill)) + ''' + match eventkit_obj.name(): - elif eventkit_obj.name() == 'positionEvent': - item = ('position', obj) + case 'orderStatusEvent': + item = ('status', obj) - else: - item = ('status', obj) + case 'commissionReportEvent': + assert report + item = ('cost', report) + + case 'execDetailsEvent': + # execution details event + item = ('fill', (obj, fill)) + + case 'positionEvent': + item = ('position', obj) + + case _: + log.error(f'Error unknown event {obj}') + return log.info(f'eventkit event ->\n{pformat(item)}') @@ -242,15 +259,15 @@ async def recv_trade_updates( 'execDetailsEvent', # all "fill" updates 'positionEvent', # avg price updates per symbol per account - # 'commissionReportEvent', # XXX: ugh, it is a separate event from IB and it's # emitted as follows: # self.ib.commissionReportEvent.emit(trade, fill, report) + 'commissionReportEvent', # XXX: not sure yet if we need these # 'updatePortfolioEvent', - # XXX: these all seem to be weird ib_insync intrernal + # XXX: these all seem to be weird ib_insync internal # events that we probably don't care that much about # given the internal design is wonky af.. # 'newOrderEvent', @@ -267,7 +284,7 @@ async def recv_trade_updates( async def update_ledger_from_api_trades( - trade_entries: dict[str, Any], + trade_entries: list[dict[str, Any]], ib_pp_msgs: dict[int, BrokerdPosition], # conid -> msg client: Union[Client, MethodProxy], @@ -277,14 +294,6 @@ async def update_ledger_from_api_trades( # LIFO style breakeven pricing calcs. conf = get_config() - # retreive new trade executions from the last session - # and/or day's worth of trading and convert into trade - # records suitable for a local ledger file. - # trades_by_account: dict = {} - # for client in clients: - - # trade_entries = await client.trades() - # XXX; ERRGGG.. 
# pack in the "primary/listing exchange" value from a # contract lookup since it seems this isn't available by @@ -295,8 +304,13 @@ async def update_ledger_from_api_trades( pexch = condict['primaryExchange'] if not pexch: - con = (await client.get_con(conid=conid))[0] - pexch = con.primaryExchange + cons = await client.get_con(conid=conid) + if cons: + con = cons[0] + pexch = con.primaryExchange or con.exchange + else: + # for futes it seems like the primary is always empty? + pexch = condict['exchange'] entry['listingExchange'] = pexch @@ -304,38 +318,29 @@ async def update_ledger_from_api_trades( conf['accounts'].inverse, trade_entries, ) - # trades_by_account.update(records) + actives = {} # write recent session's trades to the user's (local) ledger file. for acctid, trades_by_id in records.items(): with pp.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) - # (incrementally) update the user's pps in mem and - # in the `pps.toml`. + # normalize records = norm_trade_records(trades_by_id) - # remap stupid ledger fqsns (which are often - # filled with lesser venue/exchange values) to - # the ones we pull from the API via ib's reported - # positioning messages. - for r in records: - normed_msg = ib_pp_msgs[r.bsuid] - if normed_msg.symbol != r.fqsn: - log.warning( - f'Remapping ledger fqsn: {r.fqsn} -> {normed_msg.symbol}' - ) - r.fqsn = normed_msg.symbol - + # (incrementally) update the user's pps in mem and + # in the `pps.toml`. 
active = pp.update_pps_conf('ib', acctid, records) + actives.update(active) - return active + return actives async def update_and_audit( by_fqsn: dict[str, pp.Position], cids2pps: dict[int, BrokerdPosition], + validate: bool = False, ) -> list[BrokerdPosition]: @@ -369,21 +374,24 @@ async def update_and_audit( size=p.size, avg_price=p.avg_price, ) - ibsize = ibppmsg.size - pikersize = msg.size - diff = pikersize - ibsize + msgs.append(msg) - # if ib reports a lesser pp it's not as bad since we can - # presume we're at least not more in the shit then we - # thought. - if diff: - raise ValueError( - f'POSITION MISMATCH ib <-> piker ledger:\n' - f'ib: {msg}\n' - f'piker: {ibppmsg}\n' - 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' - ) - msg.size = ibsize + if validate: + ibsize = ibppmsg.size + pikersize = msg.size + diff = pikersize - ibsize + + # if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we + # thought. + if diff: + raise ValueError( + f'POSITION MISMATCH ib <-> piker ledger:\n' + f'ib: {ibppmsg}\n' + f'piker: {msg}\n' + 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' + ) + msg.size = ibsize if ibppmsg.avg_price != msg.avg_price: @@ -395,8 +403,6 @@ async def update_and_audit( f'piker, LIFO breakeven PnL price: {msg.avg_price}' ) - msgs.append(msg) - return msgs @@ -449,8 +455,8 @@ async def trades_dialogue( assert account in accounts_def accounts.add(account) - cids2pps = {} - used_accounts = set() + cids2pps: dict[str, BrokerdPosition] = {} + active_accts: set[str] = set() # process pp value reported from ib's system. 
we only use these # to cross-check sizing since average pricing on their end uses @@ -461,7 +467,7 @@ async def trades_dialogue( for pos in client.positions(): cid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] - used_accounts.add(acctid) + active_accts.add(acctid) cids2pps[cid] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') @@ -469,8 +475,9 @@ async def trades_dialogue( # update trades ledgers for all accounts from # connected api clients. for account, proxy in proxies.items(): + trades = await proxy.trades() await update_ledger_from_api_trades( - await proxy.trades(), + trades, cids2pps, # pass these in to map to correct fqsns.. proxy, ) @@ -478,10 +485,10 @@ async def trades_dialogue( # load all positions from `pps.toml`, cross check with ib's # positions data, and relay re-formatted pps as msgs to the ems. for acctid, by_fqsn in pp.get_pps( - 'ib', acctids=used_accounts, + 'ib', acctids=active_accts, ).items(): - msgs = await update_and_audit(by_fqsn, cids2pps) + msgs = await update_and_audit(by_fqsn, cids2pps, validate=True) all_positions.extend(msg.dict() for msg in msgs) if not all_positions and cids2pps: @@ -512,174 +519,278 @@ async def trades_dialogue( deliver_trade_events, stream, ems_stream, - accounts_def + accounts_def, + cids2pps, + proxies, ) # block until cancelled await trio.sleep_forever() +async def emit_pp_update( + ems_stream: tractor.MsgStream, + trade_entry: dict, + accounts_def: bidict, + proxies: dict, + cids2pps: dict, + +) -> None: + + # compute and relay incrementally updated piker pp + acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']] + proxy = proxies[acctid] + await update_ledger_from_api_trades( + [trade_entry], + cids2pps, # pass these in to map to correct fqsns.. + proxy, + ) + # load all positions from `pps.toml`, cross check with + # ib's positions data, and relay re-formatted pps as + # msgs to the ems. 
+ for acctid, by_fqsn in pp.get_pps( + 'ib', + acctids={acctid}, + ).items(): + + # should only be one right? + msgs = await update_and_audit( + by_fqsn, + cids2pps, + validate=False, + ) + for msg in msgs: + await ems_stream.send(msg.dict()) + + async def deliver_trade_events( trade_event_stream: trio.MemoryReceiveChannel, ems_stream: tractor.MsgStream, accounts_def: dict[str, str], + cids2pps: dict[str, BrokerdPosition], + proxies: dict[str, MethodProxy], ) -> None: - '''Format and relay all trade events for a given client to the EMS. + ''' + Format and relay all trade events for a given client to emsd. ''' action_map = {'BOT': 'buy', 'SLD': 'sell'} + ids2fills: dict[str, dict] = {} # TODO: for some reason we can receive a ``None`` here when the # ib-gw goes down? Not sure exactly how that's happening looking # at the eventkit code above but we should probably handle it... async for event_name, item in trade_event_stream: - log.info(f'ib sending {event_name}:\n{pformat(item)}') - # TODO: templating the ib statuses in comparison with other - # brokers is likely the way to go: - # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313 - # short list: - # - PendingSubmit - # - PendingCancel - # - PreSubmitted (simulated orders) - # - ApiCancelled (cancelled by client before submission - # to routing) - # - Cancelled - # - Filled - # - Inactive (reject or cancelled but not by trader) + match event_name: + # TODO: templating the ib statuses in comparison with other + # brokers is likely the way to go: + # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313 + # short list: + # - PendingSubmit + # - PendingCancel + # - PreSubmitted (simulated orders) + # - ApiCancelled (cancelled by client before submission + # to routing) + # - Cancelled + # - Filled + # - Inactive (reject or cancelled but not by trader) - # XXX: here's some other sucky cases from the api 
- # - short-sale but securities haven't been located, in this - # case we should probably keep the order in some kind of - # weird state or cancel it outright? + # XXX: here's some other sucky cases from the api + # - short-sale but securities haven't been located, in this + # case we should probably keep the order in some kind of + # weird state or cancel it outright? - # status='PendingSubmit', message=''), - # status='Cancelled', message='Error 404, - # reqId 1550: Order held while securities are located.'), - # status='PreSubmitted', message='')], + # status='PendingSubmit', message=''), + # status='Cancelled', message='Error 404, + # reqId 1550: Order held while securities are located.'), + # status='PreSubmitted', message='')], - if event_name == 'status': + case 'status': - # XXX: begin normalization of nonsense ib_insync internal - # object-state tracking representations... + # XXX: begin normalization of nonsense ib_insync internal + # object-state tracking representations... - # unwrap needed data from ib_insync internal types - trade: Trade = item - status: OrderStatus = trade.orderStatus + # unwrap needed data from ib_insync internal types + trade: Trade = item + status: OrderStatus = trade.orderStatus - # skip duplicate filled updates - we get the deats - # from the execution details event - msg = BrokerdStatus( + # skip duplicate filled updates - we get the deats + # from the execution details event + msg = BrokerdStatus( - reqid=trade.order.orderId, - time_ns=time.time_ns(), # cuz why not - account=accounts_def.inverse[trade.order.account], + reqid=trade.order.orderId, + time_ns=time.time_ns(), # cuz why not + account=accounts_def.inverse[trade.order.account], - # everyone doin camel case.. - status=status.status.lower(), # force lower case + # everyone doin camel case.. 
+ status=status.status.lower(), # force lower case - filled=status.filled, - reason=status.whyHeld, + filled=status.filled, + reason=status.whyHeld, - # this seems to not be necessarily up to date in the - # execDetails event.. so we have to send it here I guess? - remaining=status.remaining, + # this seems to not be necessarily up to date in the + # execDetails event.. so we have to send it here I guess? + remaining=status.remaining, - broker_details={'name': 'ib'}, - ) + broker_details={'name': 'ib'}, + ) + await ems_stream.send(msg.dict()) - elif event_name == 'fill': + case 'fill': - # for wtv reason this is a separate event type - # from IB, not sure why it's needed other then for extra - # complexity and over-engineering :eyeroll:. - # we may just end up dropping these events (or - # translating them to ``Status`` msgs) if we can - # show the equivalent status events are no more latent. + # for wtv reason this is a separate event type + # from IB, not sure why it's needed other then for extra + # complexity and over-engineering :eyeroll:. + # we may just end up dropping these events (or + # translating them to ``Status`` msgs) if we can + # show the equivalent status events are no more latent. - # unpack ib_insync types - # pep-0526 style: - # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations - trade: Trade - fill: Fill - trade, fill = item - execu: Execution = fill.execution + # unpack ib_insync types + # pep-0526 style: + # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations + trade: Trade + fill: Fill - # TODO: normalize out commissions details? - details = { - 'contract': asdict(fill.contract), - 'execution': asdict(fill.execution), - 'commissions': asdict(fill.commissionReport), - 'broker_time': execu.time, # supposedly server fill time - 'name': 'ib', - } + # TODO: maybe we can use matching to better handle these cases. 
+ trade, fill = item + execu: Execution = fill.execution + execid = execu.execId - msg = BrokerdFill( - # should match the value returned from `.submit_limit()` - reqid=execu.orderId, - time_ns=time.time_ns(), # cuz why not + # TODO: + # - normalize out commissions details? + # - this is the same as the unpacking loop above in + # ``trades_to_records()`` no? + trade_entry = ids2fills.setdefault(execid, {}) + cost_already_rx = bool(trade_entry) - action=action_map[execu.side], - size=execu.shares, - price=execu.price, + # if the costs report was already received this + # should be not empty right? + comms = fill.commissionReport.commission + if cost_already_rx: + assert comms - broker_details=details, - # XXX: required by order mode currently - broker_time=details['broker_time'], + trade_entry.update( + { + 'contract': asdict(fill.contract), + 'execution': asdict(fill.execution), + 'commissionReport': asdict(fill.commissionReport), + # supposedly server fill time? + 'broker_time': execu.time, + 'name': 'ib', + } + ) - ) + msg = BrokerdFill( + # should match the value returned from `.submit_limit()` + reqid=execu.orderId, + time_ns=time.time_ns(), # cuz why not - elif event_name == 'error': + action=action_map[execu.side], + size=execu.shares, + price=execu.price, - err: dict = item + broker_details=trade_entry, + # XXX: required by order mode currently + broker_time=trade_entry['broker_time'], - # f$#$% gawd dammit insync.. - con = err['contract'] - if isinstance(con, Contract): - err['contract'] = asdict(con) + ) + await ems_stream.send(msg.dict()) - if err['reqid'] == -1: - log.error(f'TWS external order error:\n{pformat(err)}') + # 2 cases: + # - fill comes first or + # - comms report comes first + comms = fill.commissionReport.commission + if comms: + # UGHHH since the commision report object might be + # filled in **after** we already serialized to dict.. + # def need something better for all this. 
+ trade_entry.update( + {'commissionReport': asdict(fill.commissionReport)} + ) - # TODO: what schema for this msg if we're going to make it - # portable across all backends? - # msg = BrokerdError(**err) - continue + if comms or cost_already_rx: + # only send a pp update once we have a cost report + print("EMITTING PP") + await emit_pp_update( + ems_stream, + trade_entry, + accounts_def, + proxies, + cids2pps, + ) - elif event_name == 'position': + case 'cost': - cid, msg = pack_position(item) - msg.account = accounts_def.inverse[msg.account] + cr: CommissionReport = item + execid = cr.execId - elif event_name == 'event': + trade_entry = ids2fills.setdefault(execid, {}) + fill_already_rx = bool(trade_entry) - # it's either a general system status event or an external - # trade event? - log.info(f"TWS system status: \n{pformat(item)}") + # no fill msg has arrived yet so just fill out the + # cost report for now and when the fill arrives a pp + # msg can be emitted. + trade_entry.update( + {'commissionReport': asdict(cr)} + ) - # TODO: support this again but needs parsing at the callback - # level... - # reqid = item.get('reqid', 0) - # if getattr(msg, 'reqid', 0) < -1: - # log.info(f"TWS triggered trade\n{pformat(msg.dict())}") + if fill_already_rx: + print("EMITTING PP") + await emit_pp_update( + ems_stream, + trade_entry, + accounts_def, + proxies, + cids2pps, + ) - continue + case 'error': + err: dict = item - # msg.reqid = 'tws-' + str(-1 * reqid) + # f$#$% gawd dammit insync.. + con = err['contract'] + if isinstance(con, Contract): + err['contract'] = asdict(con) - # mark msg as from "external system" - # TODO: probably something better then this.. 
and start - # considering multiplayer/group trades tracking - # msg.broker_details['external_src'] = 'tws' + if err['reqid'] == -1: + log.error(f'TWS external order error:\n{pformat(err)}') - # XXX: we always serialize to a dict for msgpack - # translations, ideally we can move to an msgspec (or other) - # encoder # that can be enabled in ``tractor`` ahead of - # time so we can pass through the message types directly. - await ems_stream.send(msg.dict()) + # TODO: what schema for this msg if we're going to make it + # portable across all backends? + # msg = BrokerdError(**err) + + case 'position': + + cid, msg = pack_position(item) + # acctid = msg.account = accounts_def.inverse[msg.account] + # cuck ib and it's shitty fifo sys for pps! + # await ems_stream.send(msg.dict()) + + case 'event': + + # it's either a general system status event or an external + # trade event? + log.info(f"TWS system status: \n{pformat(item)}") + + # TODO: support this again but needs parsing at the callback + # level... + # reqid = item.get('reqid', 0) + # if getattr(msg, 'reqid', 0) < -1: + # log.info(f"TWS triggered trade\n{pformat(msg.dict())}") + + # msg.reqid = 'tws-' + str(-1 * reqid) + + # mark msg as from "external system" + # TODO: probably something better then this.. and start + # considering multiplayer/group trades tracking + # msg.broker_details['external_src'] = 'tws' + + case _: + log.error(f'WTF: {event_name}: {item}') def norm_trade_records( @@ -814,6 +925,11 @@ def trades_to_records( case 'contract' | 'execution' | 'commissionReport': # sub-dict cases entry.update(val) + + case 'time': + # ib has wack ns timestamps, or is that us? 
+ continue + case _: entry[section] = val From 3dcb72d429ebb1c474ce300b3204541eb7044d5f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 16 Jun 2022 10:52:43 -0400 Subject: [PATCH 26/58] Only finally-write around the ledger yield up --- piker/pp.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index a3aab3d5..a6f61f56 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -69,11 +69,11 @@ def open_trade_ledger( ) with open(tradesfile, 'w') as cf: pass # touch + with open(tradesfile, 'r') as cf: + ledger = toml.load(tradesfile) + cpy = ledger.copy() try: - with open(tradesfile, 'r') as cf: - ledger = toml.load(tradesfile) - cpy = ledger.copy() - yield cpy + yield cpy finally: if cpy != ledger: # TODO: show diff output? From 5147cd7be088829dbbe77dc59fe83c22402a2809 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 16 Jun 2022 11:16:23 -0400 Subject: [PATCH 27/58] Drop global proxies table, isn't multi-task safe.. --- piker/brokers/ib/api.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 4581e200..1305e486 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -1048,9 +1048,6 @@ async def load_clients_for_trio( await asyncio.sleep(float('inf')) -_proxies: dict[str, MethodProxy] = {} - - @acm async def open_client_proxies() -> tuple[ dict[str, MethodProxy], @@ -1073,13 +1070,14 @@ async def open_client_proxies() -> tuple[ if cache_hit: log.info(f'Re-using cached clients: {clients}') + proxies = {} for acct_name, client in clients.items(): proxy = await stack.enter_async_context( open_client_proxy(client), ) - _proxies[acct_name] = proxy + proxies[acct_name] = proxy - yield _proxies, clients + yield proxies, clients def get_preferred_data_client( @@ -1228,11 +1226,13 @@ async def open_client_proxy( event_table = {} async with ( + to_asyncio.open_channel_from( open_aio_client_method_relay, client=client, 
event_consumers=event_table, ) as (first, chan), + trio.open_nursery() as relay_n, ): From ecdc747cede569c5588f6af71fc7b35bff398f4f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 16 Jun 2022 15:27:26 -0400 Subject: [PATCH 28/58] Allow packing pps by a different key set --- piker/pp.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/pp.py b/piker/pp.py index a6f61f56..9dfa4eb1 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -332,6 +332,7 @@ def load_pps_from_ledger( def get_pps( brokername: str, acctids: Optional[set[str]] = set(), + key_by: Optional[str] = None, ) -> dict[str, dict[str, Position]]: ''' @@ -356,7 +357,7 @@ def get_pps( ): continue - active = update_pps_conf(brokername, account) + active = update_pps_conf(brokername, account, key_by=key_by) all_active.setdefault(account, {}).update(active) return all_active @@ -366,6 +367,7 @@ def update_pps_conf( brokername: str, acctid: str, trade_records: Optional[list[Transaction]] = None, + key_by: Optional[str] = None, ) -> dict[str, Position]: @@ -437,6 +439,9 @@ def update_pps_conf( # encoder=config.toml.Encoder(preserve=True), ) + if key_by: + pps_objs = {getattr(pp, key_by): pp for pp in pps_objs} + # deliver object form of all pps in table to caller return pp_objs From b6f344f34a75b329d649cc2bd99704e4713485f9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 16 Jun 2022 15:31:22 -0400 Subject: [PATCH 29/58] Only emit pps msg for trade triggering instrument We can probably make this better (and with less file sys accesses) later such that we keep a consistent pps state in mem and only write async maybe from another side-task? 
--- piker/brokers/ib/broker.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index a8484941..eac7a8a4 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -71,7 +71,6 @@ from .api import ( Client, MethodProxy, ) -# from .feed import open_data_client def pack_position( @@ -285,7 +284,6 @@ async def recv_trade_updates( async def update_ledger_from_api_trades( trade_entries: list[dict[str, Any]], - ib_pp_msgs: dict[int, BrokerdPosition], # conid -> msg client: Union[Client, MethodProxy], ) -> dict[str, Any]: @@ -478,7 +476,6 @@ async def trades_dialogue( trades = await proxy.trades() await update_ledger_from_api_trades( trades, - cids2pps, # pass these in to map to correct fqsns.. proxy, ) @@ -542,25 +539,26 @@ async def emit_pp_update( proxy = proxies[acctid] await update_ledger_from_api_trades( [trade_entry], - cids2pps, # pass these in to map to correct fqsns.. proxy, ) # load all positions from `pps.toml`, cross check with # ib's positions data, and relay re-formatted pps as # msgs to the ems. - for acctid, by_fqsn in pp.get_pps( - 'ib', - acctids={acctid}, - ).items(): + by_acct = pp.get_pps('ib', acctids={acctid}) + by_fqsn = by_acct[acctid.strip('ib.')] - # should only be one right? - msgs = await update_and_audit( - by_fqsn, - cids2pps, - validate=False, - ) - for msg in msgs: - await ems_stream.send(msg.dict()) + for fqsn, p in by_fqsn.items(): + if p.bsuid == trade_entry['contract']['conId']: + # should only be one right? 
+ msgs = await update_and_audit( + {fqsn: p}, + cids2pps, + validate=False, + ) + msg = msgs[0] + break + + await ems_stream.send(msg.dict()) async def deliver_trade_events( @@ -714,7 +712,6 @@ async def deliver_trade_events( if comms or cost_already_rx: # only send a pp update once we have a cost report - print("EMITTING PP") await emit_pp_update( ems_stream, trade_entry, @@ -739,7 +736,6 @@ async def deliver_trade_events( ) if fill_already_rx: - print("EMITTING PP") await emit_pp_update( ems_stream, trade_entry, From 21153a0e1ef36ee8c3c5e7e6d5493fa1ffb4cdd5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 17 Jun 2022 15:41:17 -0400 Subject: [PATCH 30/58] Ugh, hack our own toml encoder since it seems everything in the lib is half-baked.. --- piker/pp.py | 176 +++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 159 insertions(+), 17 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 9dfa4eb1..a9950ef2 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -23,6 +23,7 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. from contextlib import contextmanager as cm import os from os import path +import re from typing import ( Any, Optional, @@ -103,9 +104,8 @@ class Transaction(Struct): class Position(Struct): ''' - Basic pp (personal position) model with attached fills history. - - This type should be IPC wire ready? + Basic pp (personal/piker position) model with attached clearing + transaction history. 
''' symbol: Symbol @@ -116,17 +116,33 @@ class Position(Struct): bsuid: str # ordered record of known constituent trade messages - fills: dict[ + clears: dict[ Union[str, int, Status], # trade id float, # cost ] = {} - def to_dict(self): + def to_dict(self) -> dict: return { f: getattr(self, f) for f in self.__struct_fields__ } + def to_pretoml(self) -> dict: + d = self.to_dict() + clears = d.pop('clears') + + # clears_list = [] + + inline_table = toml.TomlDecoder().get_empty_inline_table() + for tid, data in clears.items(): + inline_table[tid] = data + + # clears_list.append(inline_table) + + # d['clears'] = clears_list + d['clears'] = inline_table + return d + def update_from_msg( self, msg: BrokerdPosition, @@ -243,7 +259,7 @@ def update_pps( ) # don't do updates for ledger records we already have # included in the current pps state. - if r.tid in pp.fills: + if r.tid in pp.clears: # NOTE: likely you'll see repeats of the same # ``Transaction`` passed in here if/when you are restarting # a ``brokerd.ib`` where the API will re-report trades from @@ -263,10 +279,10 @@ def update_pps( cost=2*r.cost, ) - # track clearing costs - pp.fills[r.tid] = r.cost + # track clearing data + pp.clears[f'"{r.tid}"'] = {'cost': r.cost} - assert len(set(pp.fills)) == len(pp.fills) + assert len(set(pp.clears)) == len(pp.clears) return pps @@ -291,7 +307,7 @@ def dump_active( closed = {} for k, pp in pps.items(): - asdict = pp.to_dict() + asdict = pp.to_pretoml() if pp.size == 0: closed[k] = asdict else: @@ -340,7 +356,11 @@ def get_pps( incremental update file: ``pps.toml``. 
''' - conf, path = config.load('pps') + conf, path = config.load( + 'pps', + # load dicts as inlines to preserve compactness + # _dict=toml.decoder.InlineTableDict, + ) all_active = {} # try to load any ledgers if no section found @@ -363,6 +383,117 @@ def get_pps( return all_active +class PpsEncoder(toml.TomlEncoder): + ''' + Special "styled" encoder that makes a ``pps.toml`` redable and + compact by putting `.clears` tables inline and everything else + flat-ish. + + ''' + separator = ',' + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + https://github.com/toml-lang/toml#user-content-inline-table + """ + val_list = [] + for k, v in section.items(): + # if isinstance(v, toml.decoder.InlineTableDict): + if isinstance(v, dict): + val = self.dump_inline_table(v) + else: + val = str(self.dump_value(v)) + + val_list.append(k + " = " + val) + + retval = "{ " + ", ".join(val_list) + " }" + return retval + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' + retdict = self._dict() + arraystr = "" + for section in o: + qsection = str(section) + value = o[section] + + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = toml.encoder._dump_str(section) + + # arrayoftables = False + if ( + self.preserve + and isinstance(value, toml.decoder.InlineTableDict) + ): + retstr += ( + qsection + + + " = " + + + self.dump_inline_table(o[section]) + + + '\n' # only on the final terminating left brace + ) + + # XXX: this code i'm pretty sure is just blatantly bad + # and/or wrong.. 
+ # if isinstance(o[section], list): + # for a in o[section]: + # if isinstance(a, dict): + # arrayoftables = True + # if arrayoftables: + # for a in o[section]: + # arraytabstr = "\n" + # arraystr += "[[" + sup + qsection + "]]\n" + # s, d = self.dump_sections(a, sup + qsection) + # if s: + # if s[0] == "[": + # arraytabstr += s + # else: + # arraystr += s + # while d: + # newd = self._dict() + # for dsec in d: + # s1, d1 = self.dump_sections(d[dsec], sup + + # qsection + "." + + # dsec) + # if s1: + # arraytabstr += ("[" + sup + qsection + + # "." + dsec + "]\n") + # arraytabstr += s1 + # for s1 in d1: + # newd[dsec + "." + s1] = d1[s1] + # d = newd + # arraystr += arraytabstr + + elif isinstance(value, dict): + retdict[qsection] = o[section] + + elif o[section] is not None: + retstr += ( + qsection + + + " = " + + + str(self.dump_value(o[section])) + ) + + # if not isinstance(value, dict): + if not isinstance(value, toml.decoder.InlineTableDict): + # inline tables should not contain newlines: + # https://toml.io/en/v1.0.0#inline-table + retstr += '\n' + + else: + raise ValueError(value) + + retstr += arraystr + return (retstr, retdict) + + def update_pps_conf( brokername: str, acctid: str, @@ -390,6 +521,14 @@ def update_pps_conf( # unmarshal/load ``pps.toml`` config entries into object form. pp_objs = {} for fqsn, entry in pps.items(): + + # convert clears sub-tables (only in this form + # for toml re-presentation) back into a master table. 
+ clears = entry['clears'] + # clears = {} + # for table in entry['clears']: + # clears.update(table) + pp_objs[fqsn] = Position( Symbol.from_fqsn(fqsn, info={}), size=entry['size'], @@ -397,11 +536,11 @@ def update_pps_conf( bsuid=entry['bsuid'], # XXX: super critical, we need to be sure to include - # all pps.toml fills to avoid reusing fills that were + # all pps.toml clears to avoid reusing clears that were # already included in the current incremental update # state, since today's records may have already been # processed! - fills=entry['fills'], + clears=clears, ) # update all pp objects from any (new) trade records which @@ -431,16 +570,19 @@ def update_pps_conf( pp_objs.pop(fqsn, None) conf[brokername][acctid] = pp_entries + + enc = PpsEncoder(preserve=True) + # TODO: why tf haven't they already done this for inline tables smh.. + # enc.dump_funcs[dict] = enc.dump_inline_table + config.write( conf, 'pps', - - # TODO: make nested tables and/or inline tables work? - # encoder=config.toml.Encoder(preserve=True), + encoder=enc, ) if key_by: - pps_objs = {getattr(pp, key_by): pp for pp in pps_objs} + pp_objs = {getattr(pp, key_by): pp for pp in pp_objs} # deliver object form of all pps in table to caller return pp_objs From ff74f4302a866a10b732fc4098de82f45a181444 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 18 Jun 2022 15:30:52 -0400 Subject: [PATCH 31/58] Support pp expiries, datetimes on transactions Since some positions obviously expire and thus shouldn't continually exist inside a `pps.toml` add naive support for tracking and discarding expired contracts: - add `Transaction.expiry: Optional[pendulum.datetime]`. - add `Position.expiry: Optional[pendulum.datetime]` which can be parsed from a transaction ledger. - only write pps with a non-none expiry to the `pps.toml` - change `Position.avg_price` -> `.be_price` (be is "breakeven") since it's a much less ambiguous name. 
- change `load_pps_from_ledger()` to *not* call `dump_active()` since for the only use case it ends up getting called later anyway. --- piker/pp.py | 111 +++++++++++++++++++++++++++++++++------------------- 1 file changed, 71 insertions(+), 40 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index a9950ef2..6030249e 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -31,6 +31,8 @@ from typing import ( ) from msgspec import Struct +import pendulum +from pendulum import datetime, now import toml from . import config @@ -93,8 +95,8 @@ class Transaction(Struct): size: float price: float cost: float # commisions or other additional costs - - # dt: datetime + dt: datetime + expiry: Optional[datetime] = None # optional key normally derived from the broker # backend which ensures the instrument-symbol this record @@ -110,9 +112,14 @@ class Position(Struct): ''' symbol: Symbol - # last size and avg entry price + # can be +ve or -ve for long/short size: float - avg_price: float # TODO: contextual pricing + + # "breakeven price" above or below which pnl moves above and below + # zero for the entirety of the current "trade state". 
+ be_price: float + + # unique backend symbol id bsuid: str # ordered record of known constituent trade messages @@ -121,6 +128,8 @@ class Position(Struct): float, # cost ] = {} + expiry: Optional[datetime] = None + def to_dict(self) -> dict: return { f: getattr(self, f) @@ -130,12 +139,16 @@ class Position(Struct): def to_pretoml(self) -> dict: d = self.to_dict() clears = d.pop('clears') - + expiry = d.pop('expiry') + # if not expiry is None: + # breakpoint() + if expiry: + d['expiry'] = str(expiry) # clears_list = [] inline_table = toml.TomlDecoder().get_empty_inline_table() for tid, data in clears.items(): - inline_table[tid] = data + inline_table[f'{tid}'] = data # clears_list.append(inline_table) @@ -153,7 +166,7 @@ class Position(Struct): symbol = self.symbol lot_size_digits = symbol.lot_size_digits - avg_price, size = ( + be_price, size = ( round( msg['avg_price'], ndigits=symbol.tick_size_digits @@ -164,7 +177,7 @@ class Position(Struct): ), ) - self.avg_price = avg_price + self.be_price = be_price self.size = size @property @@ -174,7 +187,7 @@ class Position(Struct): terms. ''' - return self.avg_price * self.size + return self.be_price * self.size def lifo_update( self, @@ -209,24 +222,24 @@ class Position(Struct): size_diff = abs(new_size) - abs(self.size) if new_size == 0: - self.avg_price = 0 + self.be_price = 0 elif size_diff > 0: # XXX: LOFI incremental update: # only update the "average price" when # the size increases not when it decreases (i.e. 
the # position is being made smaller) - self.avg_price = ( + self.be_price = ( abs(size) * price # weight of current exec + cost # transaction cost + - self.avg_price * abs(self.size) # weight of previous pp + self.be_price * abs(self.size) # weight of previous pp ) / abs(new_size) self.size = new_size - return new_size, self.avg_price + return new_size, self.be_price def update_pps( @@ -253,10 +266,12 @@ def update_pps( info={}, ), size=0.0, - avg_price=0.0, + be_price=0.0, bsuid=r.bsuid, + expiry=r.expiry, ) ) + # don't do updates for ledger records we already have # included in the current pps state. if r.tid in pp.clears: @@ -307,8 +322,18 @@ def dump_active( closed = {} for k, pp in pps.items(): + asdict = pp.to_pretoml() - if pp.size == 0: + + if pp.expiry is None: + asdict.pop('expiry', None) + + if ( + pp.size == 0 + + # drop time-expired positions (normally derivatives) + or (pp.expiry and pp.expiry < now()) + ): closed[k] = asdict else: active[k] = asdict @@ -321,7 +346,7 @@ def load_pps_from_ledger( brokername: str, acctname: str, -) -> tuple[dict, dict]: +) -> dict[str, Position]: ''' Open a ledger file by broker name and account and read in and process any trade records into our normalized ``Transaction`` @@ -341,8 +366,7 @@ def load_pps_from_ledger( brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) - pps = update_pps(records) - return dump_active(pps) + return update_pps(records) def get_pps( @@ -509,7 +533,7 @@ def update_pps_conf( if not pps: # no pps entry yet for this broker/account so parse # any available ledgers to build a pps state. - pps, closed = load_pps_from_ledger( + pp_objs = load_pps_from_ledger( brokername, acctid, ) @@ -518,30 +542,37 @@ def update_pps_conf( f'No trade history could be loaded for {brokername}:{acctid}' ) - # unmarshal/load ``pps.toml`` config entries into object form. 
- pp_objs = {} - for fqsn, entry in pps.items(): + else: + # unmarshal/load ``pps.toml`` config entries into object form. + pp_objs = {} + for fqsn, entry in pps.items(): - # convert clears sub-tables (only in this form - # for toml re-presentation) back into a master table. - clears = entry['clears'] - # clears = {} - # for table in entry['clears']: - # clears.update(table) + # convert clears sub-tables (only in this form + # for toml re-presentation) back into a master table. + clears = entry['clears'] + expiry = entry.get('expiry') + if expiry: + expiry = pendulum.parse(expiry) - pp_objs[fqsn] = Position( - Symbol.from_fqsn(fqsn, info={}), - size=entry['size'], - avg_price=entry['avg_price'], - bsuid=entry['bsuid'], + # clears = {} + # for k, v in clears.items(): + # print((k, v)) + # clears.update(table) - # XXX: super critical, we need to be sure to include - # all pps.toml clears to avoid reusing clears that were - # already included in the current incremental update - # state, since today's records may have already been - # processed! - clears=clears, - ) + pp_objs[fqsn] = Position( + Symbol.from_fqsn(fqsn, info={}), + size=entry['size'], + be_price=entry['be_price'], + expiry=expiry, + bsuid=entry['bsuid'], + + # XXX: super critical, we need to be sure to include + # all pps.toml clears to avoid reusing clears that were + # already included in the current incremental update + # state, since today's records may have already been + # processed! + clears=clears, + ) # update all pp objects from any (new) trade records which # were passed in (aka incremental update case). 
From c617a069050e0a868bc7154acf28bf15fd315710 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 18 Jun 2022 15:53:12 -0400 Subject: [PATCH 32/58] Port everything to `Position.be_price` --- piker/clearing/_allocate.py | 12 ++++++------ piker/clearing/_paper_engine.py | 2 +- piker/ui/_position.py | 10 +++++----- piker/ui/order_mode.py | 2 +- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index 7ee2b0be..336a9b25 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -130,7 +130,7 @@ class Allocator(BaseModel): l_sub_pp = self.units_limit - abs_live_size elif size_unit == 'currency': - live_cost_basis = abs_live_size * live_pp.avg_price + live_cost_basis = abs_live_size * live_pp.be_price slot_size = currency_per_slot / price l_sub_pp = (self.currency_limit - live_cost_basis) / price @@ -162,7 +162,7 @@ class Allocator(BaseModel): if size_unit == 'currency': # compute the "projected" limit's worth of units at the # current pp (weighted) price: - slot_size = currency_per_slot / live_pp.avg_price + slot_size = currency_per_slot / live_pp.be_price else: slot_size = u_per_slot @@ -204,7 +204,7 @@ class Allocator(BaseModel): Position( symbol=sym, size=order_size, - avg_price=price, + be_price=price, bsuid=sym, ) ) @@ -233,8 +233,8 @@ class Allocator(BaseModel): abs_pp_size = abs(pp.size) if self.size_unit == 'currency': - # live_currency_size = size or (abs_pp_size * pp.avg_price) - live_currency_size = abs_pp_size * pp.avg_price + # live_currency_size = size or (abs_pp_size * pp.be_price) + live_currency_size = abs_pp_size * pp.be_price prop = live_currency_size / self.currency_limit else: @@ -304,7 +304,7 @@ def mk_allocator( # if the current position is already greater then the limit # settings, increase the limit to the current position if alloc.size_unit == 'currency': - startup_size = startup_pp.size * startup_pp.avg_price + startup_size = startup_pp.size * 
startup_pp.be_price if startup_size > alloc.currency_limit: alloc.currency_limit = round(startup_size, ndigits=2) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 9e70dce2..bd78e1b4 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -263,7 +263,7 @@ class PaperBoi: pp = Position( Symbol(key=symbol), size=pp_msg.size, - avg_price=pp_msg.avg_price, + be_price=pp_msg.avg_price, uid=symbol.front_fqsn(), ) pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 844869b0..6a1ab01e 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -106,8 +106,8 @@ async def update_pnl_from_feed( # compute and display pnl status order_mode.pane.pnl_label.format( pnl=copysign(1, size) * pnl( - # live.avg_price, - order_mode.current_pp.live_pp.avg_price, + # live.be_price, + order_mode.current_pp.live_pp.be_price, tick['price'], ), ) @@ -357,7 +357,7 @@ class SettingsPane: # last historical close price last = feed.shm.array[-1][['close']][0] pnl_value = copysign(1, size) * pnl( - tracker.live_pp.avg_price, + tracker.live_pp.be_price, last, ) @@ -557,7 +557,7 @@ class PositionTracker: pp = position or self.live_pp self.update_line( - pp.avg_price, + pp.be_price, pp.size, self.chart.linked.symbol.lot_size_digits, ) @@ -571,7 +571,7 @@ class PositionTracker: self.hide() else: - self._level_marker.level = pp.avg_price + self._level_marker.level = pp.be_price # these updates are critical to avoid lag on view/scene changes self._level_marker.update() # trigger paint diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 03132b8e..f5a85d64 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -606,7 +606,7 @@ async def open_order_mode( startup_pp = Position( symbol=symbol, size=0, - avg_price=0, + be_price=0, # XXX: BLEH, do we care about this on the client side? 
bsuid=symbol, From bfad676b7c44a63df39974a3222a7cf83cfe8d6b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 18 Jun 2022 15:53:56 -0400 Subject: [PATCH 33/58] Add expiry and datetime support to ledger parsing --- piker/brokers/ib/broker.py | 48 +++++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index eac7a8a4..09922055 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -370,7 +370,7 @@ async def update_and_audit( symbol=ibppmsg.symbol, currency=ibppmsg.currency, size=p.size, - avg_price=p.avg_price, + avg_price=p.be_price, ) msgs.append(msg) @@ -474,17 +474,17 @@ async def trades_dialogue( # connected api clients. for account, proxy in proxies.items(): trades = await proxy.trades() - await update_ledger_from_api_trades( - trades, - proxy, - ) + if trades: + await update_ledger_from_api_trades( + trades, + proxy, + ) # load all positions from `pps.toml`, cross check with ib's # positions data, and relay re-formatted pps as msgs to the ems. - for acctid, by_fqsn in pp.get_pps( - 'ib', acctids=active_accts, - ).items(): + pps_by_account = pp.get_pps('ib', acctids=active_accts) + for acctid, by_fqsn in pps_by_account.items(): msgs = await update_and_audit(by_fqsn, cids2pps, validate=True) all_positions.extend(msg.dict() for msg in msgs) @@ -835,6 +835,20 @@ def norm_trade_records( print(f'skipping opts contract {symbol}') continue + # timestamping is way different in API records + date = record.get('date') + if not date: + # probably a flex record with a wonky non-std timestamp.. 
+ date, ts = record['dateTime'].split(';') + dt = pendulum.parse(date) + ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}' + tsdt = pendulum.parse(ts) + dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second) + + else: + epoch_dt = pendulum.from_timestamp(record.get('time')) + dt = pendulum.parse(date) + # special handling of symbol extraction from # flex records using some ad-hoc schema parsing. instr = record.get('assetCategory') @@ -847,17 +861,23 @@ def norm_trade_records( if expiry: expiry = str(expiry).strip(' ') suffix = f'{exch}.{expiry}' + expiry = pendulum.parse(expiry) fqsn = Symbol.from_fqsn( fqsn=f'{symbol}.{suffix}.ib', info={}, ).front_fqsn().rstrip('.ib') - # NOTE: for flex records the normal fields won't be available so - # we have to do a lookup at some point to reverse map the conid - # to a fqsn? - - # con = await proxy.get_con(conid) + # NOTE: for flex records the normal fields for defining an fqsn + # sometimes won't be available so we rely on two approaches for + # the "reverse lookup" of piker style fqsn keys: + # - when dealing with API trade records received from + # `IB.trades()` we do a contract lookup at he time of processing + # - when dealing with flex records, it is assumed the record + # is at least a day old and thus the TWS position reporting system + # should already have entries if the pps are still open, in + # which case, we can pull the fqsn from that table (see + # `trades_dialogue()` above). 
records.append(pp.Transaction( fqsn=fqsn, @@ -865,6 +885,8 @@ def norm_trade_records( size=size, price=price, cost=comms, + dt=dt, + expiry=expiry, bsuid=conid, )) From 16b2937d234c2fed45a07aade402d7bfde77de71 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 18 Jun 2022 15:54:16 -0400 Subject: [PATCH 34/58] Passthrough toml lib kwargs --- piker/config.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/piker/config.py b/piker/config.py index cbe134ea..c7a7acc9 100644 --- a/piker/config.py +++ b/piker/config.py @@ -186,7 +186,9 @@ def repodir(): def load( conf_name: str = 'brokers', - path: str = None + path: str = None, + + **tomlkws, ) -> (dict, str): ''' @@ -211,7 +213,7 @@ def load( with open(path, 'w'): pass # touch - config = toml.load(path) + config = toml.load(path, **tomlkws) log.debug(f"Read config file {path}") return config, path From f1fe369bbf670c5ed8f152c61f69581189f44aaf Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 18 Jun 2022 18:30:53 -0400 Subject: [PATCH 35/58] Write clears table as a list of tables in toml --- piker/pp.py | 58 ++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 44 insertions(+), 14 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 6030249e..fd71ef29 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -140,20 +140,28 @@ class Position(Struct): d = self.to_dict() clears = d.pop('clears') expiry = d.pop('expiry') + # if not expiry is None: # breakpoint() + if expiry: d['expiry'] = str(expiry) - # clears_list = [] - inline_table = toml.TomlDecoder().get_empty_inline_table() + clears_list = [] + for tid, data in clears.items(): - inline_table[f'{tid}'] = data + inline_table = toml.TomlDecoder().get_empty_inline_table() + # inline_table[f'{tid}'] = data + # inline_table = type('uhh', (dict, toml.decoder.InlineTableDict()), + inline_table['tid'] = tid - # clears_list.append(inline_table) + for k, v in data.items(): + inline_table[k] = v - # d['clears'] = clears_list - 
d['clears'] = inline_table + clears_list.append(inline_table) + + d['clears'] = clears_list + # d['clears'] = inline_table return d def update_from_msg( @@ -295,7 +303,9 @@ def update_pps( ) # track clearing data - pp.clears[f'"{r.tid}"'] = {'cost': r.cost} + pp.clears[r.tid] = { + 'cost': r.cost, + } assert len(set(pp.clears)) == len(pp.clears) return pps @@ -416,6 +426,21 @@ class PpsEncoder(toml.TomlEncoder): ''' separator = ',' + def dump_list(self, v): + # breakpoint() + # super().dump_list(section) + + retval = "[\n" + for u in v: + if isinstance(u, toml.decoder.InlineTableDict): + out = self.dump_inline_table(u) + else: + out = str(self.dump_value(u)) + + retval += " " + out + "," + "\n" + retval += "]" + return retval + def dump_inline_table(self, section): """Preserve inline table in its compact syntax instead of expanding into subsection. @@ -549,16 +574,20 @@ def update_pps_conf( # convert clears sub-tables (only in this form # for toml re-presentation) back into a master table. - clears = entry['clears'] + clears_list = entry['clears'] + + # index clears entries in "object" form by tid in a top + # level dict instead of a list (as is presented in our + # ``pps.toml``). + clears = {} + for clears_table in clears_list: + tid = clears_table.pop('tid') + clears[tid] = clears_table + expiry = entry.get('expiry') if expiry: expiry = pendulum.parse(expiry) - # clears = {} - # for k, v in clears.items(): - # print((k, v)) - # clears.update(table) - pp_objs[fqsn] = Position( Symbol.from_fqsn(fqsn, info={}), size=entry['size'], @@ -604,7 +633,8 @@ def update_pps_conf( enc = PpsEncoder(preserve=True) # TODO: why tf haven't they already done this for inline tables smh.. 
- # enc.dump_funcs[dict] = enc.dump_inline_table + # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table()) + enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table config.write( conf, From 68b32208de3fa748769f9ae47aeab80e0f23496e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 19 Jun 2022 16:30:08 -0400 Subject: [PATCH 36/58] Key pps by bsuid to avoid incorrect disparate entries --- piker/pp.py | 69 +++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 21 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index fd71ef29..1f09c2d7 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -125,7 +125,7 @@ class Position(Struct): # ordered record of known constituent trade messages clears: dict[ Union[str, int, Status], # trade id - float, # cost + dict[str, Any], # transaction history summaries ] = {} expiry: Optional[datetime] = None @@ -137,13 +137,16 @@ class Position(Struct): } def to_pretoml(self) -> dict: + ''' + Prep this position's data contents for export to toml including + re-structuring of the ``.clears`` table to an array of + inline-subtables for better ``pps.toml`` compactness. + + ''' d = self.to_dict() clears = d.pop('clears') expiry = d.pop('expiry') - # if not expiry is None: - # breakpoint() - if expiry: d['expiry'] = str(expiry) @@ -151,8 +154,6 @@ class Position(Struct): for tid, data in clears.items(): inline_table = toml.TomlDecoder().get_empty_inline_table() - # inline_table[f'{tid}'] = data - # inline_table = type('uhh', (dict, toml.decoder.InlineTableDict()), inline_table['tid'] = tid for k, v in data.items(): @@ -161,7 +162,7 @@ class Position(Struct): clears_list.append(inline_table) d['clears'] = clears_list - # d['clears'] = inline_table + return d def update_from_msg( @@ -265,7 +266,7 @@ def update_pps( for r in records: pp = pps.setdefault( - r.fqsn or r.bsuid, + r.bsuid, # if no existing pp, allocate fresh one. 
Position( @@ -417,6 +418,9 @@ def get_pps( return all_active +# TODO: instead see if we can hack tomli and tomli-w to do the same: +# - https://github.com/hukkin/tomli +# - https://github.com/hukkin/tomli-w class PpsEncoder(toml.TomlEncoder): ''' Special "styled" encoder that makes a ``pps.toml`` redable and @@ -427,9 +431,11 @@ class PpsEncoder(toml.TomlEncoder): separator = ',' def dump_list(self, v): - # breakpoint() - # super().dump_list(section) + ''' + Dump an inline list with a newline after every element and + with consideration for denoted inline table types. + ''' retval = "[\n" for u in v: if isinstance(u, toml.decoder.InlineTableDict): @@ -543,14 +549,17 @@ class PpsEncoder(toml.TomlEncoder): return (retstr, retdict) -def update_pps_conf( +def load_pps_from_toml( brokername: str, - acctid: str, - trade_records: Optional[list[Transaction]] = None, - key_by: Optional[str] = None, + acctid: str -) -> dict[str, Position]: +) -> tuple[dict, dict[str, Position]]: + ''' + Load and marshal to objects all pps from either an existing + ``pps.toml`` config, or from scratch from a ledger file when + none yet exists. + ''' conf, path = config.load('pps') brokersection = conf.setdefault(brokername, {}) pps = brokersection.setdefault(acctid, {}) @@ -603,6 +612,19 @@ def update_pps_conf( clears=clears, ) + return conf, pp_objs + + +def update_pps_conf( + brokername: str, + acctid: str, + trade_records: Optional[list[Transaction]] = None, + key_by: Optional[str] = None, + +) -> dict[str, Position]: + + conf, pp_objs = load_pps_from_toml(brokername, acctid) + # update all pp objects from any (new) trade records which # were passed in (aka incremental update case). 
if trade_records: @@ -615,24 +637,29 @@ def update_pps_conf( # dict-serialize all active pps pp_entries = {} - for fqsn, pp_dict in active.items(): - print(f'Updating active pp: {fqsn}') + + for bsuid, pp_dict in active.items(): # normalize to a simpler flat dict format - _ = pp_dict.pop('symbol') + s = pp_dict.pop('symbol') + # TODO: we need to figure out how to have one top level + # listing venue here even when the backend isn't providing + # it via the trades ledger.. + fqsn = s.front_fqsn() + print(f'Updating active pp: {fqsn}') # XXX: ugh, it's cuz we push the section under # the broker name.. maybe we need to rethink this? brokerless_key = fqsn.rstrip(f'.{brokername}') pp_entries[brokerless_key] = pp_dict - for fqsn in closed: - pp_objs.pop(fqsn, None) + for bsuid in closed: + pp_objs.pop(bsuid, None) conf[brokername][acctid] = pp_entries - enc = PpsEncoder(preserve=True) # TODO: why tf haven't they already done this for inline tables smh.. + enc = PpsEncoder(preserve=True) # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table()) enc.dump_funcs[toml.decoder.InlineTableDict] = enc.dump_inline_table From fe146050345a71b6aa271d631ae5c655e910aa67 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Jun 2022 08:10:41 -0400 Subject: [PATCH 37/58] Fix null case return --- piker/pp.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/pp.py b/piker/pp.py index 1f09c2d7..d0809f88 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -373,7 +373,7 @@ def load_pps_from_ledger( if not ledger: # null case, no ledger file with content - return {}, {} + return {} brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) @@ -572,6 +572,7 @@ def load_pps_from_toml( acctid, ) if not pps: + breakpoint() log.warning( f'No trade history could be loaded for {brokername}:{acctid}' ) From 2063b9d8bb6d93e5070918bd33c5dcd4eea45c3b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Jun 2022 11:26:32 -0400 Subject: 
[PATCH 38/58] Drop ledger entries that have no transaction id --- piker/brokers/ib/broker.py | 16 +++++++++++++--- piker/pp.py | 1 - 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 09922055..dde4c9c6 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -801,9 +801,7 @@ def norm_trade_records( records: list[pp.Transaction] = [] for tid, record in ledger.items(): - # date, time = record['dateTime'] - # cost = record['cost'] - # action = record['buySell'] + conid = record.get('conId') or record['conid'] comms = record.get('commission') or -1*record['ibCommission'] price = record.get('price') or record['tradePrice'] @@ -957,6 +955,18 @@ def trades_to_records( entry['date'] = str(dt) acctid = accounts[entry['acctNumber']] + if not tid: + # this is likely some kind of internal adjustment + # transaction, likely one of the following: + # - an expiry event that will show a "book trade" indicating + # some adjustment to cash balances: zeroing or itm settle. 
+ # - a manual cash balance position adjustment likely done by + # the user from the accounts window in TWS where they can + # manually set the avg price and size: + # https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST + log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}') + continue + trades_by_account.setdefault( acctid, {} )[tid] = entry diff --git a/piker/pp.py b/piker/pp.py index d0809f88..68663f89 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -572,7 +572,6 @@ def load_pps_from_toml( acctid, ) if not pps: - breakpoint() log.warning( f'No trade history could be loaded for {brokername}:{acctid}' ) From f32b4d37cb252e9a3384a0aa2d9f39e3c496afa9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Jun 2022 15:49:07 -0400 Subject: [PATCH 39/58] Support pp audits with multiple accounts --- piker/brokers/ib/broker.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index dde4c9c6..7870b2b7 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -336,8 +336,9 @@ async def update_ledger_from_api_trades( async def update_and_audit( + acctid: str, by_fqsn: dict[str, pp.Position], - cids2pps: dict[int, BrokerdPosition], + cids2pps: dict[tuple[str, int], BrokerdPosition], validate: bool = False, ) -> list[BrokerdPosition]: @@ -355,7 +356,7 @@ async def update_and_audit( # retreive equivalent ib reported position message # for comparison/audit versus the piker equivalent # breakeven pp calcs. 
- ibppmsg = cids2pps[bsuid] + ibppmsg = cids2pps[(acctid, bsuid)] msg = BrokerdPosition( broker='ib', @@ -466,7 +467,7 @@ async def trades_dialogue( cid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] active_accts.add(acctid) - cids2pps[cid] = msg + cids2pps[(acctid.strip('ib.'), cid)] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') @@ -485,7 +486,12 @@ async def trades_dialogue( pps_by_account = pp.get_pps('ib', acctids=active_accts) for acctid, by_fqsn in pps_by_account.items(): - msgs = await update_and_audit(by_fqsn, cids2pps, validate=True) + msgs = await update_and_audit( + acctid, + by_fqsn, + cids2pps, + validate=True, + ) all_positions.extend(msg.dict() for msg in msgs) if not all_positions and cids2pps: @@ -551,6 +557,7 @@ async def emit_pp_update( if p.bsuid == trade_entry['contract']['conId']: # should only be one right? msgs = await update_and_audit( + acctid, {fqsn: p}, cids2pps, validate=False, @@ -566,7 +573,7 @@ async def deliver_trade_events( trade_event_stream: trio.MemoryReceiveChannel, ems_stream: tractor.MsgStream, accounts_def: dict[str, str], - cids2pps: dict[str, BrokerdPosition], + cids2pps: dict[tuple[str, str], BrokerdPosition], proxies: dict[str, MethodProxy], ) -> None: From 4fdfb818763089a673f15cb45fbedd6e88f23f6d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Jun 2022 12:37:33 -0400 Subject: [PATCH 40/58] Support re-processing a filtered ledger entry set This makes it possible to refresh a single fqsn-position in one's `pps.toml` by simply deleting the file entry, in which case, if there is new trade records passed to `load_pps_from_toml()` via the new `reload_records` kwarg, then the backend ledger entries matching that symbol will be filtered and used to recompute a fresh position. 
This turns out to be super handy when you have crashes that prevent a `pps.toml` entry from being updated correctly but where the ledger does have all the data necessary to calculate a fresh correct entry. --- piker/pp.py | 116 +++++++++++++++++++++++++++++++++------------------- 1 file changed, 75 insertions(+), 41 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 68663f89..3860bf36 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -33,6 +33,7 @@ from typing import ( from msgspec import Struct import pendulum from pendulum import datetime, now +# import tomli import toml from . import config @@ -357,6 +358,9 @@ def load_pps_from_ledger( brokername: str, acctname: str, + # post normalization filter on ledger entries to be processed + filter_by: Optional[list[Transaction]] = None, + ) -> dict[str, Position]: ''' Open a ledger file by broker name and account and read in and @@ -369,14 +373,17 @@ def load_pps_from_ledger( brokername, acctname, ) as ledger: - pass # readonly - - if not ledger: - # null case, no ledger file with content - return {} + if not ledger: + # null case, no ledger file with content + return {} brokermod = get_brokermod(brokername) records = brokermod.norm_trade_records(ledger) + + if filter_by: + bsuids = set(r.bsuid for r in filter_by) + records = filter(lambda r: r.bsuid in bsuids, records) + return update_pps(records) @@ -551,7 +558,15 @@ class PpsEncoder(toml.TomlEncoder): def load_pps_from_toml( brokername: str, - acctid: str + acctid: str, + + # XXX: there is an edge case here where we may want to either audit + # the retrieved ``pps.toml`` output or reprocess it since there was + # an error on write on the last attempt to update the state file + # even though the ledger *was* updated. For this cases we allow the + # caller to pass in a symbol set they'd like to reload from the + # underlying ledger to be reprocessed in computing pps state. 
+ reload_records: Optional[list[Transaction]] = None, ) -> tuple[dict, dict[str, Position]]: ''' @@ -563,6 +578,7 @@ def load_pps_from_toml( conf, path = config.load('pps') brokersection = conf.setdefault(brokername, {}) pps = brokersection.setdefault(acctid, {}) + pp_objs = {} if not pps: # no pps entry yet for this broker/account so parse @@ -571,46 +587,59 @@ def load_pps_from_toml( brokername, acctid, ) - if not pps: - log.warning( - f'No trade history could be loaded for {brokername}:{acctid}' - ) - else: - # unmarshal/load ``pps.toml`` config entries into object form. - pp_objs = {} - for fqsn, entry in pps.items(): + # Reload symbol specific ledger entries if requested by the + # caller **AND** none exist in the current pps state table. + elif ( + pps and reload_records and + not any(r.fqsn in pps for r in reload_records) + ): + # no pps entry yet for this broker/account so parse + # any available ledgers to build a pps state. + pp_objs = load_pps_from_ledger( + brokername, + acctid, + filter_by=reload_records, + ) - # convert clears sub-tables (only in this form - # for toml re-presentation) back into a master table. - clears_list = entry['clears'] + if not pps: + log.warning( + f'No trade history could be loaded for {brokername}:{acctid}' + ) - # index clears entries in "object" form by tid in a top - # level dict instead of a list (as is presented in our - # ``pps.toml``). - clears = {} - for clears_table in clears_list: - tid = clears_table.pop('tid') - clears[tid] = clears_table + # unmarshal/load ``pps.toml`` config entries into object form. + for fqsn, entry in pps.items(): - expiry = entry.get('expiry') - if expiry: - expiry = pendulum.parse(expiry) + # convert clears sub-tables (only in this form + # for toml re-presentation) back into a master table. 
+ clears_list = entry['clears'] - pp_objs[fqsn] = Position( - Symbol.from_fqsn(fqsn, info={}), - size=entry['size'], - be_price=entry['be_price'], - expiry=expiry, - bsuid=entry['bsuid'], + # index clears entries in "object" form by tid in a top + # level dict instead of a list (as is presented in our + # ``pps.toml``). + clears = {} + for clears_table in clears_list: + tid = clears_table.pop('tid') + clears[tid] = clears_table - # XXX: super critical, we need to be sure to include - # all pps.toml clears to avoid reusing clears that were - # already included in the current incremental update - # state, since today's records may have already been - # processed! - clears=clears, - ) + expiry = entry.get('expiry') + if expiry: + expiry = pendulum.parse(expiry) + + pp_objs[fqsn] = Position( + Symbol.from_fqsn(fqsn, info={}), + size=entry['size'], + be_price=entry['be_price'], + expiry=expiry, + bsuid=entry['bsuid'], + + # XXX: super critical, we need to be sure to include + # all pps.toml clears to avoid reusing clears that were + # already included in the current incremental update + # state, since today's records may have already been + # processed! + clears=clears, + ) return conf, pp_objs @@ -618,12 +647,17 @@ def load_pps_from_toml( def update_pps_conf( brokername: str, acctid: str, + trade_records: Optional[list[Transaction]] = None, key_by: Optional[str] = None, ) -> dict[str, Position]: - conf, pp_objs = load_pps_from_toml(brokername, acctid) + conf, pp_objs = load_pps_from_toml( + brokername, + acctid, + reload_records=trade_records, + ) # update all pp objects from any (new) trade records which # were passed in (aka incremental update case). 
From 3713288b482aeb3f41b1ddf7ed3731cb9a5c4e03 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Jun 2022 12:52:20 -0400 Subject: [PATCH 41/58] Strip ib prefix before acctid use --- piker/brokers/ib/broker.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 7870b2b7..661022fe 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -551,13 +551,14 @@ async def emit_pp_update( # ib's positions data, and relay re-formatted pps as # msgs to the ems. by_acct = pp.get_pps('ib', acctids={acctid}) - by_fqsn = by_acct[acctid.strip('ib.')] + acctname = acctid.strip('ib.') + by_fqsn = by_acct[acctname] for fqsn, p in by_fqsn.items(): if p.bsuid == trade_entry['contract']['conId']: # should only be one right? msgs = await update_and_audit( - acctid, + acctname, {fqsn: p}, cids2pps, validate=False, @@ -572,7 +573,7 @@ async def deliver_trade_events( trade_event_stream: trio.MemoryReceiveChannel, ems_stream: tractor.MsgStream, - accounts_def: dict[str, str], + accounts_def: dict[str, str], # eg. 
`'ib.main'` -> `'DU999999'` cids2pps: dict[tuple[str, str], BrokerdPosition], proxies: dict[str, MethodProxy], @@ -851,7 +852,7 @@ def norm_trade_records( dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second) else: - epoch_dt = pendulum.from_timestamp(record.get('time')) + # epoch_dt = pendulum.from_timestamp(record.get('time')) dt = pendulum.parse(date) # special handling of symbol extraction from From 4475823e485871abd6bbd6eeab13e9fb5fee33be Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Jun 2022 12:52:45 -0400 Subject: [PATCH 42/58] Add draft ip-mismatch skip case --- piker/brokers/ib/feed.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 1b2bfb45..bc3ec8d5 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -217,8 +217,8 @@ async def get_bars( ) elif ( - err.code == 162 - and 'HMDS query returned no data' in err.message + err.code == 162 and + 'HMDS query returned no data' in err.message ): # XXX: this is now done in the storage mgmt layer # and we shouldn't implicitly decrement the frame dt @@ -237,6 +237,13 @@ async def get_bars( frame_size=2000, ) + # elif ( + # err.code == 162 and + # 'Trading TWS session is connected from a different IP address' in err.message + # ): + # log.warning("ignoring ip address warning") + # continue + elif _pacing in msg: log.warning( From 7ebf8a8dc0215fefe7f4672f742d30b724e20116 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Jun 2022 12:53:34 -0400 Subject: [PATCH 43/58] Add `tomli` as dep being fastest in the west --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index a495db2e..9ae98dbb 100755 --- a/setup.py +++ b/setup.py @@ -41,6 +41,7 @@ setup( }, install_requires=[ 'toml', + 'tomli', # fastest pure py reader 'click', 'colorlog', 'attrs', From cc68501c7a862f340222bcd4539a00d2c9ca3cac Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Jun 2022 
16:43:52 -0400 Subject: [PATCH 44/58] Make pp msg `.currency` not required --- piker/clearing/_messages.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index 1f07f0b0..4bb0be00 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -258,6 +258,6 @@ class BrokerdPosition(BaseModel): broker: str account: str symbol: str - currency: str size: float avg_price: float + currency: str = '' From a12e6800ffc4bd01e85739c8d61a7ae13cd07b6f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Jun 2022 15:41:26 -0400 Subject: [PATCH 45/58] Support per-symbol reload from ledger pp loading - use `tomli` package for reading since it's the fastest pure python reader available apparently. - add new fields to each pp's clears table: price, size, dt - make `load_pps_from_toml()`'s `reload_records` a dict that can be passed in by the caller and is verbatim used to re-read a ledger and filter to the specified symbol set to build out fresh pp objects. - add a `update_from_ledger: bool` flag to `load_pps_from_toml()` to allow forcing a full backend ledger read. - if a set of trades records is passed into `update_pps_conf()` parse out the meta data required to cause a ledger reload as per 2 bullets above. - return active and closed pps in separate by-account maps from `update_pps_conf()`. - drop the `key_by` kwarg. --- piker/pp.py | 91 ++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 66 insertions(+), 25 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 3860bf36..fd8d5cda 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -21,6 +21,7 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. 
''' from contextlib import contextmanager as cm +# from pprint import pformat import os from os import path import re @@ -33,7 +34,7 @@ from typing import ( from msgspec import Struct import pendulum from pendulum import datetime, now -# import tomli +import tomli import toml from . import config @@ -73,8 +74,8 @@ def open_trade_ledger( ) with open(tradesfile, 'w') as cf: pass # touch - with open(tradesfile, 'r') as cf: - ledger = toml.load(tradesfile) + with open(tradesfile, 'rb') as cf: + ledger = tomli.load(cf) cpy = ledger.copy() try: yield cpy @@ -91,7 +92,9 @@ def open_trade_ledger( class Transaction(Struct): - fqsn: str # normally fqsn + # TODO: should this be ``.to`` (see below)? + fqsn: str + tid: Union[str, int] # unique transaction id size: float price: float @@ -104,6 +107,9 @@ class Transaction(Struct): # is for is truly unique. bsuid: Optional[Union[str, int]] = None + # optional fqsn for the source "asset"/money symbol? + # from: Optional[str] = None + class Position(Struct): ''' @@ -307,6 +313,9 @@ def update_pps( # track clearing data pp.clears[r.tid] = { 'cost': r.cost, + 'price': r.price, + 'size': r.size, + 'dt': str(r.dt), } assert len(set(pp.clears)) == len(pp.clears) @@ -359,7 +368,7 @@ def load_pps_from_ledger( acctname: str, # post normalization filter on ledger entries to be processed - filter_by: Optional[list[Transaction]] = None, + filter_by: Optional[list[dict]] = None, ) -> dict[str, Position]: ''' @@ -381,7 +390,7 @@ def load_pps_from_ledger( records = brokermod.norm_trade_records(ledger) if filter_by: - bsuids = set(r.bsuid for r in filter_by) + bsuids = set(filter_by) records = filter(lambda r: r.bsuid in bsuids, records) return update_pps(records) @@ -390,7 +399,6 @@ def load_pps_from_ledger( def get_pps( brokername: str, acctids: Optional[set[str]] = set(), - key_by: Optional[str] = None, ) -> dict[str, dict[str, Position]]: ''' @@ -403,7 +411,9 @@ def get_pps( # load dicts as inlines to preserve compactness # 
_dict=toml.decoder.InlineTableDict, ) + all_active = {} + all_closed = {} # try to load any ledgers if no section found bconf, path = config.load('brokers') @@ -419,10 +429,11 @@ def get_pps( ): continue - active = update_pps_conf(brokername, account, key_by=key_by) + active, closed = update_pps_conf(brokername, account) all_active.setdefault(account, {}).update(active) + all_closed.setdefault(account, {}).update(closed) - return all_active + return all_active, all_closed # TODO: instead see if we can hack tomli and tomli-w to do the same: @@ -566,7 +577,8 @@ def load_pps_from_toml( # even though the ledger *was* updated. For this cases we allow the # caller to pass in a symbol set they'd like to reload from the # underlying ledger to be reprocessed in computing pps state. - reload_records: Optional[list[Transaction]] = None, + reload_records: Optional[dict[str, str]] = None, + update_from_ledger: bool = False, ) -> tuple[dict, dict[str, Position]]: ''' @@ -580,9 +592,9 @@ def load_pps_from_toml( pps = brokersection.setdefault(acctid, {}) pp_objs = {} - if not pps: - # no pps entry yet for this broker/account so parse - # any available ledgers to build a pps state. + # no pps entry yet for this broker/account so parse any available + # ledgers to build a brand new pps state. + if not pps or update_from_ledger: pp_objs = load_pps_from_ledger( brokername, acctid, @@ -591,8 +603,7 @@ def load_pps_from_toml( # Reload symbol specific ledger entries if requested by the # caller **AND** none exist in the current pps state table. elif ( - pps and reload_records and - not any(r.fqsn in pps for r in reload_records) + pps and reload_records ): # no pps entry yet for this broker/account so parse # any available ledgers to build a pps state. @@ -609,6 +620,7 @@ def load_pps_from_toml( # unmarshal/load ``pps.toml`` config entries into object form. 
for fqsn, entry in pps.items(): + bsuid = entry['bsuid'] # convert clears sub-tables (only in this form # for toml re-presentation) back into a master table. @@ -622,13 +634,29 @@ def load_pps_from_toml( tid = clears_table.pop('tid') clears[tid] = clears_table + size = entry['size'] + + # TODO: an audit system for existing pps entries? + # if not len(clears) == abs(size): + # pp_objs = load_pps_from_ledger( + # brokername, + # acctid, + # filter_by=reload_records, + # ) + # reason = 'size <-> len(clears) mismatch' + # raise ValueError( + # '`pps.toml` entry is invalid:\n' + # f'{fqsn}\n' + # f'{pformat(entry)}' + # ) + expiry = entry.get('expiry') if expiry: expiry = pendulum.parse(expiry) - pp_objs[fqsn] = Position( + pp_objs[bsuid] = Position( Symbol.from_fqsn(fqsn, info={}), - size=entry['size'], + size=size, be_price=entry['be_price'], expiry=expiry, bsuid=entry['bsuid'], @@ -649,14 +677,24 @@ def update_pps_conf( acctid: str, trade_records: Optional[list[Transaction]] = None, - key_by: Optional[str] = None, + ledger_reload: Optional[dict[str, str]] = None, -) -> dict[str, Position]: +) -> tuple[ + dict[str, Position], + dict[str, Position], +]: + + # this maps `.bsuid` values to positions + pp_objs: dict[Union[str, int], Position] + + if trade_records and ledger_reload: + for r in trade_records: + ledger_reload[r.bsuid] = r.fqsn conf, pp_objs = load_pps_from_toml( brokername, acctid, - reload_records=trade_records, + reload_records=ledger_reload, ) # update all pp objects from any (new) trade records which @@ -667,6 +705,9 @@ def update_pps_conf( pps=pp_objs, ) + # NOTE: newly closed position are also important to report/return + # since a consumer, like an order mode UI ;), might want to react + # based on the closure. 
active, closed = dump_active(pp_objs) # dict-serialize all active pps @@ -687,8 +728,11 @@ def update_pps_conf( brokerless_key = fqsn.rstrip(f'.{brokername}') pp_entries[brokerless_key] = pp_dict + closed_pp_objs: dict[str, Position] = {} for bsuid in closed: - pp_objs.pop(bsuid, None) + closed_pp = pp_objs.pop(bsuid, None) + if closed_pp: + closed_pp_objs[bsuid] = closed_pp conf[brokername][acctid] = pp_entries @@ -703,11 +747,8 @@ def update_pps_conf( encoder=enc, ) - if key_by: - pp_objs = {getattr(pp, key_by): pp for pp in pp_objs} - # deliver object form of all pps in table to caller - return pp_objs + return pp_objs, closed_pp_objs if __name__ == '__main__': From f9c4b3cc96019fe53674f0886c0915689643c5fd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Jun 2022 18:18:02 -0400 Subject: [PATCH 46/58] Fixes for newly opened and closed pps Before we weren't emitting pp msgs when a position went back to "net zero" (aka the size is zero) nor when a new one was opened (wasn't previously loaded from the `pps.toml`). This reworks a bunch of the incremental update logic as well as ports to the changes in the `piker.pp` module: - rename a few of the normalizing helpers to be more explicit. - drop calling `pp.get_pps()` in the trades dialog task and instead create msgs iteratively, per account, by iterating through collected position and API trade records and calling instead `pp.update_pps_conf()`. - always from-ledger-update both positions reported from ib's pp sys and session api trades detected on ems-trade-dialog startup. - `update_ledger_from_api_trades()` now does **just** that: only updates the trades ledger and returns the transaction set. - `update_and_audit_msgs()` now only the input list of msgs and properly generates new msgs for newly created positions that weren't previously loaded from the `pps.toml`. 
--- piker/brokers/ib/broker.py | 257 ++++++++++++++++++++++--------------- 1 file changed, 156 insertions(+), 101 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 661022fe..b6d780de 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -286,8 +286,7 @@ async def update_ledger_from_api_trades( trade_entries: list[dict[str, Any]], client: Union[Client, MethodProxy], -) -> dict[str, Any]: - +) -> dict[str, pp.Transaction]: # construct piker pps from trade ledger, underneath using # LIFO style breakeven pricing calcs. conf = get_config() @@ -312,95 +311,110 @@ async def update_ledger_from_api_trades( entry['listingExchange'] = pexch - records = trades_to_records( + entries = trades_to_ledger_entries( conf['accounts'].inverse, trade_entries, ) - actives = {} # write recent session's trades to the user's (local) ledger file. - for acctid, trades_by_id in records.items(): - + records: dict[str, pp.Transactions] = {} + for acctid, trades_by_id in entries.items(): with pp.open_trade_ledger('ib', acctid) as ledger: ledger.update(trades_by_id) - # normalize - records = norm_trade_records(trades_by_id) + # normalize to transaction form + records[acctid] = norm_trade_records(trades_by_id) - # (incrementally) update the user's pps in mem and - # in the `pps.toml`. - active = pp.update_pps_conf('ib', acctid, records) - actives.update(active) - - return actives + return records -async def update_and_audit( - acctid: str, - by_fqsn: dict[str, pp.Position], +async def update_and_audit_msgs( + acctid: str, # no `ib.` prefix is required! + pps: list[pp.Position], cids2pps: dict[tuple[str, int], BrokerdPosition], validate: bool = False, ) -> list[BrokerdPosition]: msgs: list[BrokerdPosition] = [] - pps: dict[int, pp.Position] = {} + # pps: dict[int, pp.Position] = {} - for fqsn, p in by_fqsn.items(): + for p in pps: bsuid = p.bsuid # build trade-session-actor local table # of pps from unique symbol ids. 
- pps[bsuid] = p + # pps[bsuid] = p # retreive equivalent ib reported position message # for comparison/audit versus the piker equivalent # breakeven pp calcs. - ibppmsg = cids2pps[(acctid, bsuid)] + ibppmsg = cids2pps.get((acctid, bsuid)) - msg = BrokerdPosition( - broker='ib', + if ibppmsg: + msg = BrokerdPosition( + broker='ib', - # XXX: ok so this is annoying, we're relaying - # an account name with the backend suffix prefixed - # but when reading accounts from ledgers we don't - # need it and/or it's prefixed in the section - # table.. - account=ibppmsg.account, - # XXX: the `.ib` is stripped..? - symbol=ibppmsg.symbol, - currency=ibppmsg.currency, - size=p.size, - avg_price=p.be_price, - ) - msgs.append(msg) - - if validate: - ibsize = ibppmsg.size - pikersize = msg.size - diff = pikersize - ibsize - - # if ib reports a lesser pp it's not as bad since we can - # presume we're at least not more in the shit then we - # thought. - if diff: - raise ValueError( - f'POSITION MISMATCH ib <-> piker ledger:\n' - f'ib: {ibppmsg}\n' - f'piker: {msg}\n' - 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' - ) - msg.size = ibsize - - if ibppmsg.avg_price != msg.avg_price: - - # TODO: make this a "propoganda" log level? - log.warning( - 'The mega-cucks at IB want you to believe with their ' - f'"FIFO" positioning for {msg.symbol}:\n' - f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' - f'piker, LIFO breakeven PnL price: {msg.avg_price}' + # XXX: ok so this is annoying, we're relaying + # an account name with the backend suffix prefixed + # but when reading accounts from ledgers we don't + # need it and/or it's prefixed in the section + # table.. + account=ibppmsg.account, + # XXX: the `.ib` is stripped..? 
+ symbol=ibppmsg.symbol, + currency=ibppmsg.currency, + size=p.size, + avg_price=p.be_price, ) + msgs.append(msg) + + if validate: + ibsize = ibppmsg.size + pikersize = msg.size + diff = pikersize - ibsize + + # if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we + # thought. + if diff: + raise ValueError( + f'POSITION MISMATCH ib <-> piker ledger:\n' + f'ib: {ibppmsg}\n' + f'piker: {msg}\n' + 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' + ) + msg.size = ibsize + + if ibppmsg.avg_price != msg.avg_price: + + # TODO: make this a "propoganda" log level? + log.warning( + 'The mega-cucks at IB want you to believe with their ' + f'"FIFO" positioning for {msg.symbol}:\n' + f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n' + f'piker, LIFO breakeven PnL price: {msg.avg_price}' + ) + + else: + # make brand new message + msg = BrokerdPosition( + broker='ib', + + # XXX: ok so this is annoying, we're relaying + # an account name with the backend suffix prefixed + # but when reading accounts from ledgers we don't + # need it and/or it's prefixed in the section + # table.. we should just strip this from the message + # right since `.broker` is already included? + account=f'ib.{acctid}', + # XXX: the `.ib` is stripped..? + symbol=p.symbol.front_fqsn(), + # currency=ibppmsg.currency, + size=p.size, + avg_price=p.be_price, + ) + msgs.append(msg) return msgs @@ -455,7 +469,7 @@ async def trades_dialogue( accounts.add(account) cids2pps: dict[str, BrokerdPosition] = {} - active_accts: set[str] = set() + update_records: dict[str, bidict] = {} # process pp value reported from ib's system. we only use these # to cross-check sizing since average pricing on their end uses @@ -464,39 +478,67 @@ async def trades_dialogue( # money.. 
xb for client in aioclients.values(): for pos in client.positions(): + cid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] - active_accts.add(acctid) - cids2pps[(acctid.strip('ib.'), cid)] = msg + acctid = acctid.strip('ib.') + cids2pps[(acctid, cid)] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') + # collect all ib-pp reported positions so that we can be + # sure know which positions to update from the ledger if + # any are missing from the ``pps.toml`` + update_records.setdefault(acctid, bidict())[cid] = msg.symbol + # update trades ledgers for all accounts from - # connected api clients. + # connected api clients which report trades for **this session**. + new_trades = {} for account, proxy in proxies.items(): trades = await proxy.trades() - if trades: - await update_ledger_from_api_trades( - trades, - proxy, - ) + new_trades.update(await update_ledger_from_api_trades( + trades, + proxy, + )) + + for acctid, trans in new_trades.items(): + for t in trans: + bsuid = t.bsuid + if bsuid in update_records: + assert update_records[bsuid] == t.fqsn + else: + update_records.setdefault(acctid, bidict())[bsuid] = t.fqsn # load all positions from `pps.toml`, cross check with ib's # positions data, and relay re-formatted pps as msgs to the ems. - pps_by_account = pp.get_pps('ib', acctids=active_accts) - - for acctid, by_fqsn in pps_by_account.items(): - msgs = await update_and_audit( + # __2 cases__: + # - new trades have taken place this session that we want to + # always reprocess indempotently, + # - no new trades yet but we want to reload and audit any + # positions reported by ib's sys that may not yet be in + # piker's ``pps.toml`` state-file. 
+ for acctid, to_update in update_records.items(): + trans = new_trades.get(acctid) + active, closed = pp.update_pps_conf( + 'ib', acctid, - by_fqsn, - cids2pps, - validate=True, + trade_records=trans, + ledger_reload=to_update, ) - all_positions.extend(msg.dict() for msg in msgs) + for pps in [active, closed]: + msgs = await update_and_audit_msgs( + acctid, + pps.values(), + cids2pps, + validate=True, + ) + all_positions.extend(msg.dict() for msg in msgs) if not all_positions and cids2pps: raise RuntimeError( - 'Positions report by ib but not found in `pps.toml` !?') + 'Positions reported by ib but not found in `pps.toml`!?\n' + f'{pformat(cids2pps)}' + ) # log.info(f'Loaded {len(trades)} from this session') # TODO: write trades to local ``trades.toml`` @@ -543,26 +585,39 @@ async def emit_pp_update( # compute and relay incrementally updated piker pp acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']] proxy = proxies[acctid] - await update_ledger_from_api_trades( + + acctname = acctid.strip('ib.') + records = (await update_ledger_from_api_trades( [trade_entry], proxy, - ) - # load all positions from `pps.toml`, cross check with - # ib's positions data, and relay re-formatted pps as - # msgs to the ems. - by_acct = pp.get_pps('ib', acctids={acctid}) - acctname = acctid.strip('ib.') - by_fqsn = by_acct[acctname] + ))[acctname] + r = records[0] - for fqsn, p in by_fqsn.items(): - if p.bsuid == trade_entry['contract']['conId']: - # should only be one right? - msgs = await update_and_audit( - acctname, - {fqsn: p}, - cids2pps, - validate=False, - ) + # update and load all positions from `pps.toml`, cross check with + # ib's positions data, and relay re-formatted pps as msgs to the + # ems. we report both the open and closed updates in one map since + # for incremental update we may have just fully closed a pp and need + # to relay that msg as well! 
+ active, closed = pp.update_pps_conf( + 'ib', + acctname, + trade_records=records, + ledger_reload={r.bsuid: r.fqsn}, + ) + + for pos in filter( + bool, + [active.get(r.bsuid), closed.get(r.bsuid)] + ): + msgs = await update_and_audit_msgs( + acctname, + [pos], + cids2pps, + + # ib pp event might not have arrived yet + validate=False, + ) + if msgs: msg = msgs[0] break @@ -669,7 +724,7 @@ async def deliver_trade_events( # TODO: # - normalize out commissions details? # - this is the same as the unpacking loop above in - # ``trades_to_records()`` no? + # ``trades_to_ledger_entries()`` no? trade_entry = ids2fills.setdefault(execid, {}) cost_already_rx = bool(trade_entry) @@ -800,7 +855,7 @@ async def deliver_trade_events( def norm_trade_records( ledger: dict[str, Any], -) -> dict[str, list[pp.Transaction]]: +) -> list[pp.Transaction]: ''' Normalize a flex report or API retrieved executions ledger into our standard record format. @@ -899,7 +954,7 @@ def norm_trade_records( return records -def trades_to_records( +def trades_to_ledger_entries( accounts: bidict, trade_entries: list[object], source_type: str = 'api', @@ -1026,7 +1081,7 @@ def load_flex_trades( # log.info(f'Loaded {ln} trades from flex query') print(f'Loaded {ln} trades from flex query') - trades_by_account = trades_to_records( + trades_by_account = trades_to_ledger_entries( # get reverse map to user account names conf['accounts'].inverse, trade_entries, From 566a54ffb6d48c49854318f383006a463fbf990e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Jun 2022 18:54:23 -0400 Subject: [PATCH 47/58] Reset the clears table on zero size conditions --- piker/pp.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index fd8d5cda..4d1be0c4 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -310,13 +310,17 @@ def update_pps( cost=2*r.cost, ) - # track clearing data - pp.clears[r.tid] = { - 'cost': r.cost, - 'price': r.price, - 'size': r.size, - 'dt': 
str(r.dt), - } + if pp.size == 0: + pp.clears.clear() + + else: + # track clearing data + pp.clears[r.tid] = { + 'cost': r.cost, + 'price': r.price, + 'size': r.size, + 'dt': str(r.dt), + } assert len(set(pp.clears)) == len(pp.clears) return pps From 87f301500de7456c4307950a6c0526e53645db17 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 14:59:47 -0400 Subject: [PATCH 48/58] Simplify updates to single-pass, fix clears minimizing Gah, was a remaining bug where if you tried to update the pps state with both new trades and from the ledger you'd do a double add of transactions that were cleared during a `update_pps()` loop. Instead now keep all clears in tact until ready to serialize to the `pps.toml` file in which cases we call a new method `Position.minimize_clears()` which does the work of only keep clears since the last net-zero size. Re-implement `update_pps_conf()` update logic as a single pass loop which does expiry and size checking for closed pps all in one pass thus allowing us to drop `dump_active()` which was kinda redundant anyway.. --- piker/pp.py | 166 ++++++++++++++++++++++++++++------------------------ 1 file changed, 89 insertions(+), 77 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 4d1be0c4..bdd2fae0 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -20,11 +20,13 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. 
(looking at you `ib` and dirt-bird friends) ''' +from collections import deque from contextlib import contextmanager as cm # from pprint import pformat import os from os import path import re +import time from typing import ( Any, Optional, @@ -75,7 +77,9 @@ def open_trade_ledger( with open(tradesfile, 'w') as cf: pass # touch with open(tradesfile, 'rb') as cf: + start = time.time() ledger = tomli.load(cf) + print(f'Ledger load took {time.time() - start}s') cpy = ledger.copy() try: yield cpy @@ -205,6 +209,18 @@ class Position(Struct): ''' return self.be_price * self.size + def update( + self, + t: Transaction, + + ) -> None: + self.clears[t.tid] = { + 'cost': t.cost, + 'price': t.price, + 'size': t.size, + 'dt': str(t.dt), + } + def lifo_update( self, size: float, @@ -257,6 +273,32 @@ class Position(Struct): return new_size, self.be_price + def minimize_clears( + self, + + ) -> dict[str, dict]: + ''' + Minimize the position's clears entries by removing + all transactions before the last net zero size to avoid + unecessary history irrelevant to the current pp state. + + + ''' + size: float = 0 + clears_since_zero: deque[tuple(str, dict)] = deque() + + # scan for the last "net zero" position by + # iterating clears in reverse. 
+ for tid, clear in reversed(self.clears.items()): + size += clear['size'] + clears_since_zero.appendleft((tid, clear)) + + if size == 0: + break + + self.clears = dict(clears_since_zero) + return self.clears + def update_pps( records: dict[str, Transaction], @@ -310,62 +352,14 @@ def update_pps( cost=2*r.cost, ) - if pp.size == 0: - pp.clears.clear() + # track clearing data + pp.update(r) - else: - # track clearing data - pp.clears[r.tid] = { - 'cost': r.cost, - 'price': r.price, - 'size': r.size, - 'dt': str(r.dt), - } + assert len(set(pp.clears)) == len(pp.clears) - assert len(set(pp.clears)) == len(pp.clears) return pps -def dump_active( - pps: dict[str, Position], - -) -> tuple[ - dict[str, Any], - dict[str, Any], -]: - ''' - Split pps into those that are "active" (non-zero size) and "closed" - (zero size) and return in 2 dicts. - - Returning the "closed" set is important for updating the pps state - in any ``pps.toml`` such that we remove entries which are no longer - part of any "VaR" set (well presumably, except of course your liquidity - asset could be full of "risk" XD ). 
- - ''' - active = {} - closed = {} - - for k, pp in pps.items(): - - asdict = pp.to_pretoml() - - if pp.expiry is None: - asdict.pop('expiry', None) - - if ( - pp.size == 0 - - # drop time-expired positions (normally derivatives) - or (pp.expiry and pp.expiry < now()) - ): - closed[k] = asdict - else: - active[k] = asdict - - return active, closed - - def load_pps_from_ledger( brokername: str, @@ -391,11 +385,13 @@ def load_pps_from_ledger( return {} brokermod = get_brokermod(brokername) - records = brokermod.norm_trade_records(ledger) + src_records = brokermod.norm_trade_records(ledger) if filter_by: bsuids = set(filter_by) - records = filter(lambda r: r.bsuid in bsuids, records) + records = list(filter(lambda r: r.bsuid in bsuids, src_records)) + else: + records = src_records return update_pps(records) @@ -709,34 +705,50 @@ def update_pps_conf( pps=pp_objs, ) + pp_entries = {} # dict-serialize all active pps # NOTE: newly closed position are also important to report/return # since a consumer, like an order mode UI ;), might want to react # based on the closure. - active, closed = dump_active(pp_objs) - - # dict-serialize all active pps - pp_entries = {} - - for bsuid, pp_dict in active.items(): - - # normalize to a simpler flat dict format - s = pp_dict.pop('symbol') - # TODO: we need to figure out how to have one top level - # listing venue here even when the backend isn't providing - # it via the trades ledger.. - fqsn = s.front_fqsn() - - print(f'Updating active pp: {fqsn}') - # XXX: ugh, it's cuz we push the section under - # the broker name.. maybe we need to rethink this? 
- brokerless_key = fqsn.rstrip(f'.{brokername}') - pp_entries[brokerless_key] = pp_dict - closed_pp_objs: dict[str, Position] = {} - for bsuid in closed: - closed_pp = pp_objs.pop(bsuid, None) - if closed_pp: - closed_pp_objs[bsuid] = closed_pp + + for bsuid in list(pp_objs): + pp = pp_objs[bsuid] + pp.minimize_clears() + + if ( + pp.size == 0 + + # drop time-expired positions (normally derivatives) + or (pp.expiry and pp.expiry < now()) + ): + # if expired the position is closed + pp.size = 0 + + # position is already closed aka "net zero" + closed_pp = pp_objs.pop(bsuid, None) + if closed_pp: + closed_pp_objs[bsuid] = closed_pp + + else: + # serialize to pre-toml form + asdict = pp.to_pretoml() + + if pp.expiry is None: + asdict.pop('expiry', None) + + # TODO: we need to figure out how to have one top level + # listing venue here even when the backend isn't providing + # it via the trades ledger.. + # drop symbol obj in serialized form + s = asdict.pop('symbol') + fqsn = s.front_fqsn() + print(f'Updating active pp: {fqsn}') + + # XXX: ugh, it's cuz we push the section under + # the broker name.. maybe we need to rethink this? + brokerless_key = fqsn.rstrip(f'.{brokername}') + + pp_entries[brokerless_key] = asdict conf[brokername][acctid] = pp_entries From aec48a1dd5032c5f40640bc8dfb4506ad435a793 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 15:07:56 -0400 Subject: [PATCH 49/58] Right, zero sized "closed out" msgs are totally fine --- piker/brokers/ib/broker.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index b6d780de..11f43a6f 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -414,6 +414,12 @@ async def update_and_audit_msgs( size=p.size, avg_price=p.be_price, ) + if validate and p.size: + raise ValueError( + f'UNEXPECTED POSITION ib <-> piker ledger:\n' + f'piker: {msg}\n' + 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?' 
+ ) msgs.append(msg) return msgs From 8a7e391b4e1fcf14b588f65eaf3105495e9a6cd3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 15:13:58 -0400 Subject: [PATCH 50/58] Terser startup msg fields --- piker/cli/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/piker/cli/__init__.py b/piker/cli/__init__.py index e9512322..853860aa 100644 --- a/piker/cli/__init__.py +++ b/piker/cli/__init__.py @@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb): ) log.info( - f'`marketstore` up!\n' - f'`marketstored` pid: {pid}\n' - f'docker container id: {cid}\n' + f'`marketstored` up!\n' + f'pid: {pid}\n' + f'container id: {cid[:12]}\n' f'config: {pformat(config)}' ) From c6efa2641b081fc0e0caec4bfe8b78d01213a19d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 16:11:50 -0400 Subject: [PATCH 51/58] Cost part of position breakeven calc is direction dependent --- piker/pp.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/pp.py b/piker/pp.py index bdd2fae0..1bf3580e 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -25,6 +25,7 @@ from contextlib import contextmanager as cm # from pprint import pformat import os from os import path +from math import copysign import re import time from typing import ( @@ -264,7 +265,7 @@ class Position(Struct): self.be_price = ( abs(size) * price # weight of current exec + - cost # transaction cost + copysign(1, size)*cost # transaction cost + self.be_price * abs(self.size) # weight of previous pp ) / abs(new_size) From 557562e25c74ffcf4505e3681dd6856b05dd59d2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 20:18:15 -0400 Subject: [PATCH 52/58] Build out adhoc sym map from futes list --- piker/brokers/ib/api.py | 41 ++++++++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 1305e486..ddfef9a1 100644 --- a/piker/brokers/ib/api.py +++ 
b/piker/brokers/ib/api.py @@ -161,30 +161,23 @@ class NonShittyIB(ibis.IB): self.client.apiEnd += self.disconnectedEvent -# map of symbols to contract ids -_adhoc_cmdty_data_map = { - # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 - - # NOTE: some cmdtys/metals don't have trade data like gold/usd: - # https://groups.io/g/twsapi/message/44174 - 'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), -} - _futes_venues = ( 'GLOBEX', 'NYMEX', 'CME', 'CMECRYPTO', + 'COMEX', + 'CMDTY', # special name case.. ) _adhoc_futes_set = { # equities 'nq.globex', - 'mnq.globex', + 'mnq.globex', # micro 'es.globex', - 'mes.globex', + 'mes.globex', # micro # cypto$ 'brr.cmecrypto', @@ -201,13 +194,34 @@ _adhoc_futes_set = { # metals 'xauusd.cmdty', # gold spot 'gc.nymex', - 'mgc.nymex', + 'mgc.nymex', # micro + + # oil & gas + 'cl.nymex', 'xagusd.cmdty', # silver spot 'ni.nymex', # silver futes 'qi.comex', # mini-silver futes } + +# map of symbols to contract ids +_adhoc_symbol_map = { + # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 + + # NOTE: some cmdtys/metals don't have trade data like gold/usd: + # https://groups.io/g/twsapi/message/44174 + 'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), +} +for qsn in _adhoc_futes_set: + sym, venue = qsn.split('.') + assert venue.upper() in _futes_venues, f'{venue}' + _adhoc_symbol_map[sym.upper()] = ( + {'exchange': venue}, + {}, + ) + + # exchanges we don't support at the moment due to not knowing # how to do symbol-contract lookup correctly likely due # to not having the data feeds subscribed. 
@@ -215,6 +229,7 @@ _exch_skip_list = { 'ASX', # aussie stocks 'MEXI', # mexican stocks 'VALUE', # no idea + 'FUNDSERV', # no idea } # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 @@ -569,7 +584,7 @@ class Client: # commodities elif exch == 'CMDTY': # eg. XAUUSD.CMDTY - con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym] + con_kwargs, bars_kwargs = _adhoc_symbol_map[sym] con = ibis.Commodity(**con_kwargs) con.bars_kwargs = bars_kwargs From fa8920758385bd26c1c85a6190a08483b6c924c1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 20:18:59 -0400 Subject: [PATCH 53/58] Use sign of the new size which indicates direction of position --- piker/pp.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/piker/pp.py b/piker/pp.py index 1bf3580e..0a67d04f 100644 --- a/piker/pp.py +++ b/piker/pp.py @@ -236,7 +236,7 @@ class Position(Struct): # - in this case we could recalc the be price to # be reverted back to it's prior value before the nearest term # trade was opened.? - dynamic_breakeven_price: bool = False, + # dynamic_breakeven_price: bool = False, ) -> (float, float): ''' @@ -263,12 +263,14 @@ class Position(Struct): # the size increases not when it decreases (i.e. the # position is being made smaller) self.be_price = ( - abs(size) * price # weight of current exec + # weight of current exec = (size * price) + cost + (abs(size) * price) + - copysign(1, size)*cost # transaction cost + (copysign(1, new_size) * cost) # transaction cost + + # weight of existing be price self.be_price * abs(self.size) # weight of previous pp - ) / abs(new_size) + ) / abs(new_size) # normalized by the new size: weighted mean. 
self.size = new_size From d6c32bba866b09a03527f5d1eadd70a4bbc4ede9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 20:19:43 -0400 Subject: [PATCH 54/58] Use new adhoc sym map for symbols without exchange tags (usually futes) --- piker/brokers/ib/broker.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 11f43a6f..3638d6d6 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -64,7 +64,8 @@ from piker.clearing._messages import ( from piker.data._source import Symbol from .api import ( _accounts2clients, - _adhoc_futes_set, + # _adhoc_futes_set, + _adhoc_symbol_map, log, get_config, open_client_proxies, @@ -87,15 +88,23 @@ def pack_position( # TODO: lookup fqsn even for derivs. symbol = con.symbol.lower() + # try our best to figure out the exchange / venue exch = (con.primaryExchange or con.exchange).lower() - fqsn = '.'.join((symbol, exch)) if not exch: - # attempt to lookup the symbol from our - # hacked set.. - for sym in _adhoc_futes_set: - if symbol in sym: - fqsn = sym - break + # for wtv cucked reason some futes don't show their + # exchange (like CL.NYMEX) ... 
+ entry = _adhoc_symbol_map.get( + con.symbol or con.localSymbol + ) + if entry: + meta, kwargs = entry + cid = meta.get('conId') + if cid: + assert con.conId == meta['conId'] + exch = meta['exchange'] + + assert exch, f'No clue:\n {con}' + fqsn = '.'.join((symbol, exch)) expiry = con.lastTradeDateOrContractMonth if expiry: From 695ba5288d726366b2e9a8822f605dd045927a86 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Jun 2022 20:20:08 -0400 Subject: [PATCH 55/58] Comment-drop adhoc symbol (futes) matching in search --- piker/brokers/ib/feed.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index bc3ec8d5..b22ddc1b 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -916,17 +916,17 @@ async def open_symbol_search( # trigger async request await trio.sleep(0) - # match against our ad-hoc set immediately - adhoc_matches = fuzzy.extractBests( - pattern, - list(_adhoc_futes_set), - score_cutoff=90, - ) - log.info(f'fuzzy matched adhocs: {adhoc_matches}') - adhoc_match_results = {} - if adhoc_matches: - # TODO: do we need to pull contract details? - adhoc_match_results = {i[0]: {} for i in adhoc_matches} + # # match against our ad-hoc set immediately + # adhoc_matches = fuzzy.extractBests( + # pattern, + # list(_adhoc_futes_set), + # score_cutoff=90, + # ) + # log.info(f'fuzzy matched adhocs: {adhoc_matches}') + # adhoc_match_results = {} + # if adhoc_matches: + # # TODO: do we need to pull contract details? 
+ # adhoc_match_results = {i[0]: {} for i in adhoc_matches} log.debug(f'fuzzy matching stocks {stock_results}') stock_matches = fuzzy.extractBests( @@ -935,7 +935,8 @@ async def open_symbol_search( score_cutoff=50, ) - matches = adhoc_match_results | { + # matches = adhoc_match_results | { + matches = { item[0]: {} for item in stock_matches } # TODO: we used to deliver contract details From 2b1fb90e03f329cf1fccb3bfbd4bde1ab05c269a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 25 Jun 2022 18:41:49 -0400 Subject: [PATCH 56/58] Add tractor breaker assert.. --- piker/brokers/ib/broker.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 3638d6d6..4cc20b63 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -454,6 +454,14 @@ async def trades_dialogue( accounts = set() clients: list[tuple[Client, trio.MemoryReceiveChannel]] = [] + # TODO: this causes a massive tractor bug when you run marketstored + # with ``--tsdb``... 
you should get: + # - first error the assertion + # - chart should get that error and die + # - pikerd goes to debugger again from trio nursery multi-error + # - hitting final control-c to kill daemon will lead to hang + # assert 0 + async with ( trio.open_nursery() as nurse, open_client_proxies() as (proxies, aioclients), From 453ebdfe30eda804c068e10b661190acfa11089d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 25 Jun 2022 18:42:06 -0400 Subject: [PATCH 57/58] Fix field name to new `.bsuid` --- piker/clearing/_paper_engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index bd78e1b4..cf580876 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -264,7 +264,7 @@ class PaperBoi: Symbol(key=symbol), size=pp_msg.size, be_price=pp_msg.avg_price, - uid=symbol.front_fqsn(), + bsuid=symbol, ) pp_msg.size, pp_msg.avg_price = pp.lifo_update(size, price) From 287a2c8396ada86ee5b28fe7e46806e663faa572 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 29 Jun 2022 10:00:38 -0400 Subject: [PATCH 58/58] Put swb2 in venue filter for now --- piker/brokers/ib/api.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index ddfef9a1..207f56f7 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -226,10 +226,14 @@ for qsn in _adhoc_futes_set: # how to do symbol-contract lookup correctly likely due # to not having the data feeds subscribed. _exch_skip_list = { + 'ASX', # aussie stocks 'MEXI', # mexican stocks - 'VALUE', # no idea - 'FUNDSERV', # no idea + + # no idea + 'VALUE', + 'FUNDSERV', + 'SWB2', } # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924