From 4f36a03df2621845a0ca294ce2844c8f9a76fa39 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 10 Mar 2023 14:59:46 -0500
Subject: [PATCH 001/294] Ensure clearing table entries are time-sorted..

Not sure how this worked before, but the PPU calculation critically
requires that clearing transactions be processed in correct
chronological order! Fix this by sorting `trans: dict[str, Transaction]`
in the `PpTable.update_from_trans()` method.

Also, move the `get_likely_pair()` parser from the `kraken` backend here
for future use, particularly when we revamp the asset-transaction
processing layer.

---
 piker/pp.py | 51 ++++++++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 46 insertions(+), 5 deletions(-)

diff --git a/piker/pp.py b/piker/pp.py
index a01bdc4e..5b2a8ce6 100644
--- a/piker/pp.py
+++ b/piker/pp.py
@@ -484,7 +484,9 @@ class Position(Struct):
         if self.split_ratio is not None:
             size = round(size * self.split_ratio)
 
-        return float(self.symbol.quantize_size(size))
+        return float(
+            self.symbol.quantize_size(size),
+        )
 
     def minimize_clears(
         self,
@@ -564,9 +566,13 @@ class PpTable(Struct):
         pps = self.pps
         updated: dict[str, Position] = {}
 
-        # lifo update all pps from records
-        for tid, t in trans.items():
-
+        # lifo update all pps from records, ensuring
+        # we compute the PPU and size sorted in time!
+        for t in sorted(
+            trans.values(),
+            key=lambda t: t.dt,
+            reverse=True,
+        ):
             pp = pps.setdefault(
                 t.bsuid,
 
@@ -590,7 +596,10 @@
             # included in the current pps state.
             if (
                 t.tid in clears
-                or first_clear_dt and t.dt < first_clear_dt
+                or (
+                    first_clear_dt
+                    and t.dt < first_clear_dt
+                )
             ):
                 # NOTE: likely you'll see repeats of the same
                 # ``Transaction`` passed in here if/when you are restarting
@@ -607,6 +616,8 @@
         for bsuid, pp in updated.items():
             pp.ensure_state()
 
+        # deliver only the position entries that were actually updated
+        # (modified the state) from the input transaction set.
         return updated
 
     def dump_active(
@@ -1031,6 +1042,36 @@
         table.write_config()
 
+
+
+def get_likely_pair(
+    src: str,
+    dst: str,
+    bsuid: str,
+
+) -> str:
+    '''
+    Attempt to get the likely trading pair matching a given destination
+    asset `dst: str`.
+
+    '''
+    try:
+        src_name_start = bsuid.rindex(src)
+    except (
+        ValueError,   # substr not found
+    ):
+        # TODO: handle nested positions..(i.e.
+        # positions where the src fiat was used to
+        # buy some other dst which was further used
+        # to buy another dst..)
+        log.warning(
+            f'No src fiat {src} found in {bsuid}?'
+        )
+        return
+
+    likely_dst = bsuid[:src_name_start]
+    if likely_dst == dst:
+        return bsuid
+
+
 if __name__ == '__main__':
     import sys

From badc30baae79a5edbe4e4b10dcc4d2860a926412 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 10 Mar 2023 15:37:44 -0500
Subject: [PATCH 002/294] Add an inverse of `float_digits()`:
 `digits_to_dec()`

---
 piker/data/_source.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/piker/data/_source.py b/piker/data/_source.py
index d358cd96..e503105e 100644
--- a/piker/data/_source.py
+++ b/piker/data/_source.py
@@ -90,6 +90,21 @@ def float_digits(
     return int(-Decimal(str(value)).as_tuple().exponent)
 
+
+
+def digits_to_dec(
+    ndigits: int,
+) -> Decimal:
+    '''
+    Return the minimum float value for an input integer value.
+
+    eg. 3 -> 0.001
+
+    '''
+    if ndigits == 0:
+        return Decimal('0')
+
+    return Decimal('0.' + '0'*(ndigits-1) + '1')
+
+
 def ohlc_zeros(length: int) -> np.ndarray:
     """Construct an OHLC field formatted structarray.
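# NOTE: editor-added sketch (hypothetical REPL session) of the intended
# round-trip between `float_digits()` and its new inverse:
#
#   >>> from decimal import Decimal
#   >>> digits_to_dec(3)
#   Decimal('0.001')
#   >>> float_digits(0.001)
#   3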
@@ -213,10 +228,13 @@ class Symbol(Struct): return Symbol( key=symbol, + tick_size=tick_size, lot_tick_size=lot_size, + tick_size_digits=float_digits(tick_size), lot_size_digits=float_digits(lot_size), + suffix=suffix, broker_info={broker: info}, ) From d01fdbf981324ddb0885e12c914417bae4d70e54 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Mar 2023 16:08:25 -0500 Subject: [PATCH 003/294] '`kraken`: fix pos loading using `digits_to_dec()` to pair info Our issue was not having the correct value set on each `Symbol.lot_tick_size`.. and then doing PPU calcs with the default set for legacy mkts.. Also, - actually write `pps.toml` on broker mode exit. - drop `get_likely_pair()` and import from pp module. --- piker/brokers/kraken/api.py | 4 ++ piker/brokers/kraken/broker.py | 71 +++++++++++++++------------------- 2 files changed, 36 insertions(+), 39 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 94d6dc41..e99e6b83 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -20,6 +20,7 @@ Kraken web API wrapping. ''' from contextlib import asynccontextmanager as acm from datetime import datetime +from decimal import Decimal import itertools from typing import ( Any, @@ -248,6 +249,9 @@ class Client: {}, ) by_bsuid = resp['result'] + + # TODO: we need to pull out the "asset" decimals + # data and return a `decimal.Decimal` instead here! return { self._atable[sym].lower(): float(bal) for sym, bal in by_bsuid.items() diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index e09dd35a..106ff61a 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -21,7 +21,6 @@ Order api and machinery from collections import ChainMap, defaultdict from contextlib import ( asynccontextmanager as acm, - contextmanager as cm, ) from functools import partial from itertools import count @@ -47,8 +46,12 @@ from piker.pp import ( Transaction, open_trade_ledger, open_pps, + get_likely_pair, +) +from piker.data._source import ( + Symbol, + digits_to_dec, ) -from piker.data._source import Symbol from piker.clearing._messages import ( Order, Status, @@ -470,12 +473,14 @@ async def trades_dialogue( with ( open_pps( 'kraken', - acctid + acctid, + write_on_exit=True, + ) as table, open_trade_ledger( 'kraken', - acctid + acctid, ) as ledger_dict, ): # transaction-ify the ledger entries @@ -494,7 +499,10 @@ async def trades_dialogue( # what amount of trades-transactions need # to be reloaded. balances = await client.get_balances() + # await tractor.breakpoint() + for dst, size in balances.items(): + # we don't care about tracking positions # in the user's source fiat currency. if ( @@ -508,45 +516,20 @@ async def trades_dialogue( ) continue - def get_likely_pair( - dst: str, - bsuid: str, - src_fiat: str = src_fiat - - ) -> str: - ''' - Attempt to get the likely trading pair masting - a given destination asset `dst: str`. - - ''' - try: - src_name_start = bsuid.rindex(src_fiat) - except ( - ValueError, # substr not found - ): - # TODO: handle nested positions..(i.e. - # positions where the src fiat was used to - # buy some other dst which was furhter used - # to buy another dst..) - log.warning( - f'No src fiat {src_fiat} found in {bsuid}?' 
- ) - return - - likely_dst = bsuid[:src_name_start] - if likely_dst == dst: - return bsuid - def has_pp( dst: str, size: float, - ) -> Position | bool: + ) -> Position | None: src2dst: dict[str, str] = {} for bsuid in table.pps: - likely_pair = get_likely_pair(dst, bsuid) + likely_pair = get_likely_pair( + src_fiat, + dst, + bsuid, + ) if likely_pair: src2dst[src_fiat] = dst @@ -574,7 +557,7 @@ async def trades_dialogue( ) return pp - return False + return None # signal no entry pos = has_pp(dst, size) if not pos: @@ -602,7 +585,11 @@ async def trades_dialogue( # yet and thus this likely pair grabber will # likely fail. for bsuid in table.pps: - likely_pair = get_likely_pair(dst, bsuid) + likely_pair = get_likely_pair( + src_fiat, + dst, + bsuid, + ) if likely_pair: break else: @@ -724,8 +711,8 @@ async def handle_order_updates( ''' Main msg handling loop for all things order management. - This code is broken out to make the context explicit and state variables - defined in the signature clear to the reader. + This code is broken out to make the context explicit and state + variables defined in the signature clear to the reader. ''' async for msg in ws_stream: @@ -1204,7 +1191,13 @@ def norm_trade_records( fqsn, info={ 'lot_size_digits': pair_info.lot_decimals, + 'lot_tick_size': digits_to_dec( + pair_info.lot_decimals, + ), 'tick_size_digits': pair_info.pair_decimals, + 'price_tick_size': digits_to_dec( + pair_info.pair_decimals, + ), 'asset_type': 'crypto', }, ) From beb6544bad60ad263873660bf5dc591568d9b17c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Mar 2023 16:42:37 -0500 Subject: [PATCH 004/294] Start a new `.accounting` subpkg, move `.pp` contents there --- piker/{pp.py => accounting/__init__.py} | 13 +++++++------ piker/brokers/ib/broker.py | 2 +- piker/brokers/kraken/api.py | 2 +- piker/brokers/kraken/broker.py | 2 +- piker/clearing/_allocate.py | 2 +- piker/clearing/_paper_engine.py | 4 +--- piker/ui/_position.py | 2 +- piker/ui/order_mode.py | 2 +- tests/test_paper.py | 2 +- 9 files changed, 15 insertions(+), 16 deletions(-) rename piker/{pp.py => accounting/__init__.py} (99%) diff --git a/piker/pp.py b/piker/accounting/__init__.py similarity index 99% rename from piker/pp.py rename to piker/accounting/__init__.py index 5b2a8ce6..cc8af877 100644 --- a/piker/pp.py +++ b/piker/accounting/__init__.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . + ''' Personal/Private position parsing, calculating, summarizing in a way that doesn't try to cuk most humans who prefer to not lose their moneys.. @@ -41,12 +42,12 @@ from pendulum import datetime, now import tomli import toml -from . import config -from .brokers import get_brokermod -from .clearing._messages import BrokerdPosition, Status -from .data._source import Symbol, unpack_fqsn -from .log import get_logger -from .data.types import Struct +from .. 
import config +from ..brokers import get_brokermod +from ..clearing._messages import BrokerdPosition, Status +from ..data._source import Symbol, unpack_fqsn +from ..data.types import Struct +from ..log import get_logger log = get_logger(__name__) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 56756a76..d5b7571e 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -51,7 +51,7 @@ from ib_insync.objects import Position as IbPosition import pendulum from piker import config -from piker.pp import ( +from piker.accounting import ( Position, Transaction, open_trade_ledger, diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index e99e6b83..74ad734b 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -49,7 +49,7 @@ from piker.brokers._util import ( BrokerError, DataThrottle, ) -from piker.pp import Transaction +from piker.accounting import Transaction from . import log # // diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 106ff61a..5d1bbb01 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -40,7 +40,7 @@ import pendulum import trio import tractor -from piker.pp import ( +from piker.accounting import ( Position, PpTable, Transaction, diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index d201368d..c457de05 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -25,7 +25,7 @@ from bidict import bidict from ..data._source import Symbol from ..data.types import Struct -from ..pp import Position +from ..accounting import Position _size_units = bidict({ diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 7a093ad4..39d5a474 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -39,7 +39,7 @@ import tractor from .. import data from ..data.types import Struct from ..data._source import Symbol -from ..pp import ( +from ..accounting import ( Position, Transaction, open_trade_ledger, @@ -58,8 +58,6 @@ from ._messages import ( BrokerdError, ) -from ..config import load - log = get_logger(__name__) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 41421fb6..3574dd2d 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -46,7 +46,7 @@ from ..calc import ( puterize, ) from ..clearing._allocate import Allocator -from ..pp import Position +from ..accounting import Position from ..data._normalize import iterticks from ..data.feed import ( Feed, diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index cf5f53b1..bf60c0e6 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -36,7 +36,7 @@ import trio from PyQt5.QtCore import Qt from .. 
import config -from ..pp import Position +from ..accounting import Position from ..clearing._client import open_ems, OrderBook from ..clearing._allocate import ( mk_allocator, diff --git a/tests/test_paper.py b/tests/test_paper.py index 53e03f47..2f46c559 100644 --- a/tests/test_paper.py +++ b/tests/test_paper.py @@ -16,7 +16,7 @@ from functools import partial from piker.log import get_logger from piker.clearing._messages import Order -from piker.pp import ( +from piker.accounting import ( open_pps, ) From f549de7c8898cda8d905bca2ca3144527e320601 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Mar 2023 17:19:39 -0500 Subject: [PATCH 005/294] Break out old `.pp` components into submods: `._ledger` and `._pos` --- piker/accounting/__init__.py | 1041 +--------------------------------- piker/accounting/_ledger.py | 125 ++++ piker/accounting/_pos.py | 961 +++++++++++++++++++++++++++++++ 3 files changed, 1108 insertions(+), 1019 deletions(-) create mode 100644 piker/accounting/_ledger.py create mode 100644 piker/accounting/_pos.py diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index cc8af877..a371f7c2 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -16,1031 +16,33 @@ # along with this program. If not, see . ''' -Personal/Private position parsing, calculating, summarizing in a way -that doesn't try to cuk most humans who prefer to not lose their moneys.. -(looking at you `ib` and dirt-bird friends) +"Accounting for degens": count dem numberz that tracks how much you got +for tendiez. ''' -from __future__ import annotations -from contextlib import contextmanager as cm -from pprint import pformat -import os -from os import path -from math import copysign -import re -import time -from typing import ( - Any, - Iterator, - Optional, - Union, - Generator -) - -import pendulum -from pendulum import datetime, now -import tomli -import toml - -from .. import config -from ..brokers import get_brokermod -from ..clearing._messages import BrokerdPosition, Status -from ..data._source import Symbol, unpack_fqsn -from ..data.types import Struct from ..log import get_logger +from ._pos import ( + Transaction, + open_trade_ledger, + PpTable, +) +from ._pos import ( + open_pps, + load_pps_from_ledger, + Position, +) + log = get_logger(__name__) - -@cm -def open_trade_ledger( - broker: str, - account: str, - -) -> Generator[dict, None, None]: - ''' - Indempotently create and read in a trade log file from the - ``/ledgers/`` directory. - - Files are named per broker account of the form - ``_.toml``. The ``accountname`` here is the - name as defined in the user's ``brokers.toml`` config. - - ''' - ldir = path.join(config._config_dir, 'ledgers') - if not path.isdir(ldir): - os.makedirs(ldir) - - fname = f'trades_{broker}_{account}.toml' - tradesfile = path.join(ldir, fname) - - if not path.isfile(tradesfile): - log.info( - f'Creating new local trades ledger: {tradesfile}' - ) - with open(tradesfile, 'w') as cf: - pass # touch - with open(tradesfile, 'rb') as cf: - start = time.time() - ledger = tomli.load(cf) - log.info(f'Ledger load took {time.time() - start}s') - cpy = ledger.copy() - - try: - yield cpy - finally: - if cpy != ledger: - - # TODO: show diff output? 
- # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries - log.info(f'Updating ledger for {tradesfile}:\n') - ledger.update(cpy) - - # we write on close the mutated ledger data - with open(tradesfile, 'w') as cf: - toml.dump(ledger, cf) - - -class Transaction(Struct, frozen=True): - # TODO: should this be ``.to`` (see below)? - fqsn: str - - sym: Symbol - tid: Union[str, int] # unique transaction id - size: float - price: float - cost: float # commisions or other additional costs - dt: datetime - expiry: datetime | None = None - - # optional key normally derived from the broker - # backend which ensures the instrument-symbol this record - # is for is truly unique. - bsuid: Union[str, int] | None = None - - # optional fqsn for the source "asset"/money symbol? - # from: Optional[str] = None - - -def iter_by_dt( - clears: dict[str, Any], -) -> Iterator[tuple[str, dict]]: - ''' - Iterate entries of a ``clears: dict`` table sorted by entry recorded - datetime presumably set at the ``'dt'`` field in each entry. - - ''' - for tid, data in sorted( - list(clears.items()), - key=lambda item: item[1]['dt'], - ): - yield tid, data - - -class Position(Struct): - ''' - Basic pp (personal/piker position) model with attached clearing - transaction history. - - ''' - symbol: Symbol - - # can be +ve or -ve for long/short - size: float - - # "breakeven price" above or below which pnl moves above and below - # zero for the entirety of the current "trade state". - ppu: float - - # unique backend symbol id - bsuid: str - - split_ratio: Optional[int] = None - - # ordered record of known constituent trade messages - clears: dict[ - Union[str, int, Status], # trade id - dict[str, Any], # transaction history summaries - ] = {} - first_clear_dt: Optional[datetime] = None - - expiry: Optional[datetime] = None - - def to_dict(self) -> dict: - return { - f: getattr(self, f) - for f in self.__struct_fields__ - } - - def to_pretoml(self) -> tuple[str, dict]: - ''' - Prep this position's data contents for export to toml including - re-structuring of the ``.clears`` table to an array of - inline-subtables for better ``pps.toml`` compactness. - - ''' - d = self.to_dict() - clears = d.pop('clears') - expiry = d.pop('expiry') - - if self.split_ratio is None: - d.pop('split_ratio') - - # should be obvious from clears/event table - d.pop('first_clear_dt') - - # TODO: we need to figure out how to have one top level - # listing venue here even when the backend isn't providing - # it via the trades ledger.. - # drop symbol obj in serialized form - s = d.pop('symbol') - fqsn = s.front_fqsn() - - broker, key, suffix = unpack_fqsn(fqsn) - sym_info = s.broker_info[broker] - - d['asset_type'] = sym_info['asset_type'] - d['price_tick_size'] = ( - sym_info.get('price_tick_size') - or - s.tick_size - ) - d['lot_tick_size'] = ( - sym_info.get('lot_tick_size') - or - s.lot_tick_size - ) - - if self.expiry is None: - d.pop('expiry', None) - elif expiry: - d['expiry'] = str(expiry) - - toml_clears_list = [] - - # reverse sort so latest clears are at top of section? 
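# NOTE: editor-added sketch (hypothetical tids/dts): `iter_by_dt()`
# above always yields clears oldest-first regardless of dict insertion
# order, which is what keeps this serialization (and the ppu/size
# calcs) chronological:
#
#   >>> import pendulum
#   >>> clears = {
#   ...     'b': {'dt': pendulum.parse('2022-01-02T00:00:00+00:00')},
#   ...     'a': {'dt': pendulum.parse('2022-01-01T00:00:00+00:00')},
#   ... }
#   >>> [tid for tid, entry in iter_by_dt(clears)]
#   ['a', 'b']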
- for tid, data in iter_by_dt(clears): - inline_table = toml.TomlDecoder().get_empty_inline_table() - - # serialize datetime to parsable `str` - inline_table['dt'] = str(data['dt']) - - # insert optional clear fields in column order - for k in ['ppu', 'accum_size']: - val = data.get(k) - if val: - inline_table[k] = val - - # insert required fields - for k in ['price', 'size', 'cost']: - inline_table[k] = data[k] - - inline_table['tid'] = tid - toml_clears_list.append(inline_table) - - d['clears'] = toml_clears_list - - return fqsn, d - - def ensure_state(self) -> None: - ''' - Audit either the `.size` and `.ppu` local instance vars against - the clears table calculations and return the calc-ed values if - they differ and log warnings to console. - - ''' - clears = list(self.clears.values()) - self.first_clear_dt = min(list(entry['dt'] for entry in clears)) - last_clear = clears[-1] - - csize = self.calc_size() - accum = last_clear['accum_size'] - if not self.expired(): - if ( - csize != accum - and csize != round(accum * self.split_ratio or 1) - ): - raise ValueError(f'Size mismatch: {csize}') - else: - assert csize == 0, 'Contract is expired but non-zero size?' - - if self.size != csize: - log.warning( - 'Position state mismatch:\n' - f'{self.size} => {csize}' - ) - self.size = csize - - cppu = self.calc_ppu() - ppu = last_clear['ppu'] - if ( - cppu != ppu - and self.split_ratio is not None - # handle any split info entered (for now) manually by user - and cppu != (ppu / self.split_ratio) - ): - raise ValueError(f'PPU mismatch: {cppu}') - - if self.ppu != cppu: - log.warning( - 'Position state mismatch:\n' - f'{self.ppu} => {cppu}' - ) - self.ppu = cppu - - def update_from_msg( - self, - msg: BrokerdPosition, - - ) -> None: - - # XXX: better place to do this? - symbol = self.symbol - - lot_size_digits = symbol.lot_size_digits - ppu, size = ( - round( - msg['avg_price'], - ndigits=symbol.tick_size_digits - ), - round( - msg['size'], - ndigits=lot_size_digits - ), - ) - - self.ppu = ppu - self.size = size - - @property - def dsize(self) -> float: - ''' - The "dollar" size of the pp, normally in trading (fiat) unit - terms. - - ''' - return self.ppu * self.size - - # TODO: idea: "real LIFO" dynamic positioning. - # - when a trade takes place where the pnl for - # the (set of) trade(s) is below the breakeven price - # it may be that the trader took a +ve pnl on a short(er) - # term trade in the same account. - # - in this case we could recalc the be price to - # be reverted back to it's prior value before the nearest term - # trade was opened.? - # def lifo_price() -> float: - # ... - - def iter_clears(self) -> Iterator[tuple[str, dict]]: - ''' - Iterate the internally managed ``.clears: dict`` table in - datetime-stamped order. - - ''' - return iter_by_dt(self.clears) - - def calc_ppu( - self, - # include transaction cost in breakeven price - # and presume the worst case of the same cost - # to exit this transaction (even though in reality - # it will be dynamic based on exit stratetgy). - cost_scalar: float = 2, - - ) -> float: - ''' - Compute the "price-per-unit" price for the given non-zero sized - rolling position. - - The recurrence relation which computes this (exponential) mean - per new clear which **increases** the accumulative postiion size - is: - - ppu[-1] = ( - ppu[-2] * accum_size[-2] - + - ppu[-1] * size - ) / accum_size[-1] - - where `cost_basis` for the current step is simply the price - * size of the most recent clearing transaction. 
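# NOTE: editor-added worked example (hypothetical numbers, transaction
# costs elided) of the recurrence above over two accumulating clears:
#
#   clear 1: buy 10 @ 100.0 -> ppu = 100.0, accum_size = 10
#   clear 2: buy 10 @ 110.0 -> ppu = (100.0*10 + 110.0*10) / 20 = 105.0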
- - ''' - asize_h: list[float] = [] # historical accumulative size - ppu_h: list[float] = [] # historical price-per-unit - - tid: str - entry: dict[str, Any] - for (tid, entry) in self.iter_clears(): - clear_size = entry['size'] - clear_price = entry['price'] - - last_accum_size = asize_h[-1] if asize_h else 0 - accum_size = last_accum_size + clear_size - accum_sign = copysign(1, accum_size) - - sign_change: bool = False - - if accum_size == 0: - ppu_h.append(0) - asize_h.append(0) - continue - - if accum_size == 0: - ppu_h.append(0) - asize_h.append(0) - continue - - # test if the pp somehow went "passed" a net zero size state - # resulting in a change of the "sign" of the size (+ve for - # long, -ve for short). - sign_change = ( - copysign(1, last_accum_size) + accum_sign == 0 - and last_accum_size != 0 - ) - - # since we passed the net-zero-size state the new size - # after sum should be the remaining size the new - # "direction" (aka, long vs. short) for this clear. - if sign_change: - clear_size = accum_size - abs_diff = abs(accum_size) - asize_h.append(0) - ppu_h.append(0) - - else: - # old size minus the new size gives us size diff with - # +ve -> increase in pp size - # -ve -> decrease in pp size - abs_diff = abs(accum_size) - abs(last_accum_size) - - # XXX: LIFO breakeven price update. only an increaze in size - # of the position contributes the breakeven price, - # a decrease does not (i.e. the position is being made - # smaller). - # abs_clear_size = abs(clear_size) - abs_new_size = abs(accum_size) - - if abs_diff > 0: - - cost_basis = ( - # cost basis for this clear - clear_price * abs(clear_size) - + - # transaction cost - accum_sign * cost_scalar * entry['cost'] - ) - - if asize_h: - size_last = abs(asize_h[-1]) - cb_last = ppu_h[-1] * size_last - ppu = (cost_basis + cb_last) / abs_new_size - - else: - ppu = cost_basis / abs_new_size - - ppu_h.append(ppu) - asize_h.append(accum_size) - - else: - # on "exit" clears from a given direction, - # only the size changes not the price-per-unit - # need to be updated since the ppu remains constant - # and gets weighted by the new size. - asize_h.append(accum_size) - ppu_h.append(ppu_h[-1]) - - final_ppu = ppu_h[-1] if ppu_h else 0 - - # handle any split info entered (for now) manually by user - if self.split_ratio is not None: - final_ppu /= self.split_ratio - - return final_ppu - - def expired(self) -> bool: - ''' - Predicate which checks if the contract/instrument is past its expiry. - - ''' - return bool(self.expiry) and self.expiry < now() - - def calc_size(self) -> float: - ''' - Calculate the unit size of this position in the destination - asset using the clears/trade event table; zero if expired. - - ''' - size: float = 0 - - # time-expired pps (normally derivatives) are "closed" - # and have a zero size. - if self.expired(): - return 0 - - for tid, entry in self.clears.items(): - size += entry['size'] - - if self.split_ratio is not None: - size = round(size * self.split_ratio) - - return float( - self.symbol.quantize_size(size), - ) - - def minimize_clears( - self, - - ) -> dict[str, dict]: - ''' - Minimize the position's clears entries by removing - all transactions before the last net zero size to avoid - unecessary history irrelevant to the current pp state. - - ''' - size: float = 0 - clears_since_zero: list[tuple(str, dict)] = [] - - # TODO: we might just want to always do this when iterating - # a ledger? keep a state of the last net-zero and only do the - # full iterate when no state was stashed? 
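# NOTE: editor-added illustration (hypothetical sizes): for clear sizes
# [+1, -1, +2] the running sum hits zero after the -1, so minimization
# keeps only the trailing +2 clear.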
- - # scan for the last "net zero" position by iterating - # transactions until the next net-zero size, rinse, repeat. - for tid, clear in self.clears.items(): - size += clear['size'] - clears_since_zero.append((tid, clear)) - - if size == 0: - clears_since_zero.clear() - - self.clears = dict(clears_since_zero) - return self.clears - - def add_clear( - self, - t: Transaction, - ) -> dict: - ''' - Update clearing table and populate rolling ppu and accumulative - size in both the clears entry and local attrs state. - - ''' - clear = self.clears[t.tid] = { - 'cost': t.cost, - 'price': t.price, - 'size': t.size, - 'dt': t.dt - } - - # TODO: compute these incrementally instead - # of re-looping through each time resulting in O(n**2) - # behaviour..? - - # NOTE: we compute these **after** adding the entry in order to - # make the recurrence relation math work inside - # ``.calc_size()``. - self.size = clear['accum_size'] = self.calc_size() - self.ppu = clear['ppu'] = self.calc_ppu() - - return clear - - def sugest_split(self) -> float: - ... - - -class PpTable(Struct): - - brokername: str - acctid: str - pps: dict[str, Position] - conf: Optional[dict] = {} - - def update_from_trans( - self, - trans: dict[str, Transaction], - cost_scalar: float = 2, - - ) -> dict[str, Position]: - - pps = self.pps - updated: dict[str, Position] = {} - - # lifo update all pps from records, ensuring - # we compute the PPU and size sorted in time! - for t in sorted( - trans.values(), - key=lambda t: t.dt, - reverse=True, - ): - pp = pps.setdefault( - t.bsuid, - - # if no existing pp, allocate fresh one. - Position( - Symbol.from_fqsn( - t.fqsn, - info={}, - ) if not t.sym else t.sym, - size=0.0, - ppu=0.0, - bsuid=t.bsuid, - expiry=t.expiry, - ) - ) - clears = pp.clears - if clears: - first_clear_dt = pp.first_clear_dt - - # don't do updates for ledger records we already have - # included in the current pps state. - if ( - t.tid in clears - or ( - first_clear_dt - and t.dt < first_clear_dt - ) - ): - # NOTE: likely you'll see repeats of the same - # ``Transaction`` passed in here if/when you are restarting - # a ``brokerd.ib`` where the API will re-report trades from - # the current session, so we need to make sure we don't - # "double count" these in pp calculations. - continue - - # update clearing table - pp.add_clear(t) - updated[t.bsuid] = pp - - # minimize clears tables and update sizing. - for bsuid, pp in updated.items(): - pp.ensure_state() - - # deliver only the position entries that were actually updated - # (modified the state) from the input transaction set. - return updated - - def dump_active( - self, - ) -> tuple[ - dict[str, Position], - dict[str, Position] - ]: - ''' - Iterate all tabulated positions, render active positions to - a ``dict`` format amenable to serialization (via TOML) and drop - from state (``.pps``) as well as return in a ``dict`` all - ``Position``s which have recently closed. - - ''' - # NOTE: newly closed position are also important to report/return - # since a consumer, like an order mode UI ;), might want to react - # based on the closure (for example removing the breakeven line - # and clearing the entry from any lists/monitors). 
- closed_pp_objs: dict[str, Position] = {} - open_pp_objs: dict[str, Position] = {} - - pp_objs = self.pps - for bsuid in list(pp_objs): - pp = pp_objs[bsuid] - - # XXX: debug hook for size mismatches - # qqqbsuid = 320227571 - # if bsuid == qqqbsuid: - # breakpoint() - - pp.ensure_state() - - if ( - # "net-zero" is a "closed" position - pp.size == 0 - - # time-expired pps (normally derivatives) are "closed" - or (pp.expiry and pp.expiry < now()) - ): - # for expired cases - pp.size = 0 - - # NOTE: we DO NOT pop the pp here since it can still be - # used to check for duplicate clears that may come in as - # new transaction from some backend API and need to be - # ignored; the closed positions won't be written to the - # ``pps.toml`` since ``pp_active_entries`` above is what's - # written. - closed_pp_objs[bsuid] = pp - - else: - open_pp_objs[bsuid] = pp - - return open_pp_objs, closed_pp_objs - - def to_toml( - self, - ) -> dict[str, Any]: - - active, closed = self.dump_active() - - # ONLY dict-serialize all active positions; those that are closed - # we don't store in the ``pps.toml``. - to_toml_dict = {} - - for bsuid, pos in active.items(): - - # keep the minimal amount of clears that make up this - # position since the last net-zero state. - pos.minimize_clears() - pos.ensure_state() - - # serialize to pre-toml form - fqsn, asdict = pos.to_pretoml() - log.info(f'Updating active pp: {fqsn}') - - # XXX: ugh, it's cuz we push the section under - # the broker name.. maybe we need to rethink this? - brokerless_key = fqsn.removeprefix(f'{self.brokername}.') - to_toml_dict[brokerless_key] = asdict - - return to_toml_dict - - def write_config(self) -> None: - ''' - Write the current position table to the user's ``pps.toml``. - - ''' - # TODO: show diff output? - # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries - # active, closed_pp_objs = table.dump_active() - pp_entries = self.to_toml() - if pp_entries: - log.info(f'Updating ``pps.toml`` for {path}:\n') - log.info(f'Current positions:\n{pp_entries}') - self.conf[self.brokername][self.acctid] = pp_entries - - elif ( - self.brokername in self.conf and - self.acctid in self.conf[self.brokername] - ): - del self.conf[self.brokername][self.acctid] - if len(self.conf[self.brokername]) == 0: - del self.conf[self.brokername] - - # TODO: why tf haven't they already done this for inline - # tables smh.. - enc = PpsEncoder(preserve=True) - # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table()) - enc.dump_funcs[ - toml.decoder.InlineTableDict - ] = enc.dump_inline_table - - config.write( - self.conf, - 'pps', - encoder=enc, - fail_empty=False - ) - - -def load_pps_from_ledger( - - brokername: str, - acctname: str, - - # post normalization filter on ledger entries to be processed - filter_by: Optional[list[dict]] = None, - -) -> tuple[ - dict[str, Transaction], - dict[str, Position], -]: - ''' - Open a ledger file by broker name and account and read in and - process any trade records into our normalized ``Transaction`` form - and then update the equivalent ``Pptable`` and deliver the two - bsuid-mapped dict-sets of the transactions and pps. 
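# NOTE: editor-added usage sketch (broker/account names hypothetical):
#
#   >>> records, pps = load_pps_from_ledger('kraken', 'algotrading')
#   >>> # records: tid -> Transaction, pps: bsuid -> updated Position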
- - ''' - with ( - open_trade_ledger(brokername, acctname) as ledger, - open_pps(brokername, acctname) as table, - ): - if not ledger: - # null case, no ledger file with content - return {} - - mod = get_brokermod(brokername) - src_records: dict[str, Transaction] = mod.norm_trade_records(ledger) - - if filter_by: - records = {} - bsuids = set(filter_by) - for tid, r in src_records.items(): - if r.bsuid in bsuids: - records[tid] = r - else: - records = src_records - - updated = table.update_from_trans(records) - - return records, updated - - -# TODO: instead see if we can hack tomli and tomli-w to do the same: -# - https://github.com/hukkin/tomli -# - https://github.com/hukkin/tomli-w -class PpsEncoder(toml.TomlEncoder): - ''' - Special "styled" encoder that makes a ``pps.toml`` redable and - compact by putting `.clears` tables inline and everything else - flat-ish. - - ''' - separator = ',' - - def dump_list(self, v): - ''' - Dump an inline list with a newline after every element and - with consideration for denoted inline table types. - - ''' - retval = "[\n" - for u in v: - if isinstance(u, toml.decoder.InlineTableDict): - out = self.dump_inline_table(u) - else: - out = str(self.dump_value(u)) - - retval += " " + out + "," + "\n" - retval += "]" - return retval - - def dump_inline_table(self, section): - """Preserve inline table in its compact syntax instead of expanding - into subsection. - https://github.com/toml-lang/toml#user-content-inline-table - """ - val_list = [] - for k, v in section.items(): - # if isinstance(v, toml.decoder.InlineTableDict): - if isinstance(v, dict): - val = self.dump_inline_table(v) - else: - val = str(self.dump_value(v)) - - val_list.append(k + " = " + val) - - retval = "{ " + ", ".join(val_list) + " }" - return retval - - def dump_sections(self, o, sup): - retstr = "" - if sup != "" and sup[-1] != ".": - sup += '.' - retdict = self._dict() - arraystr = "" - for section in o: - qsection = str(section) - value = o[section] - - if not re.match(r'^[A-Za-z0-9_-]+$', section): - qsection = toml.encoder._dump_str(section) - - # arrayoftables = False - if ( - self.preserve - and isinstance(value, toml.decoder.InlineTableDict) - ): - retstr += ( - qsection - + - " = " - + - self.dump_inline_table(o[section]) - + - '\n' # only on the final terminating left brace - ) - - # XXX: this code i'm pretty sure is just blatantly bad - # and/or wrong.. - # if isinstance(o[section], list): - # for a in o[section]: - # if isinstance(a, dict): - # arrayoftables = True - # if arrayoftables: - # for a in o[section]: - # arraytabstr = "\n" - # arraystr += "[[" + sup + qsection + "]]\n" - # s, d = self.dump_sections(a, sup + qsection) - # if s: - # if s[0] == "[": - # arraytabstr += s - # else: - # arraystr += s - # while d: - # newd = self._dict() - # for dsec in d: - # s1, d1 = self.dump_sections(d[dsec], sup + - # qsection + "." + - # dsec) - # if s1: - # arraytabstr += ("[" + sup + qsection + - # "." + dsec + "]\n") - # arraytabstr += s1 - # for s1 in d1: - # newd[dsec + "." 
+ s1] = d1[s1] - # d = newd - # arraystr += arraytabstr - - elif isinstance(value, dict): - retdict[qsection] = o[section] - - elif o[section] is not None: - retstr += ( - qsection - + - " = " - + - str(self.dump_value(o[section])) - ) - - # if not isinstance(value, dict): - if not isinstance(value, toml.decoder.InlineTableDict): - # inline tables should not contain newlines: - # https://toml.io/en/v1.0.0#inline-table - retstr += '\n' - - else: - raise ValueError(value) - - retstr += arraystr - return (retstr, retdict) - - -@cm -def open_pps( - brokername: str, - acctid: str, - write_on_exit: bool = False, -) -> Generator[PpTable, None, None]: - ''' - Read out broker-specific position entries from - incremental update file: ``pps.toml``. - - ''' - conf, path = config.load('pps') - brokersection = conf.setdefault(brokername, {}) - pps = brokersection.setdefault(acctid, {}) - - # TODO: ideally we can pass in an existing - # pps state to this right? such that we - # don't have to do a ledger reload all the - # time.. a couple ideas I can think of, - # - mirror this in some client side actor which - # does the actual ledger updates (say the paper - # engine proc if we decide to always spawn it?), - # - do diffs against updates from the ledger writer - # actor and the in-mem state here? - - pp_objs = {} - table = PpTable( - brokername, - acctid, - pp_objs, - conf=conf, - ) - - # unmarshal/load ``pps.toml`` config entries into object form - # and update `PpTable` obj entries. - for fqsn, entry in pps.items(): - bsuid = entry['bsuid'] - symbol = Symbol.from_fqsn( - fqsn, - - # NOTE & TODO: right now we fill in the defaults from - # `.data._source.Symbol` but eventually these should always - # either be already written to the pos table or provided at - # write time to ensure always having these values somewhere - # and thus allowing us to get our pos sizing precision - # correct! - info={ - 'asset_type': entry.get('asset_type', ''), - 'price_tick_size': entry.get('price_tick_size', 0.01), - 'lot_tick_size': entry.get('lot_tick_size', 0.0), - } - ) - - # convert clears sub-tables (only in this form - # for toml re-presentation) back into a master table. - clears_list = entry['clears'] - - # index clears entries in "object" form by tid in a top - # level dict instead of a list (as is presented in our - # ``pps.toml``). - clears = pp_objs.setdefault(bsuid, {}) - - # TODO: should be make a ``Struct`` for clear/event entries? - # convert "clear events table" from the toml config (list of - # a dicts) and load it into object form for use in position - # processing of new clear events. 
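# NOTE: (editor annotation) each toml clear row below is re-hydrated
# into a `Transaction` so that `pp.add_clear()` can recompute the
# rolling `accum_size`/`ppu` columns from scratch on load.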
- trans: list[Transaction] = [] - - for clears_table in clears_list: - tid = clears_table.pop('tid') - dtstr = clears_table['dt'] - dt = pendulum.parse(dtstr) - clears_table['dt'] = dt - - trans.append(Transaction( - fqsn=bsuid, - sym=symbol, - bsuid=bsuid, - tid=tid, - size=clears_table['size'], - price=clears_table['price'], - cost=clears_table['cost'], - dt=dt, - )) - clears[tid] = clears_table - - size = entry['size'] - - # TODO: remove but, handle old field name for now - ppu = entry.get( - 'ppu', - entry.get('be_price', 0), - ) - - split_ratio = entry.get('split_ratio') - - expiry = entry.get('expiry') - if expiry: - expiry = pendulum.parse(expiry) - - pp = pp_objs[bsuid] = Position( - symbol, - size=size, - ppu=ppu, - split_ratio=split_ratio, - expiry=expiry, - bsuid=entry['bsuid'], - ) - - # XXX: super critical, we need to be sure to include - # all pps.toml clears to avoid reusing clears that were - # already included in the current incremental update - # state, since today's records may have already been - # processed! - for t in trans: - pp.add_clear(t) - - # audit entries loaded from toml - pp.ensure_state() - - try: - yield table - finally: - if write_on_exit: - table.write_config() +__all__ = [ + 'Transaction', + 'open_trade_ledger', + 'PpTable', + 'open_pps', + 'load_pps_from_ledger', + 'Position', +] def get_likely_pair( @@ -1075,6 +77,7 @@ def get_likely_pair( if __name__ == '__main__': import sys + from pprint import pformat args = sys.argv assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`' diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py new file mode 100644 index 00000000..74bee9ad --- /dev/null +++ b/piker/accounting/_ledger.py @@ -0,0 +1,125 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License + +# along with this program. If not, see . +from __future__ import annotations +from contextlib import contextmanager as cm +import os +from os import path +import time +from typing import ( + Any, + Iterator, + Union, + Generator +) + +from pendulum import ( + datetime, +) +import tomli +import toml + +from .. import config +from ..data._source import Symbol +from ..data.types import Struct +from ..log import get_logger + +log = get_logger(__name__) + + +@cm +def open_trade_ledger( + broker: str, + account: str, + +) -> Generator[dict, None, None]: + ''' + Indempotently create and read in a trade log file from the + ``/ledgers/`` directory. + + Files are named per broker account of the form + ``_.toml``. The ``accountname`` here is the + name as defined in the user's ``brokers.toml`` config. 
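+
+    Usage sketch (editor-added; broker/account names are hypothetical):
+
+    >>> with open_trade_ledger('kraken', 'algotrading') as ledger:
+    ...     ledger['deadbeef'] = {'price': 10.0, 'size': 1.0}
+
+    Mutations to the yielded copy are diffed against the on-disk toml
+    and written back on context exit.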
+ + ''' + ldir = path.join(config._config_dir, 'ledgers') + if not path.isdir(ldir): + os.makedirs(ldir) + + fname = f'trades_{broker}_{account}.toml' + tradesfile = path.join(ldir, fname) + + if not path.isfile(tradesfile): + log.info( + f'Creating new local trades ledger: {tradesfile}' + ) + with open(tradesfile, 'w') as cf: + pass # touch + with open(tradesfile, 'rb') as cf: + start = time.time() + ledger = tomli.load(cf) + log.info(f'Ledger load took {time.time() - start}s') + cpy = ledger.copy() + + try: + yield cpy + finally: + if cpy != ledger: + + # TODO: show diff output? + # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries + log.info(f'Updating ledger for {tradesfile}:\n') + ledger.update(cpy) + + # we write on close the mutated ledger data + with open(tradesfile, 'w') as cf: + toml.dump(ledger, cf) + + +class Transaction(Struct, frozen=True): + # TODO: should this be ``.to`` (see below)? + fqsn: str + + sym: Symbol + tid: Union[str, int] # unique transaction id + size: float + price: float + cost: float # commisions or other additional costs + dt: datetime + expiry: datetime | None = None + + # optional key normally derived from the broker + # backend which ensures the instrument-symbol this record + # is for is truly unique. + bsuid: Union[str, int] | None = None + + # optional fqsn for the source "asset"/money symbol? + # from: Optional[str] = None + + +def iter_by_dt( + clears: dict[str, Any], +) -> Iterator[tuple[str, dict]]: + ''' + Iterate entries of a ``clears: dict`` table sorted by entry recorded + datetime presumably set at the ``'dt'`` field in each entry. + + ''' + for tid, data in sorted( + list(clears.items()), + key=lambda item: item[1]['dt'], + ): + yield tid, data diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py new file mode 100644 index 00000000..2a9ca0d8 --- /dev/null +++ b/piker/accounting/_pos.py @@ -0,0 +1,961 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License + +# along with this program. If not, see . + +''' +Personal/Private position parsing, calculating, summarizing in a way +that doesn't try to cuk most humans who prefer to not lose their moneys.. + +(looking at you `ib` and dirt-bird friends) + +''' +from __future__ import annotations +from contextlib import contextmanager as cm +from math import copysign +import re +from typing import ( + Any, + Iterator, + Optional, + Union, + Generator +) + +import pendulum +from pendulum import datetime, now +import toml + +from ._ledger import ( + Transaction, + iter_by_dt, + open_trade_ledger, +) +from .. 
import config +from ..brokers import get_brokermod +from ..clearing._messages import BrokerdPosition, Status +from ..data._source import Symbol, unpack_fqsn +from ..data.types import Struct +from ..log import get_logger + +log = get_logger(__name__) + + +class Position(Struct): + ''' + Basic pp (personal/piker position) model with attached clearing + transaction history. + + ''' + symbol: Symbol + + # can be +ve or -ve for long/short + size: float + + # "breakeven price" above or below which pnl moves above and below + # zero for the entirety of the current "trade state". + ppu: float + + # unique backend symbol id + bsuid: str + + split_ratio: Optional[int] = None + + # ordered record of known constituent trade messages + clears: dict[ + Union[str, int, Status], # trade id + dict[str, Any], # transaction history summaries + ] = {} + first_clear_dt: Optional[datetime] = None + + expiry: Optional[datetime] = None + + def to_dict(self) -> dict: + return { + f: getattr(self, f) + for f in self.__struct_fields__ + } + + def to_pretoml(self) -> tuple[str, dict]: + ''' + Prep this position's data contents for export to toml including + re-structuring of the ``.clears`` table to an array of + inline-subtables for better ``pps.toml`` compactness. + + ''' + d = self.to_dict() + clears = d.pop('clears') + expiry = d.pop('expiry') + + if self.split_ratio is None: + d.pop('split_ratio') + + # should be obvious from clears/event table + d.pop('first_clear_dt') + + # TODO: we need to figure out how to have one top level + # listing venue here even when the backend isn't providing + # it via the trades ledger.. + # drop symbol obj in serialized form + s = d.pop('symbol') + fqsn = s.front_fqsn() + + broker, key, suffix = unpack_fqsn(fqsn) + sym_info = s.broker_info[broker] + + d['asset_type'] = sym_info['asset_type'] + d['price_tick_size'] = ( + sym_info.get('price_tick_size') + or + s.tick_size + ) + d['lot_tick_size'] = ( + sym_info.get('lot_tick_size') + or + s.lot_tick_size + ) + + if self.expiry is None: + d.pop('expiry', None) + elif expiry: + d['expiry'] = str(expiry) + + toml_clears_list = [] + + # reverse sort so latest clears are at top of section? + for tid, data in iter_by_dt(clears): + inline_table = toml.TomlDecoder().get_empty_inline_table() + + # serialize datetime to parsable `str` + inline_table['dt'] = str(data['dt']) + + # insert optional clear fields in column order + for k in ['ppu', 'accum_size']: + val = data.get(k) + if val: + inline_table[k] = val + + # insert required fields + for k in ['price', 'size', 'cost']: + inline_table[k] = data[k] + + inline_table['tid'] = tid + toml_clears_list.append(inline_table) + + d['clears'] = toml_clears_list + + return fqsn, d + + def ensure_state(self) -> None: + ''' + Audit either the `.size` and `.ppu` local instance vars against + the clears table calculations and return the calc-ed values if + they differ and log warnings to console. + + ''' + clears = list(self.clears.values()) + self.first_clear_dt = min(list(entry['dt'] for entry in clears)) + last_clear = clears[-1] + + csize = self.calc_size() + accum = last_clear['accum_size'] + if not self.expired(): + if ( + csize != accum + and csize != round(accum * self.split_ratio or 1) + ): + raise ValueError(f'Size mismatch: {csize}') + else: + assert csize == 0, 'Contract is expired but non-zero size?' 
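+
+        # NOTE: (editor annotation) mismatches between the audited
+        # calcs and the locally set `.size`/`.ppu` below only warn
+        # and resync the instance vars instead of raising.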
+ + if self.size != csize: + log.warning( + 'Position state mismatch:\n' + f'{self.size} => {csize}' + ) + self.size = csize + + cppu = self.calc_ppu() + ppu = last_clear['ppu'] + if ( + cppu != ppu + and self.split_ratio is not None + # handle any split info entered (for now) manually by user + and cppu != (ppu / self.split_ratio) + ): + raise ValueError(f'PPU mismatch: {cppu}') + + if self.ppu != cppu: + log.warning( + 'Position state mismatch:\n' + f'{self.ppu} => {cppu}' + ) + self.ppu = cppu + + def update_from_msg( + self, + msg: BrokerdPosition, + + ) -> None: + + # XXX: better place to do this? + symbol = self.symbol + + lot_size_digits = symbol.lot_size_digits + ppu, size = ( + round( + msg['avg_price'], + ndigits=symbol.tick_size_digits + ), + round( + msg['size'], + ndigits=lot_size_digits + ), + ) + + self.ppu = ppu + self.size = size + + @property + def dsize(self) -> float: + ''' + The "dollar" size of the pp, normally in trading (fiat) unit + terms. + + ''' + return self.ppu * self.size + + # TODO: idea: "real LIFO" dynamic positioning. + # - when a trade takes place where the pnl for + # the (set of) trade(s) is below the breakeven price + # it may be that the trader took a +ve pnl on a short(er) + # term trade in the same account. + # - in this case we could recalc the be price to + # be reverted back to it's prior value before the nearest term + # trade was opened.? + # def lifo_price() -> float: + # ... + + def iter_clears(self) -> Iterator[tuple[str, dict]]: + ''' + Iterate the internally managed ``.clears: dict`` table in + datetime-stamped order. + + ''' + return iter_by_dt(self.clears) + + def calc_ppu( + self, + # include transaction cost in breakeven price + # and presume the worst case of the same cost + # to exit this transaction (even though in reality + # it will be dynamic based on exit stratetgy). + cost_scalar: float = 2, + + ) -> float: + ''' + Compute the "price-per-unit" price for the given non-zero sized + rolling position. + + The recurrence relation which computes this (exponential) mean + per new clear which **increases** the accumulative postiion size + is: + + ppu[-1] = ( + ppu[-2] * accum_size[-2] + + + ppu[-1] * size + ) / accum_size[-1] + + where `cost_basis` for the current step is simply the price + * size of the most recent clearing transaction. + + ''' + asize_h: list[float] = [] # historical accumulative size + ppu_h: list[float] = [] # historical price-per-unit + + tid: str + entry: dict[str, Any] + for (tid, entry) in self.iter_clears(): + clear_size = entry['size'] + clear_price = entry['price'] + + last_accum_size = asize_h[-1] if asize_h else 0 + accum_size = last_accum_size + clear_size + accum_sign = copysign(1, accum_size) + + sign_change: bool = False + + if accum_size == 0: + ppu_h.append(0) + asize_h.append(0) + continue + + if accum_size == 0: + ppu_h.append(0) + asize_h.append(0) + continue + + # test if the pp somehow went "passed" a net zero size state + # resulting in a change of the "sign" of the size (+ve for + # long, -ve for short). + sign_change = ( + copysign(1, last_accum_size) + accum_sign == 0 + and last_accum_size != 0 + ) + + # since we passed the net-zero-size state the new size + # after sum should be the remaining size the new + # "direction" (aka, long vs. short) for this clear. 
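+            # (editor-added example, hypothetical sizes: a long of +5
+            # hit by a -8 clear leaves accum_size == -3; the sign flip
+            # means the remaining -3 gets priced as a fresh short.)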
+ if sign_change: + clear_size = accum_size + abs_diff = abs(accum_size) + asize_h.append(0) + ppu_h.append(0) + + else: + # old size minus the new size gives us size diff with + # +ve -> increase in pp size + # -ve -> decrease in pp size + abs_diff = abs(accum_size) - abs(last_accum_size) + + # XXX: LIFO breakeven price update. only an increaze in size + # of the position contributes the breakeven price, + # a decrease does not (i.e. the position is being made + # smaller). + # abs_clear_size = abs(clear_size) + abs_new_size = abs(accum_size) + + if abs_diff > 0: + + cost_basis = ( + # cost basis for this clear + clear_price * abs(clear_size) + + + # transaction cost + accum_sign * cost_scalar * entry['cost'] + ) + + if asize_h: + size_last = abs(asize_h[-1]) + cb_last = ppu_h[-1] * size_last + ppu = (cost_basis + cb_last) / abs_new_size + + else: + ppu = cost_basis / abs_new_size + + ppu_h.append(ppu) + asize_h.append(accum_size) + + else: + # on "exit" clears from a given direction, + # only the size changes not the price-per-unit + # need to be updated since the ppu remains constant + # and gets weighted by the new size. + asize_h.append(accum_size) + ppu_h.append(ppu_h[-1]) + + final_ppu = ppu_h[-1] if ppu_h else 0 + + # handle any split info entered (for now) manually by user + if self.split_ratio is not None: + final_ppu /= self.split_ratio + + return final_ppu + + def expired(self) -> bool: + ''' + Predicate which checks if the contract/instrument is past its expiry. + + ''' + return bool(self.expiry) and self.expiry < now() + + def calc_size(self) -> float: + ''' + Calculate the unit size of this position in the destination + asset using the clears/trade event table; zero if expired. + + ''' + size: float = 0 + + # time-expired pps (normally derivatives) are "closed" + # and have a zero size. + if self.expired(): + return 0 + + for tid, entry in self.clears.items(): + size += entry['size'] + + if self.split_ratio is not None: + size = round(size * self.split_ratio) + + return float( + self.symbol.quantize_size(size), + ) + + def minimize_clears( + self, + + ) -> dict[str, dict]: + ''' + Minimize the position's clears entries by removing + all transactions before the last net zero size to avoid + unecessary history irrelevant to the current pp state. + + ''' + size: float = 0 + clears_since_zero: list[tuple(str, dict)] = [] + + # TODO: we might just want to always do this when iterating + # a ledger? keep a state of the last net-zero and only do the + # full iterate when no state was stashed? + + # scan for the last "net zero" position by iterating + # transactions until the next net-zero size, rinse, repeat. + for tid, clear in self.clears.items(): + size += clear['size'] + clears_since_zero.append((tid, clear)) + + if size == 0: + clears_since_zero.clear() + + self.clears = dict(clears_since_zero) + return self.clears + + def add_clear( + self, + t: Transaction, + ) -> dict: + ''' + Update clearing table and populate rolling ppu and accumulative + size in both the clears entry and local attrs state. + + ''' + clear = self.clears[t.tid] = { + 'cost': t.cost, + 'price': t.price, + 'size': t.size, + 'dt': t.dt + } + + # TODO: compute these incrementally instead + # of re-looping through each time resulting in O(n**2) + # behaviour..? + + # NOTE: we compute these **after** adding the entry in order to + # make the recurrence relation math work inside + # ``.calc_size()``. 
+ self.size = clear['accum_size'] = self.calc_size() + self.ppu = clear['ppu'] = self.calc_ppu() + + return clear + + def sugest_split(self) -> float: + ... + + +class PpTable(Struct): + + brokername: str + acctid: str + pps: dict[str, Position] + conf: Optional[dict] = {} + + def update_from_trans( + self, + trans: dict[str, Transaction], + cost_scalar: float = 2, + + ) -> dict[str, Position]: + + pps = self.pps + updated: dict[str, Position] = {} + + # lifo update all pps from records, ensuring + # we compute the PPU and size sorted in time! + for t in sorted( + trans.values(), + key=lambda t: t.dt, + reverse=True, + ): + pp = pps.setdefault( + t.bsuid, + + # if no existing pp, allocate fresh one. + Position( + Symbol.from_fqsn( + t.fqsn, + info={}, + ) if not t.sym else t.sym, + size=0.0, + ppu=0.0, + bsuid=t.bsuid, + expiry=t.expiry, + ) + ) + clears = pp.clears + if clears: + first_clear_dt = pp.first_clear_dt + + # don't do updates for ledger records we already have + # included in the current pps state. + if ( + t.tid in clears + or ( + first_clear_dt + and t.dt < first_clear_dt + ) + ): + # NOTE: likely you'll see repeats of the same + # ``Transaction`` passed in here if/when you are restarting + # a ``brokerd.ib`` where the API will re-report trades from + # the current session, so we need to make sure we don't + # "double count" these in pp calculations. + continue + + # update clearing table + pp.add_clear(t) + updated[t.bsuid] = pp + + # minimize clears tables and update sizing. + for bsuid, pp in updated.items(): + pp.ensure_state() + + # deliver only the position entries that were actually updated + # (modified the state) from the input transaction set. + return updated + + def dump_active( + self, + ) -> tuple[ + dict[str, Position], + dict[str, Position] + ]: + ''' + Iterate all tabulated positions, render active positions to + a ``dict`` format amenable to serialization (via TOML) and drop + from state (``.pps``) as well as return in a ``dict`` all + ``Position``s which have recently closed. + + ''' + # NOTE: newly closed position are also important to report/return + # since a consumer, like an order mode UI ;), might want to react + # based on the closure (for example removing the breakeven line + # and clearing the entry from any lists/monitors). + closed_pp_objs: dict[str, Position] = {} + open_pp_objs: dict[str, Position] = {} + + pp_objs = self.pps + for bsuid in list(pp_objs): + pp = pp_objs[bsuid] + + # XXX: debug hook for size mismatches + # qqqbsuid = 320227571 + # if bsuid == qqqbsuid: + # breakpoint() + + pp.ensure_state() + + if ( + # "net-zero" is a "closed" position + pp.size == 0 + + # time-expired pps (normally derivatives) are "closed" + or (pp.expiry and pp.expiry < now()) + ): + # for expired cases + pp.size = 0 + + # NOTE: we DO NOT pop the pp here since it can still be + # used to check for duplicate clears that may come in as + # new transaction from some backend API and need to be + # ignored; the closed positions won't be written to the + # ``pps.toml`` since ``pp_active_entries`` above is what's + # written. + closed_pp_objs[bsuid] = pp + + else: + open_pp_objs[bsuid] = pp + + return open_pp_objs, closed_pp_objs + + def to_toml( + self, + ) -> dict[str, Any]: + + active, closed = self.dump_active() + + # ONLY dict-serialize all active positions; those that are closed + # we don't store in the ``pps.toml``. 
+        to_toml_dict = {}
+
+        for bsuid, pos in active.items():
+
+            # keep the minimal amount of clears that make up this
+            # position since the last net-zero state.
+            pos.minimize_clears()
+            pos.ensure_state()
+
+            # serialize to pre-toml form
+            fqsn, asdict = pos.to_pretoml()
+            log.info(f'Updating active pp: {fqsn}')
+
+            # XXX: ugh, it's cuz we push the section under
+            # the broker name.. maybe we need to rethink this?
+            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
+            to_toml_dict[brokerless_key] = asdict
+
+        return to_toml_dict
+
+    def write_config(self) -> None:
+        '''
+        Write the current position table to the user's ``pps.toml``.
+
+        '''
+        # TODO: show diff output?
+        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+        # active, closed_pp_objs = table.dump_active()
+        pp_entries = self.to_toml()
+        if pp_entries:
+            log.info(
+                f'Updating ``pps.toml``:\n'
+                f'Current positions:\n{pp_entries}'
+            )
+            self.conf[self.brokername][self.acctid] = pp_entries
+
+        elif (
+            self.brokername in self.conf and
+            self.acctid in self.conf[self.brokername]
+        ):
+            del self.conf[self.brokername][self.acctid]
+            if len(self.conf[self.brokername]) == 0:
+                del self.conf[self.brokername]
+
+        # TODO: why tf haven't they already done this for inline
+        # tables smh..
+        enc = PpsEncoder(preserve=True)
+        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
+        enc.dump_funcs[
+            toml.decoder.InlineTableDict
+        ] = enc.dump_inline_table
+
+        config.write(
+            self.conf,
+            'pps',
+            encoder=enc,
+            fail_empty=False
+        )
+
+
+def load_pps_from_ledger(
+
+    brokername: str,
+    acctname: str,
+
+    # post normalization filter on ledger entries to be processed
+    filter_by: Optional[list[dict]] = None,
+
+) -> tuple[
+    dict[str, Transaction],
+    dict[str, Position],
+]:
+    '''
+    Open a ledger file by broker name and account and read in and
+    process any trade records into our normalized ``Transaction`` form
+    and then update the equivalent ``PpTable`` and deliver the two
+    bsuid-mapped dict-sets of the transactions and pps.
+
+    '''
+    with (
+        open_trade_ledger(brokername, acctname) as ledger,
+        open_pps(brokername, acctname) as table,
+    ):
+        if not ledger:
+            # null case, no ledger file with content; deliver the
+            # (empty) pair matching the annotated return type.
+            return {}, {}
+
+        mod = get_brokermod(brokername)
+        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)
+
+        if filter_by:
+            records = {}
+            bsuids = set(filter_by)
+            for tid, r in src_records.items():
+                if r.bsuid in bsuids:
+                    records[tid] = r
+        else:
+            records = src_records
+
+        updated = table.update_from_trans(records)
+
+    return records, updated
+
+
+# TODO: instead see if we can hack tomli and tomli-w to do the same:
+# - https://github.com/hukkin/tomli
+# - https://github.com/hukkin/tomli-w
+class PpsEncoder(toml.TomlEncoder):
+    '''
+    Special "styled" encoder that makes a ``pps.toml`` readable and
+    compact by putting `.clears` tables inline and everything else
+    flat-ish.
+
+    '''
+    separator = ','
+
+    def dump_list(self, v):
+        '''
+        Dump an inline list with a newline after every element and
+        with consideration for denoted inline table types.
+
+        '''
+        retval = "[\n"
+        for u in v:
+            if isinstance(u, toml.decoder.InlineTableDict):
+                out = self.dump_inline_table(u)
+            else:
+                out = str(self.dump_value(u))
+
+            retval += " " + out + "," + "\n"
+        retval += "]"
+        return retval
+
+    def dump_inline_table(self, section):
+        """Preserve inline table in its compact syntax instead of expanding
+        into subsection.
+ https://github.com/toml-lang/toml#user-content-inline-table + """ + val_list = [] + for k, v in section.items(): + # if isinstance(v, toml.decoder.InlineTableDict): + if isinstance(v, dict): + val = self.dump_inline_table(v) + else: + val = str(self.dump_value(v)) + + val_list.append(k + " = " + val) + + retval = "{ " + ", ".join(val_list) + " }" + return retval + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' + retdict = self._dict() + arraystr = "" + for section in o: + qsection = str(section) + value = o[section] + + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = toml.encoder._dump_str(section) + + # arrayoftables = False + if ( + self.preserve + and isinstance(value, toml.decoder.InlineTableDict) + ): + retstr += ( + qsection + + + " = " + + + self.dump_inline_table(o[section]) + + + '\n' # only on the final terminating left brace + ) + + # XXX: this code i'm pretty sure is just blatantly bad + # and/or wrong.. + # if isinstance(o[section], list): + # for a in o[section]: + # if isinstance(a, dict): + # arrayoftables = True + # if arrayoftables: + # for a in o[section]: + # arraytabstr = "\n" + # arraystr += "[[" + sup + qsection + "]]\n" + # s, d = self.dump_sections(a, sup + qsection) + # if s: + # if s[0] == "[": + # arraytabstr += s + # else: + # arraystr += s + # while d: + # newd = self._dict() + # for dsec in d: + # s1, d1 = self.dump_sections(d[dsec], sup + + # qsection + "." + + # dsec) + # if s1: + # arraytabstr += ("[" + sup + qsection + + # "." + dsec + "]\n") + # arraytabstr += s1 + # for s1 in d1: + # newd[dsec + "." + s1] = d1[s1] + # d = newd + # arraystr += arraytabstr + + elif isinstance(value, dict): + retdict[qsection] = o[section] + + elif o[section] is not None: + retstr += ( + qsection + + + " = " + + + str(self.dump_value(o[section])) + ) + + # if not isinstance(value, dict): + if not isinstance(value, toml.decoder.InlineTableDict): + # inline tables should not contain newlines: + # https://toml.io/en/v1.0.0#inline-table + retstr += '\n' + + else: + raise ValueError(value) + + retstr += arraystr + return (retstr, retdict) + + +@cm +def open_pps( + brokername: str, + acctid: str, + write_on_exit: bool = False, +) -> Generator[PpTable, None, None]: + ''' + Read out broker-specific position entries from + incremental update file: ``pps.toml``. + + ''' + conf, path = config.load('pps') + brokersection = conf.setdefault(brokername, {}) + pps = brokersection.setdefault(acctid, {}) + + # TODO: ideally we can pass in an existing + # pps state to this right? such that we + # don't have to do a ledger reload all the + # time.. a couple ideas I can think of, + # - mirror this in some client side actor which + # does the actual ledger updates (say the paper + # engine proc if we decide to always spawn it?), + # - do diffs against updates from the ledger writer + # actor and the in-mem state here? + + pp_objs = {} + table = PpTable( + brokername, + acctid, + pp_objs, + conf=conf, + ) + + # unmarshal/load ``pps.toml`` config entries into object form + # and update `PpTable` obj entries. + for fqsn, entry in pps.items(): + bsuid = entry['bsuid'] + symbol = Symbol.from_fqsn( + fqsn, + + # NOTE & TODO: right now we fill in the defaults from + # `.data._source.Symbol` but eventually these should always + # either be already written to the pos table or provided at + # write time to ensure always having these values somewhere + # and thus allowing us to get our pos sizing precision + # correct! 
+ info={ + 'asset_type': entry.get('asset_type', ''), + 'price_tick_size': entry.get('price_tick_size', 0.01), + 'lot_tick_size': entry.get('lot_tick_size', 0.0), + } + ) + + # convert clears sub-tables (only in this form + # for toml re-presentation) back into a master table. + clears_list = entry['clears'] + + # index clears entries in "object" form by tid in a top + # level dict instead of a list (as is presented in our + # ``pps.toml``). + clears = pp_objs.setdefault(bsuid, {}) + + # TODO: should be make a ``Struct`` for clear/event entries? + # convert "clear events table" from the toml config (list of + # a dicts) and load it into object form for use in position + # processing of new clear events. + trans: list[Transaction] = [] + + for clears_table in clears_list: + tid = clears_table.pop('tid') + dtstr = clears_table['dt'] + dt = pendulum.parse(dtstr) + clears_table['dt'] = dt + + trans.append(Transaction( + fqsn=bsuid, + sym=symbol, + bsuid=bsuid, + tid=tid, + size=clears_table['size'], + price=clears_table['price'], + cost=clears_table['cost'], + dt=dt, + )) + clears[tid] = clears_table + + size = entry['size'] + + # TODO: remove but, handle old field name for now + ppu = entry.get( + 'ppu', + entry.get('be_price', 0), + ) + + split_ratio = entry.get('split_ratio') + + expiry = entry.get('expiry') + if expiry: + expiry = pendulum.parse(expiry) + + pp = pp_objs[bsuid] = Position( + symbol, + size=size, + ppu=ppu, + split_ratio=split_ratio, + expiry=expiry, + bsuid=entry['bsuid'], + ) + + # XXX: super critical, we need to be sure to include + # all pps.toml clears to avoid reusing clears that were + # already included in the current incremental update + # state, since today's records may have already been + # processed! + for t in trans: + pp.add_clear(t) + + # audit entries loaded from toml + pp.ensure_state() + + try: + yield table + finally: + if write_on_exit: + table.write_config() From 22622e1c01a2d0c99c3da85d06c2a000938f7010 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 10 Mar 2023 17:59:00 -0500 Subject: [PATCH 006/294] `ib`: (cukcit) just presume a stonk if we can read type from existing ledger.. --- piker/brokers/ib/broker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index d5b7571e..bc65d6a2 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -1153,7 +1153,7 @@ def norm_trade_records( # special handling of symbol extraction from # flex records using some ad-hoc schema parsing. - asset_type: str = record.get('assetCategory') or record['secType'] + asset_type: str = record.get('assetCategory') or record.get('secType', 'STK') # TODO: XXX: WOA this is kinda hacky.. probably # should figure out the correct future pair key more From 7904c271272466385c946bdb329a74071c88c1ae Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 13 Mar 2023 16:25:48 -0400 Subject: [PATCH 007/294] (u)Limit the fd allocation for java 8 runtime.. Can't believe this was actually the issue..seriously i don't envy jvm users. 
See following issues: - https://stackoverflow.com/a/56895801 - https://bugs.openjdk.org/browse/JDK-8150460 --- dockering/ib/docker-compose.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/dockering/ib/docker-compose.yml b/dockering/ib/docker-compose.yml index 8c676623..7e6fd05f 100644 --- a/dockering/ib/docker-compose.yml +++ b/dockering/ib/docker-compose.yml @@ -2,8 +2,21 @@ # https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml version: "3.5" + services: + ib_gw_paper: + + # apparently java is a mega cukc: + # https://stackoverflow.com/a/56895801 + # https://bugs.openjdk.org/browse/JDK-8150460 + ulimits: + # nproc: 65535 + nproc: 6000 + nofile: + soft: 2000 + hard: 3000 + # other image tags available: # https://github.com/waytrade/ib-gateway-docker#supported-tags # image: waytrade/ib-gateway:981.3j From 9f03484c4d809d1870627f290081ec99fa62103b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 13 Mar 2023 17:42:20 -0400 Subject: [PATCH 008/294] Move all fqsn parsing and `Symbol` to new `accounting._mktinfo --- piker/accounting/_ledger.py | 2 +- piker/accounting/_mktinfo.py | 302 ++++++++++++++++++++++++++++++++ piker/accounting/_pos.py | 6 +- piker/brokers/ib/api.py | 2 +- piker/brokers/ib/broker.py | 2 +- piker/brokers/kraken/api.py | 2 +- piker/brokers/kraken/broker.py | 2 +- piker/clearing/_allocate.py | 2 +- piker/clearing/_client.py | 2 +- piker/clearing/_ems.py | 3 +- piker/clearing/_messages.py | 2 +- piker/clearing/_paper_engine.py | 2 +- piker/data/_source.py | 267 +--------------------------- piker/data/feed.py | 4 +- piker/data/flows.py | 4 +- piker/fsp/_engine.py | 2 +- piker/ui/_app.py | 2 +- piker/ui/_axes.py | 2 +- piker/ui/_chart.py | 2 +- piker/ui/_fsp.py | 2 +- piker/ui/order_mode.py | 2 +- tests/test_feeds.py | 2 +- 22 files changed, 335 insertions(+), 283 deletions(-) create mode 100644 piker/accounting/_mktinfo.py diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 74bee9ad..df7bb4aa 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -33,9 +33,9 @@ import tomli import toml from .. import config -from ..data._source import Symbol from ..data.types import Struct from ..log import get_logger +from ._mktinfo import Symbol log = get_logger(__name__) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py new file mode 100644 index 00000000..a9036170 --- /dev/null +++ b/piker/accounting/_mktinfo.py @@ -0,0 +1,302 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License + +# along with this program. If not, see . + +''' +Market (pair) meta-info layer: sane addressing semantics and meta-data +for cross-provider marketplaces. 
+
+We introduce the concept of,
+
+- a FQMA: fully qualified market address,
+- a sane schema for FQMAs including derivatives,
+- a msg-serializable description of markets for
+  easy sharing with other pikers B)
+
+'''
+from __future__ import annotations
+from decimal import (
+    Decimal,
+    ROUND_HALF_EVEN,
+)
+from typing import (
+    Any,
+)
+
+from ..data.types import Struct
+
+
+class MktPair(Struct, frozen=True):
+
+    src: str  # source asset name being used to buy
+    src_type: str  # source asset's financial type/classification name
+    # ^ specifies a "class" of financial instrument
+    # egs. stock, futer, option, bond etc.
+
+    dst: str  # destination asset name being bought
+    dst_type: str  # destination asset's financial type/classification name
+
+    price_tick: float  # minimum price increment value increment
+    price_tick_digits: int  # required decimal digits for above
+
+    size_tick: float  # minimum size (aka vlm) increment value increment
+
+    # size_tick_digits: int # required decimal digits for above
+    @property
+    def size_tick_digits(self) -> int:
+        return self.size_tick
+
+    venue: str | None = None  # market venue provider name
+    expiry: str | None = None  # for derivs, expiry datetime parseable str
+
+    # for derivs, info describing contract, egs.
+    # strike price, call or put, swap type, exercise model, etc.
+    contract_info: str | None = None
+
+    @classmethod
+    def from_msg(
+        self,
+        msg: dict[str, Any],
+
+    ) -> MktPair:
+        '''
+        Constructor for a received msg-dict normally received over IPC.
+
+        '''
+        ...
+
+    # fqa, fqma, .. etc. see issue:
+    # https://github.com/pikers/piker/issues/467
+    @property
+    def fqsn(self) -> str:
+        '''
+        Return the fully qualified market (endpoint) name for the
+        pair of transacting assets.
+
+        '''
+        ...
+
+
+def mk_fqsn(
+    provider: str,
+    symbol: str,
+
+) -> str:
+    '''
+    Generate a "fully qualified symbol name" which is
+    a reverse-hierarchical cross broker/provider symbol
+
+    '''
+    return '.'.join([symbol, provider]).lower()
+
+
+def float_digits(
+    value: float,
+) -> int:
+    '''
+    Return the number of precision digits read from a float value.
+
+    '''
+    if value == 0:
+        return 0
+
+    return int(-Decimal(str(value)).as_tuple().exponent)
+
+
+def digits_to_dec(
+    ndigits: int,
+) -> Decimal:
+    '''
+    Return the minimum float value for an input integer value.
+
+    eg. 3 -> 0.001
+
+    '''
+    if ndigits == 0:
+        return Decimal('0')
+
+    return Decimal('0.' + '0'*(ndigits-1) + '1')
+
+
+def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
+    '''
+    Unpack a fully-qualified-symbol-name to ``tuple``.
+
+    '''
+    venue = ''
+    suffix = ''
+
+    # TODO: probably reverse the order of all this XD
+    tokens = fqsn.split('.')
+    if len(tokens) < 3:
+        # probably crypto
+        symbol, broker = tokens
+        return (
+            broker,
+            symbol,
+            '',
+        )
+
+    elif len(tokens) > 3:
+        symbol, venue, suffix, broker = tokens
+    else:
+        symbol, venue, broker = tokens
+        suffix = ''
+
+    # head, _, broker = fqsn.rpartition('.')
+    # symbol, _, suffix = head.rpartition('.')
+    return (
+        broker,
+        '.'.join([symbol, venue]),
+        suffix,
+    )
+
+# TODO: rework the below `Symbol` (which was originally inspired and
+# derived from stuff in quantdom) into a simpler, ipc msg ready, market
+# endpoint meta-data container type as per the drafted interface above.
+class Symbol(Struct):
+    '''
+    I guess this is some kinda container thing for dealing with
+    all the different meta-data formats from brokers?
+ + ''' + key: str + tick_size: float = 0.01 + lot_tick_size: float = 0.0 # "volume" precision as min step value + tick_size_digits: int = 2 + lot_size_digits: int = 0 + suffix: str = '' + broker_info: dict[str, dict[str, Any]] = {} + + @classmethod + def from_broker_info( + cls, + broker: str, + symbol: str, + info: dict[str, Any], + suffix: str = '', + + ) -> Symbol: + + tick_size = info.get('price_tick_size', 0.01) + lot_size = info.get('lot_tick_size', 0.0) + + return Symbol( + key=symbol, + + tick_size=tick_size, + lot_tick_size=lot_size, + + tick_size_digits=float_digits(tick_size), + lot_size_digits=float_digits(lot_size), + + suffix=suffix, + broker_info={broker: info}, + ) + + @classmethod + def from_fqsn( + cls, + fqsn: str, + info: dict[str, Any], + + ) -> Symbol: + broker, key, suffix = unpack_fqsn(fqsn) + return cls.from_broker_info( + broker, + key, + info=info, + suffix=suffix, + ) + + @property + def type_key(self) -> str: + return list(self.broker_info.values())[0]['asset_type'] + + @property + def brokers(self) -> list[str]: + return list(self.broker_info.keys()) + + def nearest_tick(self, value: float) -> float: + ''' + Return the nearest tick value based on mininum increment. + + ''' + mult = 1 / self.tick_size + return round(value * mult) / mult + + def front_feed(self) -> tuple[str, str]: + ''' + Return the "current" feed key for this symbol. + + (i.e. the broker + symbol key in a tuple). + + ''' + return ( + list(self.broker_info.keys())[0], + self.key, + ) + + def tokens(self) -> tuple[str]: + broker, key = self.front_feed() + if self.suffix: + return (key, self.suffix, broker) + else: + return (key, broker) + + @property + def fqsn(self) -> str: + return '.'.join(self.tokens()).lower() + + def front_fqsn(self) -> str: + ''' + fqsn = "fully qualified symbol name" + + Basically the idea here is for all client-ish code (aka programs/actors + that ask the provider agnostic layers in the stack for data) should be + able to tell which backend / venue / derivative each data feed/flow is + from by an explicit string key of the current form: + + ... + + TODO: I have thoughts that we should actually change this to be + more like an "attr lookup" (like how the web should have done + urls, but marketting peeps ruined it etc. etc.): + + ... + + ''' + tokens = self.tokens() + fqsn = '.'.join(map(str.lower, tokens)) + return fqsn + + def quantize_size( + self, + size: float, + + ) -> Decimal: + ''' + Truncate input ``size: float`` using ``Decimal`` + and ``.lot_size_digits``. + + ''' + digits = self.lot_size_digits + return Decimal(size).quantize( + Decimal(f'1.{"0".ljust(digits, "0")}'), + rounding=ROUND_HALF_EVEN + ) + + diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 2a9ca0d8..204e7a8e 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -43,10 +43,13 @@ from ._ledger import ( iter_by_dt, open_trade_ledger, ) +from ._mktinfo import ( + Symbol, + unpack_fqsn, +) from .. 
import config from ..brokers import get_brokermod from ..clearing._messages import BrokerdPosition, Status -from ..data._source import Symbol, unpack_fqsn from ..data.types import Struct from ..log import get_logger @@ -154,6 +157,7 @@ class Position(Struct): inline_table['tid'] = tid toml_clears_list.append(inline_table) + d['clears'] = toml_clears_list return fqsn, d diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index bfa66a9d..c6513204 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -644,7 +644,7 @@ class Client: # fqsn parsing stage # ------------------ if '.ib' in pattern: - from ..data._source import unpack_fqsn + from ..accounting._mktinfo import unpack_fqsn _, symbol, expiry = unpack_fqsn(pattern) else: diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index bc65d6a2..77f0bb53 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -70,7 +70,7 @@ from piker.clearing._messages import ( BrokerdFill, BrokerdError, ) -from piker.data._source import ( +from piker.accounting._mktinfo import ( Symbol, float_digits, ) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 74ad734b..82479329 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -42,7 +42,7 @@ import trio from piker import config from piker.data.types import Struct -from piker.data._source import Symbol +from piker.accounting._mktinfo import Symbol from piker.brokers._util import ( resproc, SymbolNotFound, diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 5d1bbb01..72d6f0fe 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -48,7 +48,7 @@ from piker.accounting import ( open_pps, get_likely_pair, ) -from piker.data._source import ( +from piker.accounting._mktinfo import ( Symbol, digits_to_dec, ) diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index c457de05..023d1e92 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -23,7 +23,7 @@ from typing import Optional from bidict import bidict -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ..data.types import Struct from ..accounting import Position diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 7d03406a..ee176f87 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -27,6 +27,7 @@ import trio import tractor from tractor.trionics import broadcast_receiver +from ..accounting._mktinfo import unpack_fqsn from ..log import get_logger from ..data.types import Struct from ..service import maybe_open_emsd @@ -228,7 +229,6 @@ async def open_ems( # ready for order commands book = get_orders() - from ..data._source import unpack_fqsn broker, symbol, suffix = unpack_fqsn(fqsn) async with maybe_open_emsd(broker) as portal: diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 477da310..b2c4c614 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -43,7 +43,7 @@ import tractor from ..log import get_logger from ..data._normalize import iterticks -from ..data._source import ( +from ..accounting._mktinfo import ( unpack_fqsn, mk_fqsn, float_digits, @@ -521,7 +521,6 @@ class Router(Struct): none already exists. 
''' - from ..data._source import unpack_fqsn broker, symbol, suffix = unpack_fqsn(fqsn) async with ( diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index c7693b9f..f084af05 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -29,7 +29,7 @@ from typing import ( from msgspec import field -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ..data.types import Struct diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 39d5a474..00611e6d 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -38,7 +38,7 @@ import tractor from .. import data from ..data.types import Struct -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ..accounting import ( Position, Transaction, diff --git a/piker/data/_source.py b/piker/data/_source.py index e503105e..61c2e52f 100644 --- a/piker/data/_source.py +++ b/piker/data/_source.py @@ -28,8 +28,12 @@ from bidict import bidict import numpy as np from .types import Struct -# from numba import from_dtype - +from ..accounting._mktinfo import ( + # mkfqsn, + unpack_fqsn, + # digits_to_dec, + float_digits, +) ohlc_fields = [ ('time', float), @@ -50,6 +54,7 @@ base_ohlc_dtype = np.dtype(ohlc_fields) # TODO: for now need to construct this manually for readonly arrays, see # https://github.com/numba/numba/issues/4511 +# from numba import from_dtype # numba_ohlc_dtype = from_dtype(base_ohlc_dtype) # map time frame "keys" to seconds values @@ -64,47 +69,6 @@ tf_in_1s = bidict({ }) -def mk_fqsn( - provider: str, - symbol: str, - -) -> str: - ''' - Generate a "fully qualified symbol name" which is - a reverse-hierarchical cross broker/provider symbol - - ''' - return '.'.join([symbol, provider]).lower() - - -def float_digits( - value: float, -) -> int: - ''' - Return the number of precision digits read from a float value. - - ''' - if value == 0: - return 0 - - return int(-Decimal(str(value)).as_tuple().exponent) - - -def digits_to_dec( - ndigits: int, -) -> Decimal: - ''' - Return the minimum float value for an input integer value. - - eg. 3 -> 0.001 - - ''' - if ndigits == 0: - return Decimal('0') - - return Decimal('0.' + '0'*(ndigits-1) + '1') - - def ohlc_zeros(length: int) -> np.ndarray: """Construct an OHLC field formatted structarray. @@ -115,223 +79,6 @@ def ohlc_zeros(length: int) -> np.ndarray: return np.zeros(length, dtype=base_ohlc_dtype) -def unpack_fqsn(fqsn: str) -> tuple[str, str, str]: - ''' - Unpack a fully-qualified-symbol-name to ``tuple``. - - ''' - venue = '' - suffix = '' - - # TODO: probably reverse the order of all this XD - tokens = fqsn.split('.') - if len(tokens) < 3: - # probably crypto - symbol, broker = tokens - return ( - broker, - symbol, - '', - ) - - elif len(tokens) > 3: - symbol, venue, suffix, broker = tokens - else: - symbol, venue, broker = tokens - suffix = '' - - # head, _, broker = fqsn.rpartition('.') - # symbol, _, suffix = head.rpartition('.') - return ( - broker, - '.'.join([symbol, venue]), - suffix, - ) - - -class MktPair(Struct, frozen=True): - - src: str # source asset name being used to buy - src_type: str # source asset's financial type/classification name - # ^ specifies a "class" of financial instrument - # egs. stock, futer, option, bond etc. 
- - dst: str # destination asset name being bought - dst_type: str # destination asset's financial type/classification name - - price_tick: float # minimum price increment value increment - price_tick_digits: int # required decimal digits for above - - size_tick: float # minimum size (aka vlm) increment value increment - size_tick_digits: int # required decimal digits for above - - venue: str | None = None # market venue provider name - expiry: str | None = None # for derivs, expiry datetime parseable str - - # for derivs, info describing contract, egs. - # strike price, call or put, swap type, exercise model, etc. - contract_info: str | None = None - - @classmethod - def from_msg( - self, - msg: dict[str, Any], - - ) -> MktPair: - ''' - Constructor for a received msg-dict normally received over IPC. - - ''' - ... - - # fqa, fqma, .. etc. see issue: - # https://github.com/pikers/piker/issues/467 - @property - def fqsn(self) -> str: - ''' - Return the fully qualified market (endpoint) name for the - pair of transacting assets. - - ''' - ... - - -# TODO: rework the below `Symbol` (which was originally inspired and -# derived from stuff in quantdom) into a simpler, ipc msg ready, market -# endpoint meta-data container type as per the drafted interace above. -class Symbol(Struct): - ''' - I guess this is some kinda container thing for dealing with - all the different meta-data formats from brokers? - - ''' - key: str - tick_size: float = 0.01 - lot_tick_size: float = 0.0 # "volume" precision as min step value - tick_size_digits: int = 2 - lot_size_digits: int = 0 - suffix: str = '' - broker_info: dict[str, dict[str, Any]] = {} - - @classmethod - def from_broker_info( - cls, - broker: str, - symbol: str, - info: dict[str, Any], - suffix: str = '', - - ) -> Symbol: - - tick_size = info.get('price_tick_size', 0.01) - lot_size = info.get('lot_tick_size', 0.0) - - return Symbol( - key=symbol, - - tick_size=tick_size, - lot_tick_size=lot_size, - - tick_size_digits=float_digits(tick_size), - lot_size_digits=float_digits(lot_size), - - suffix=suffix, - broker_info={broker: info}, - ) - - @classmethod - def from_fqsn( - cls, - fqsn: str, - info: dict[str, Any], - - ) -> Symbol: - broker, key, suffix = unpack_fqsn(fqsn) - return cls.from_broker_info( - broker, - key, - info=info, - suffix=suffix, - ) - - @property - def type_key(self) -> str: - return list(self.broker_info.values())[0]['asset_type'] - - @property - def brokers(self) -> list[str]: - return list(self.broker_info.keys()) - - def nearest_tick(self, value: float) -> float: - ''' - Return the nearest tick value based on mininum increment. - - ''' - mult = 1 / self.tick_size - return round(value * mult) / mult - - def front_feed(self) -> tuple[str, str]: - ''' - Return the "current" feed key for this symbol. - - (i.e. the broker + symbol key in a tuple). - - ''' - return ( - list(self.broker_info.keys())[0], - self.key, - ) - - def tokens(self) -> tuple[str]: - broker, key = self.front_feed() - if self.suffix: - return (key, self.suffix, broker) - else: - return (key, broker) - - @property - def fqsn(self) -> str: - return '.'.join(self.tokens()).lower() - - def front_fqsn(self) -> str: - ''' - fqsn = "fully qualified symbol name" - - Basically the idea here is for all client-ish code (aka programs/actors - that ask the provider agnostic layers in the stack for data) should be - able to tell which backend / venue / derivative each data feed/flow is - from by an explicit string key of the current form: - - ... 
- - TODO: I have thoughts that we should actually change this to be - more like an "attr lookup" (like how the web should have done - urls, but marketting peeps ruined it etc. etc.): - - ... - - ''' - tokens = self.tokens() - fqsn = '.'.join(map(str.lower, tokens)) - return fqsn - - def quantize_size( - self, - size: float, - - ) -> Decimal: - ''' - Truncate input ``size: float`` using ``Decimal`` - and ``.lot_size_digits``. - - ''' - digits = self.lot_size_digits - return Decimal(size).quantize( - Decimal(f'1.{"0".ljust(digits, "0")}'), - rounding=ROUND_HALF_EVEN - ) - - def _nan_to_closest_num(array: np.ndarray): """Return interpolated values instead of NaN. diff --git a/piker/data/feed.py b/piker/data/feed.py index 7efd5eb3..5e1a1aec 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -70,11 +70,11 @@ from ._sharedmem import ( ) from .ingest import get_ingestormod from .types import Struct -from ._source import ( - base_iohlc_dtype, +from ..accounting._mktinfo import ( Symbol, unpack_fqsn, ) +from ._source import base_iohlc_dtype from ..ui import _search from ._sampling import ( open_sample_stream, diff --git a/piker/data/flows.py b/piker/data/flows.py index 9d8b3103..19615f61 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -30,10 +30,10 @@ import tractor import pendulum import numpy as np -from .types import Struct -from ._source import ( +from ..accounting._mktinfo import ( Symbol, ) +from .types import Struct from ._sharedmem import ( attach_shm_array, ShmArray, diff --git a/piker/fsp/_engine.py b/piker/fsp/_engine.py index 37852cfc..a77e662f 100644 --- a/piker/fsp/_engine.py +++ b/piker/fsp/_engine.py @@ -45,7 +45,7 @@ from ..data._sampling import ( _default_delay_s, open_sample_stream, ) -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ._api import ( Fsp, _load_builtins, diff --git a/piker/ui/_app.py b/piker/ui/_app.py index 9978dbe3..0e7dad47 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -28,7 +28,7 @@ from ..service import maybe_spawn_brokerd from . import _event from ._exec import run_qtractor from ..data.feed import install_brokerd_search -from ..data._source import unpack_fqsn +from ..accounting._mktinfo import unpack_fqsn from . import _search from ._chart import GodWidget from ..log import get_logger diff --git a/piker/ui/_axes.py b/piker/ui/_axes.py index 62214f60..040d0552 100644 --- a/piker/ui/_axes.py +++ b/piker/ui/_axes.py @@ -29,7 +29,7 @@ from PyQt5 import QtCore, QtGui, QtWidgets from PyQt5.QtCore import QPointF from . 
import _pg_overrides as pgo -from ..data._source import float_digits +from ..accounting._mktinfo import float_digits from ._label import Label from ._style import DpiAwareFont, hcolor, _font from ._interaction import ChartView diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index 7811278b..b05d6fcf 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -68,7 +68,7 @@ from ..data.feed import ( Feed, Flume, ) -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ..log import get_logger from ._interaction import ChartView from ._forms import FieldsForm diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index 6e600743..960b287a 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -46,7 +46,7 @@ from ..data._sharedmem import ( try_read, ) from ..data.feed import Flume -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ._chart import ( ChartPlotWidget, LinkedSplits, diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index bf60c0e6..6ac0f1f4 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -42,7 +42,7 @@ from ..clearing._allocate import ( mk_allocator, ) from ._style import _font -from ..data._source import Symbol +from ..accounting._mktinfo import Symbol from ..data.feed import ( Feed, Flume, diff --git a/tests/test_feeds.py b/tests/test_feeds.py index a79ca861..0435ed61 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -13,7 +13,7 @@ from piker.data import ( ShmArray, open_feed, ) -from piker.data._source import ( +from piker.accounting._mktinfo import ( unpack_fqsn, ) From 91dda3020e81d5440b2d8022c733400105b4295e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 19:40:47 -0400 Subject: [PATCH 009/294] Simplify `Symbol` extend `MktPair`, add `Asset` Drop everything we can in terms of methods and attrs from `Symbol`: - kill `.tokens()`, `.front_feed()`, `.tokens()`, `.nearest_tick()`, `.front_fqsn()`, instead moving logic from these methods into dependents (and obviously removing any usage from rest of code base, coming in follow up commits). - rename `.quantize_size()` -> `.quantize()`. - re-implement `.brokers`, `.lot_size_digits`, `.tick_size_digits` as `@property` methods; for the latter two, allows us to minimize to only accepting min tick decimal values on alternative constructor class methods and to drop the equivalent instance vars. - map `_fqsn` related variable names to new and preferred `_fqme`. We also juggle around some utility functions, moving limited precision related `decimal.Decimal` routines to the top of module and soon-to-be legacy `fqsn` related routines to the bottom. `MktPair` draft type extensions: - drop requirements for `src_type`, and offer the optional `.dst_type` field as either a `str` or (new `typing.Literal`) `AssetTypeName`. - define an equivalent `.quantize()` as (re)defined in `Symbol` but with `quantity_type: str` field which specifies whether to use the price or the size precision. - add a lot more docs, a `.key` property for the "symbol" name, draft property for a `.fqme: str` - allow `.src` and `.dst` to be of type `str | Asset` Add a new `Asset` to capture "things which can be used in markets and/or transactions" XD - defines a `.name`, `.atype: AssetTypeName` a financial category tag, `tx_tick: Decimal` the precision limit for transactions and of course a `.quantime()` method for doing accounting arithmetic on a given tech stack. 
- define the `atype: AssetTypeName` type as a finite set of `str`s expected to be used in various ways for default settings in other parts of the data and order control layers.. --- piker/accounting/_mktinfo.py | 347 ++++++++++++++++++++++++----------- 1 file changed, 235 insertions(+), 112 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index a9036170..dde8ce27 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -34,34 +34,169 @@ from decimal import ( ) from typing import ( Any, + Literal, ) from ..data.types import Struct +_underlyings: list[str] = [ + 'stock', + 'bond', + 'crypto_currency', + 'fiat_currency', + 'commodity', +] + + +_derivs: list[str] = [ + 'swap', + 'future', + 'continuous_future', + 'option', + 'futures_option', +] + +# NOTE: a tag for other subsystems to try +# and do default settings for certain things: +# - allocator does unit vs. dolla size limiting. +AssetTypeName: Literal[ + _underlyings + + + _derivs +] + +# egs. stock, futer, option, bond etc. + + +def float_digits( + value: float, +) -> int: + ''' + Return the number of precision digits read from a decimal or float + value. + + ''' + if value == 0: + return 0 + + return int( + -Decimal(str(value)).as_tuple().exponent + ) + + +def digits_to_dec( + ndigits: int, +) -> Decimal: + ''' + Return the minimum float value for an input integer value. + + eg. 3 -> 0.001 + + ''' + if ndigits == 0: + return Decimal('0') + + return Decimal('0.' + '0'*(ndigits-1) + '1') + + +class Asset(Struct, frozen=True): + ''' + Container type describing any transactable asset's technology. + + ''' + name: str + atype: AssetTypeName + + # minimum transaction size / precision. + # eg. for buttcoin this is a "satoshi". + tx_tick: Decimal + + # NOTE: additional info optionally packed in by the backend, but + # should not be explicitly required in our generic API. + info: dict = {} # make it frozen? + + def __str__(self) -> str: + return self.name + + def quantize( + self, + size: float, + + ) -> Decimal: + ''' + Truncate input ``size: float`` using ``Decimal`` + quantized form of the digit precision defined + by ``self.lot_tick_size``. + + ''' + digits = float_digits(self.tx_tick) + return Decimal(size).quantize( + Decimal(f'1.{"0".ljust(digits, "0")}'), + rounding=ROUND_HALF_EVEN + ) + + class MktPair(Struct, frozen=True): + ''' + Market description for a pair of assets which are tradeable: + a market which enables transactions of the form, + buy: source asset -> destination asset + sell: destination asset -> source asset - src: str # source asset name being used to buy - src_type: str # source asset's financial type/classification name - # ^ specifies a "class" of financial instrument - # egs. stock, futer, option, bond etc. + The main intention of this type is for a cross-asset, venue, broker + normalized descriptive data type from which all market-auctions can + be mapped, simply. 
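+
+    As a rough (hypothetical) construction sketch, with the assets
+    given as plain ``str`` names:
+
+    >>> mkt = MktPair(
+    ...     src='usd',
+    ...     dst='xbt',
+    ...     price_tick=Decimal('0.1'),
+    ...     size_tick=Decimal('1E-8'),
+    ...     broker='kraken',
+    ... )
+    >>> mkt.key
+    'usdxbt'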
- dst: str # destination asset name being bought - dst_type: str # destination asset's financial type/classification name + ''' + # "source asset" (name) used to buy *from* + # (or used to sell *to*) + src: str | Asset + # "destination asset" (name) used to buy *to* + # (or used to sell *from*) + dst: str | Asset - price_tick: float # minimum price increment value increment - price_tick_digits: int # required decimal digits for above - - size_tick: float # minimum size (aka vlm) increment value increment - - # size_tick_digits: int # required decimal digits for above @property - def size_tick_digits(self) -> int: - return self.size_tick + def key(self) -> str: + ''' + The "endpoint key" for this market. + In most other tina platforms this is referred to as the + "symbol". + + ''' + return f'{self.src}{self.dst}' + + # the tick size is the number describing the smallest step in value + # available in this market between the source and destination + # assets. + # https://en.wikipedia.org/wiki/Tick_size + # https://en.wikipedia.org/wiki/Commodity_tick + # https://en.wikipedia.org/wiki/Percentage_in_point + price_tick: Decimal # minimum price increment value increment + size_tick: Decimal # minimum size (aka vlm) increment value increment + + # @property + # def size_tick_digits(self) -> int: + # return float_digits(self.size_tick) + + broker: str | None = None # the middle man giving access venue: str | None = None # market venue provider name expiry: str | None = None # for derivs, expiry datetime parseable str + # destination asset's financial type/classification name + # NOTE: this is required for the order size allocator system, + # since we use different default settings based on the type + # of the destination asset, eg. futes use a units limits vs. + # equities a $limit. + dst_type: AssetTypeName | None = None + + # source asset's financial type/classification name + # TODO: is a src type required for trading? + # there's no reason to need any more then the one-way alloc-limiter + # config right? + # src_type: AssetTypeName + # for derivs, info describing contract, egs. # strike price, call or put, swap type, exercise model, etc. contract_info: str | None = None @@ -81,13 +216,53 @@ class MktPair(Struct, frozen=True): # fqa, fqma, .. etc. see issue: # https://github.com/pikers/piker/issues/467 @property - def fqsn(self) -> str: + def fqme(self) -> str: ''' - Return the fully qualified market (endpoint) name for the + Return the fully qualified market endpoint-address for the pair of transacting assets. + Yes, you can pronounce it colloquially as "f#$%-me".. + ''' - ... + + # fqsn = fqme + + def quantize( + self, + size: float, + + quantity_type: Literal['price', 'size'] = 'size', + + ) -> Decimal: + ''' + Truncate input ``size: float`` using ``Decimal`` + and ``.size_tick``'s # of digits. + + ''' + match quantity_type: + case 'price': + digits = float_digits(self.price_tick) + case 'size': + digits = float_digits(self.size_tick) + + return Decimal(size).quantize( + Decimal(f'1.{"0".ljust(digits, "0")}'), + rounding=ROUND_HALF_EVEN + ) + + # TODO: remove this? 
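+    # (a rough usage sketch of ``.quantize()`` above w/ hypothetical
+    # values: given ``size_tick=Decimal('0.0001')``, a raw fill size
+    # of ``0.023451`` quantizes to ``Decimal('0.0235')`` under
+    # ``ROUND_HALF_EVEN``.)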
+    @property
+    def type_key(self) -> str:
+        return list(self.broker_info.values())[0]['asset_type']
+
+    # @classmethod
+    # def from_fqme(
+    #     cls,
+    #     fqme: str,
+    #     **kwargs,
+
+    # ) -> MktPair:
+    #     broker, key, suffix = unpack_fqme(fqme)
 
 
 def mk_fqsn(
@@ -103,34 +278,6 @@ def mk_fqsn(
     return '.'.join([symbol, provider]).lower()
 
 
-def float_digits(
-    value: float,
-) -> int:
-    '''
-    Return the number of precision digits read from a float value.
-
-    '''
-    if value == 0:
-        return 0
-
-    return int(-Decimal(str(value)).as_tuple().exponent)
-
-
-def digits_to_dec(
-    ndigits: int,
-) -> Decimal:
-    '''
-    Return the minimum float value for an input integer value.
-
-    eg. 3 -> 0.001
-
-    '''
-    if ndigits == 0:
-        return Decimal('0')
-
-    return Decimal('0.' + '0'*(ndigits-1) + '1')
-
-
 def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     '''
     Unpack a fully-qualified-symbol-name to ``tuple``.
@@ -164,6 +311,10 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
         suffix,
     )
 
+
+unpack_fqme = unpack_fqsn
+
+
 # TODO: rework the below `Symbol` (which was originally inspired and
 # derived from stuff in quantdom) into a simpler, ipc msg ready, market
 # endpoint meta-data container type as per the drafted interface above.
@@ -176,37 +327,9 @@ class Symbol(Struct):
     key: str
     tick_size: float = 0.01
     lot_tick_size: float = 0.0  # "volume" precision as min step value
-    tick_size_digits: int = 2
-    lot_size_digits: int = 0
     suffix: str = ''
    broker_info: dict[str, dict[str, Any]] = {}
 
-    @classmethod
-    def from_broker_info(
-        cls,
-        broker: str,
-        symbol: str,
-        info: dict[str, Any],
-        suffix: str = '',
-
-    ) -> Symbol:
-
-        tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
-
-        return Symbol(
-            key=symbol,
-
-            tick_size=tick_size,
-            lot_tick_size=lot_size,
-
-            tick_size_digits=float_digits(tick_size),
-            lot_size_digits=float_digits(lot_size),
-
-            suffix=suffix,
-            broker_info={broker: info},
-        )
-
     @classmethod
     def from_fqsn(
         cls,
         fqsn: str,
         info: dict[str, Any],
 
     ) -> Symbol:
         broker, key, suffix = unpack_fqsn(fqsn)
-        return cls.from_broker_info(
-            broker,
-            key,
-            info=info,
+        tick_size = info.get('price_tick_size', 0.01)
+        lot_size = info.get('lot_tick_size', 0.0)
+
+        return Symbol(
+            key=key,
+
+            tick_size=tick_size,
+            lot_tick_size=lot_size,
+
+            # tick_size_digits=float_digits(tick_size),
+            # lot_size_digits=float_digits(lot_size),
+
             suffix=suffix,
+            broker_info={broker: info},
         )
 
+    # compat name mapping
+    from_fqme = from_fqsn
+
     @property
     def type_key(self) -> str:
         return list(self.broker_info.values())[0]['asset_type']
@@ -230,38 +365,20 @@ class Symbol(Struct):
     def brokers(self) -> list[str]:
         return list(self.broker_info.keys())
 
-    def nearest_tick(self, value: float) -> float:
-        '''
-        Return the nearest tick value based on mininum increment.
+    @property
+    def tick_size_digits(self) -> int:
+        return float_digits(self.tick_size)
 
-        '''
-        mult = 1 / self.tick_size
-        return round(value * mult) / mult
+    @property
+    def lot_size_digits(self) -> int:
+        return float_digits(self.lot_tick_size)
 
-    def front_feed(self) -> tuple[str, str]:
-        '''
-        Return the "current" feed key for this symbol.
-
-        (i.e. the broker + symbol key in a tuple).
- - ''' - return ( - list(self.broker_info.keys())[0], - self.key, - ) - - def tokens(self) -> tuple[str]: - broker, key = self.front_feed() - if self.suffix: - return (key, self.suffix, broker) - else: - return (key, broker) + @property + def broker(self) -> str: + return list(self.broker_info.keys())[0] @property def fqsn(self) -> str: - return '.'.join(self.tokens()).lower() - - def front_fqsn(self) -> str: ''' fqsn = "fully qualified symbol name" @@ -279,24 +396,30 @@ class Symbol(Struct): ... ''' - tokens = self.tokens() - fqsn = '.'.join(map(str.lower, tokens)) - return fqsn + broker = self.broker + key = self.key + if self.suffix: + tokens = (key, self.suffix, broker) + else: + tokens = (key, broker) - def quantize_size( + return '.'.join(tokens).lower() + + fqme = fqsn + + def quantize( self, size: float, ) -> Decimal: ''' Truncate input ``size: float`` using ``Decimal`` - and ``.lot_size_digits``. + quantized form of the digit precision defined + by ``self.lot_tick_size``. ''' - digits = self.lot_size_digits + digits = float_digits(self.lot_tick_size) return Decimal(size).quantize( Decimal(f'1.{"0".ljust(digits, "0")}'), rounding=ROUND_HALF_EVEN ) - - From acc5af1fdb3a898f95534fbb1c53be2198a1921d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:17:19 -0400 Subject: [PATCH 010/294] Drop `Symbol.front_feed()` usage from order mode --- piker/ui/order_mode.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 6ac0f1f4..ab89798b 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -354,7 +354,7 @@ class OrderMode: order = staged.copy() order.oid = oid - order.symbol = order.symbol.front_fqsn() + order.symbol = order.symbol.fqme lines = self.lines_from_order( order, @@ -948,8 +948,8 @@ async def process_trade_msg( ): sym = mode.chart.linked.symbol pp_msg_symbol = msg['symbol'].lower() - fqsn = sym.front_fqsn() - broker, key = sym.front_feed() + fqsn = sym.fqme + broker = sym.broker if ( pp_msg_symbol == fqsn or pp_msg_symbol == fqsn.removesuffix(f'.{broker}') @@ -997,7 +997,7 @@ async def process_trade_msg( assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}' sym = mode.chart.linked.symbol - fqsn = sym.front_fqsn() + fqsn = sym.fqme if ( ((order.symbol + f'.{msg.src}') == fqsn) From e65f3f84b99d01c824a243676bc0646fc9412418 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:18:00 -0400 Subject: [PATCH 011/294] Drop `Symbol.front_fqsn()` usage from chart, fsp and clearing stuff --- piker/clearing/_messages.py | 4 ++-- piker/clearing/_paper_engine.py | 2 +- piker/fsp/_engine.py | 2 +- piker/ui/_chart.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index f084af05..0d97ee3f 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -300,10 +300,10 @@ class BrokerdError(Struct): class BrokerdPosition(Struct): - '''Position update event from brokerd. + ''' + Position update event from brokerd. 
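+
+    e.g. a rough sketch of the wire-form (hypothetical values):
+
+        {'broker': 'kraken', 'account': 'kraken.spot',
+         'symbol': 'xbtusdt.kraken', 'size': 0.5,
+         'avg_price': 21000.0, 'currency': ''}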
''' - broker: str account: str symbol: str diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 00611e6d..1726aed6 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -544,7 +544,7 @@ async def trades_dialogue( pp_msgs.append(BrokerdPosition( broker=broker, account='paper', - symbol=pos.symbol.front_fqsn(), + symbol=pos.symbol.fqme, size=pos.size, avg_price=pos.ppu, )) diff --git a/piker/fsp/_engine.py b/piker/fsp/_engine.py index a77e662f..3e500f46 100644 --- a/piker/fsp/_engine.py +++ b/piker/fsp/_engine.py @@ -104,7 +104,7 @@ async def fsp_compute( disabled=True ) - fqsn = symbol.front_fqsn() + fqsn = symbol.fqme out_stream = func( # TODO: do we even need this if we do the feed api right? diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index b05d6fcf..9fffeee2 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -290,7 +290,7 @@ class GodWidget(QWidget): symbol = self.rt_linked.symbol if symbol is not None: self.window.setWindowTitle( - f'{symbol.front_fqsn()} ' + f'{symbol.fqme} ' f'tick:{symbol.tick_size}' ) From a44b6f7c2f02ab0bcb2e7bb297df5860e7a5ca18 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:29:57 -0400 Subject: [PATCH 012/294] `ib`: adjust to new simplified `Symbol` Drop usage of removed methods and attrs and only pass in the `.tick_size: Decimal` value during construction. --- piker/brokers/ib/broker.py | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 77f0bb53..0944bf86 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -21,6 +21,7 @@ from __future__ import annotations from bisect import insort from contextlib import ExitStack from dataclasses import asdict +from decimal import Decimal from functools import partial from pprint import pformat import time @@ -72,7 +73,6 @@ from piker.clearing._messages import ( ) from piker.accounting._mktinfo import ( Symbol, - float_digits, ) from .api import ( _accounts2clients, @@ -414,7 +414,7 @@ async def update_and_audit_msgs( # right since `.broker` is already included? account=f'ib.{acctid}', # XXX: the `.ib` is stripped..? - symbol=p.symbol.front_fqsn(), + symbol=p.symbol.fqme, # currency=ibppmsg.currency, size=p.size, avg_price=p.ppu, @@ -625,7 +625,7 @@ async def trades_dialogue( pairinfo = pp.symbol if msg.size != pp.size: log.error( - f'Pos size mismatch {pairinfo.front_fqsn()}:\n' + f'Pos size mismatch {pairinfo.fqsn}:\n' f'ib: {msg.size}\n' f'piker: {pp.size}\n' ) @@ -1110,7 +1110,6 @@ def norm_trade_records( comms = -1*record['ibCommission'] price = record.get('price') or record['tradePrice'] - price_tick_digits = float_digits(price) # the api doesn't do the -/+ on the quantity for you but flex # records do.. are you fucking serious ib...!? @@ -1153,7 +1152,9 @@ def norm_trade_records( # special handling of symbol extraction from # flex records using some ad-hoc schema parsing. - asset_type: str = record.get('assetCategory') or record.get('secType', 'STK') + asset_type: str = record.get( + 'assetCategory' + ) or record.get('secType', 'STK') # TODO: XXX: WOA this is kinda hacky.. 
probably # should figure out the correct future pair key more @@ -1170,34 +1171,38 @@ def norm_trade_records( suffix = f'{exch}.{expiry}' expiry = pendulum.parse(expiry) - src: str = record['currency'] + # src: str = record['currency'] + # price_tick_digits = float_digits(price) + tick_size = Decimal( + Decimal(10)**Decimal(str(price)).as_tuple().exponent + ) pair = Symbol.from_fqsn( fqsn=f'{symbol}.{suffix}.ib', info={ - 'tick_size_digits': price_tick_digits, + 'tick_size': tick_size, # NOTE: for "legacy" assets, volume is normally discreet, not # a float, but we keep a digit in case the suitz decide # to get crazy and change it; we'll be kinda ready # schema-wise.. - 'lot_size_digits': 1, + 'lot_tick_size': 0.0, # TODO: remove when we switching from # ``Symbol`` -> ``MktPair`` 'asset_type': asset_type, - # TODO: figure out a target fin-type name - # set and normalize to that here! - 'dst_type': asset_type.lower(), + # # TODO: figure out a target fin-type name + # # set and normalize to that here! + # 'dst_type': asset_type.lower(), - # starting to use new key naming as in ``MktPair`` - # type have drafted... - 'src': src, - 'src_type': 'fiat', + # # starting to use new key naming as in ``MktPair`` + # # type have drafted... + # 'src': src, + # 'src_type': 'fiat', }, ) - fqsn = pair.front_fqsn().rstrip('.ib') + fqsn = pair.fqme.rstrip('.ib') # NOTE: for flex records the normal fields for defining an fqsn # sometimes won't be available so we rely on two approaches for From 3be53540c1fbd1e1a2fabb8642debb60abfefa60 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:31:28 -0400 Subject: [PATCH 013/294] `kraken`: pack `Asset` into local client cache Try out using our new internal type for storing info about kraken's asset infos now stored in the `Client.assets: dict[str, Asset]` table. Handle a server error when requesting such info msgs. --- piker/brokers/kraken/api.py | 78 +++++++++++++++++++++++++++++++------ 1 file changed, 67 insertions(+), 11 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 82479329..a7415b34 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -42,7 +42,12 @@ import trio from piker import config from piker.data.types import Struct -from piker.accounting._mktinfo import Symbol +from piker.accounting._mktinfo import ( + Asset, + digits_to_dec, + MktPair, + Symbol, +) from piker.brokers._util import ( resproc, SymbolNotFound, @@ -177,11 +182,13 @@ class Client: 'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' }) - self.conf: dict[str, str] = config self._name = name self._api_key = api_key self._secret = secret + self.conf: dict[str, str] = config + self.assets: dict[str, Asset] = {} + @property def pairs(self) -> dict[str, Pair]: if self._pairs is None: @@ -252,19 +259,50 @@ class Client: # TODO: we need to pull out the "asset" decimals # data and return a `decimal.Decimal` instead here! + # using the underlying Asset return { self._atable[sym].lower(): float(bal) for sym, bal in by_bsuid.items() } async def get_assets(self) -> dict[str, dict]: + ''' + Get all assets available for trading and xfer. 
+ + https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo + + return msg: + "asset1": { + "aclass": "string", + "altname": "string", + "decimals": 0, + "display_decimals": 0, + "collateral_value": 0, + "status": "string" + } + + ''' resp = await self._public('Assets', {}) return resp['result'] async def cache_assets(self) -> None: - assets = self.assets = await self.get_assets() + ''' + Load and cache all asset infos and pack into + our native ``Asset`` struct. + + ''' + assets = await self.get_assets() for bsuid, info in assets.items(): - self._atable[bsuid] = info['altname'] + + aname = self._atable[bsuid] = info['altname'] + aclass = info['aclass'] + + self.assets[bsuid] = Asset( + name=aname.lower(), + atype=f'crypto_{aclass}', + tx_tick=digits_to_dec(info['decimals']), + info=info, + ) async def get_trades( self, @@ -327,10 +365,15 @@ class Client: Currently only withdrawals are supported. ''' - xfers: list[dict] = (await self.endpoint( + resp = await self.endpoint( 'WithdrawStatus', {'asset': asset}, - ))['result'] + ) + try: + xfers: list[dict] = resp['result'] + except KeyError: + log.exception(f'Kraken suxxx: {resp}') + return [] # eg. resp schema: # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset': @@ -345,19 +388,32 @@ class Client: # look up the normalized name and asset info asset_key = entry['asset'] - asset_info = self.assets[asset_key] - asset = self._atable[asset_key].lower() + asset = self.assets[asset_key] + asset_key = self._atable[asset_key].lower() # XXX: this is in the asset units (likely) so it isn't # quite the same as a commisions cost necessarily..) cost = float(entry['fee']) - fqsn = asset + '.kraken' + fqsn = asset_key + '.kraken' + + # pair = MktPair( + # src=Asset( + # name=asset_key, + # type='crypto_currency', + # tx_tick=asset_info['decimals'] + + # tx_tick= + # info=asset_info, + # ) + # broker='kraken', + # ) + pairinfo = Symbol.from_fqsn( fqsn, info={ 'asset_type': 'crypto', - 'lot_tick_size': asset_info['decimals'], + 'lot_tick_size': asset.tx_tick, }, ) @@ -366,7 +422,7 @@ class Client: sym=pairinfo, tid=entry['txid'], dt=pendulum.from_timestamp(entry['time']), - bsuid=f'{asset}{src_asset}', + bsuid=f'{asset_key}{src_asset}', size=-1*( float(entry['amount']) + From 69c9ecc5e3d504e7f3b9fe6c022f55c6e2b37df9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:55:37 -0400 Subject: [PATCH 014/294] `kraken`: write `pps.toml` on updates for now --- piker/brokers/kraken/broker.py | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 72d6f0fe..5509968a 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -50,6 +50,7 @@ from piker.accounting import ( ) from piker.accounting._mktinfo import ( Symbol, + MktPair, digits_to_dec, ) from piker.clearing._messages import ( @@ -370,6 +371,8 @@ def trades2pps( acctid: str, new_trans: dict[str, Transaction] = {}, + write_storage: bool = True, + ) -> tuple[ list[BrokerdPosition], list[Transaction], @@ -400,13 +403,20 @@ def trades2pps( # right since `.broker` is already # included? account='kraken.' + acctid, - symbol=p.symbol.front_fqsn(), + symbol=p.symbol.fqme, size=p.size, avg_price=p.ppu, currency='', ) position_msgs.append(msg) + if write_storage: + # TODO: ideally this blocks the this task + # as little as possible. we need to either do + # these writes in another actor, or try out `trio`'s + # async file IO api? 
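+        # (e.g. one low-effort interim option might be to wrap the
+        # write in ``trio.to_thread.run_sync()`` from async callers
+        # so at least the event loop isn't blocked on file io.)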
+ table.write_config() + return position_msgs @@ -639,6 +649,12 @@ async def trades_dialogue( ) await ctx.started((ppmsgs, [acc_name])) + # TODO: ideally this blocks the this task + # as little as possible. we need to either do + # these writes in another actor, or try out `trio`'s + # async file IO api? + table.write_config() + # Get websocket token for authenticated data stream # Assert that a token was actually received. resp = await client.endpoint('GetWebSocketsToken', {}) @@ -814,8 +830,6 @@ async def handle_order_updates( for pp_msg in ppmsgs: await ems_stream.send(pp_msg) - ledger_trans.update(new_trans) - # process and relay order state change events # https://docs.kraken.com/websockets/#message-openOrders case [ @@ -1184,9 +1198,21 @@ def norm_trade_records( }[record['type']] # we normalize to kraken's `altname` always.. - bsuid, pair_info = Client.normalize_symbol(record['pair']) + bsuid, pair_info = Client.normalize_symbol( + record['pair'] + ) fqsn = f'{bsuid}.kraken' + dst, src = pair_info.wsname.lower().split('/') + # mkpair = MktPair( + # src=src, + # dst=dst, + # price_tick=digits_to_dec(pair_info.pair_decimals), + # size_tick=digits_to_dec(pair_info.lot_decimals), + # dst_type='crypto_currency', + # ) + # breakpoint() + mktpair = Symbol.from_fqsn( fqsn, info={ From 65a7853cf3c562ca72923c7e74d8babc3efe7f76 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:56:35 -0400 Subject: [PATCH 015/294] Delegate to new `.accounting._mktinfo._derivs` from `ui._positioning` --- piker/ui/_position.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 3574dd2d..547977dc 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -46,7 +46,13 @@ from ..calc import ( puterize, ) from ..clearing._allocate import Allocator -from ..accounting import Position +from ..accounting import ( + Position, +) +from ..accounting._mktinfo import ( + _derivs, +) + from ..data._normalize import iterticks from ..data.feed import ( Feed, @@ -85,7 +91,7 @@ async def update_pnl_from_feed( pp: PositionTracker = order_mode.current_pp live: Position = pp.live_pp - key: str = live.symbol.front_fqsn() + key: str = live.symbol.fqme log.info(f'Starting pnl display for {pp.alloc.account}') @@ -424,7 +430,7 @@ class SettingsPane: # maybe start update task global _pnl_tasks - fqsn = sym.front_fqsn() + fqsn = sym.fqme if fqsn not in _pnl_tasks: _pnl_tasks[fqsn] = True self.order_mode.nursery.start_soon( @@ -495,14 +501,6 @@ def pp_line( return line -_derivs = ( - 'future', - 'continuous_future', - 'option', - 'futures_option', -) - - # TODO: move into annoate module? def mk_level_marker( chart: ChartPlotWidget, From 2583706b35d2b6307803c060655c6912607eaa25 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:58:55 -0400 Subject: [PATCH 016/294] Port `accounting._pos` to new `Symbol` simplifications --- piker/accounting/_pos.py | 31 +++++++++++++------------------ 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 204e7a8e..43f62ed9 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -45,6 +45,7 @@ from ._ledger import ( ) from ._mktinfo import ( Symbol, + # MktPair, unpack_fqsn, ) from .. import config @@ -62,7 +63,7 @@ class Position(Struct): transaction history. 
''' - symbol: Symbol + symbol: Symbol # | MktPair # can be +ve or -ve for long/short size: float @@ -113,7 +114,7 @@ class Position(Struct): # it via the trades ledger.. # drop symbol obj in serialized form s = d.pop('symbol') - fqsn = s.front_fqsn() + fqsn = s.fqme broker, key, suffix = unpack_fqsn(fqsn) sym_info = s.broker_info[broker] @@ -157,7 +158,6 @@ class Position(Struct): inline_table['tid'] = tid toml_clears_list.append(inline_table) - d['clears'] = toml_clears_list return fqsn, d @@ -218,19 +218,14 @@ class Position(Struct): symbol = self.symbol lot_size_digits = symbol.lot_size_digits - ppu, size = ( - round( - msg['avg_price'], - ndigits=symbol.tick_size_digits - ), - round( - msg['size'], - ndigits=lot_size_digits - ), + self.ppu = round( + msg['avg_price'], + ndigits=symbol.tick_size_digits, + ) + self.size = round( + msg['size'], + ndigits=lot_size_digits, ) - - self.ppu = ppu - self.size = size @property def dsize(self) -> float: @@ -406,7 +401,7 @@ class Position(Struct): size = round(size * self.split_ratio) return float( - self.symbol.quantize_size(size), + self.symbol.quantize(size), ) def minimize_clears( @@ -466,8 +461,8 @@ class Position(Struct): return clear - def sugest_split(self) -> float: - ... + # def sugest_split(self) -> float: + # ... class PpTable(Struct): From 63304f535c3723ce49f91c2cea417e51b8576572 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 14 Mar 2023 20:59:32 -0400 Subject: [PATCH 017/294] Start to prep `Transaction` for `MktPair`.. --- piker/accounting/_ledger.py | 42 +++++++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index df7bb4aa..73df9064 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -35,7 +35,10 @@ import toml from .. import config from ..data.types import Struct from ..log import get_logger -from ._mktinfo import Symbol +from ._mktinfo import ( + Symbol, # legacy + MktPair, +) log = get_logger(__name__) @@ -90,10 +93,13 @@ def open_trade_ledger( class Transaction(Struct, frozen=True): - # TODO: should this be ``.to`` (see below)? - fqsn: str - sym: Symbol + # TODO: unify this with the `MktPair`, + # once we have that as a required field, + # we don't really need the fqsn any more.. + fqsn: str + sym: Symbol | MktPair + tid: Union[str, int] # unique transaction id size: float price: float @@ -101,17 +107,45 @@ class Transaction(Struct, frozen=True): dt: datetime expiry: datetime | None = None + @property + def mktpair(self) -> MktPair: + sym = self.sym + + if isinstance(sym, MktPair): + # presume it's already set as our desired + # ``MktPair`` type: + return sym + + # cast to new type + return MktPair.from_fqme( + sym.fqme, + price_tick=digits_to_dec( + Decimal(str(sym.tick_size)), + ), + size_tick=digits_to_dec( + Decimal(str(sym.lot_tick_size)), + ), + ) + + # remap for back-compat + @property + def fqme(self) -> str: + return self.fqsn + # optional key normally derived from the broker # backend which ensures the instrument-symbol this record # is for is truly unique. bsuid: Union[str, int] | None = None + # XXX NOTE: this will come from the `MktPair` + # instead of defined here right? # optional fqsn for the source "asset"/money symbol? 
    # from: Optional[str] = None


 def iter_by_dt(
     clears: dict[str, Any],
+
 ) -> Iterator[tuple[str, dict]]:
     '''
     Iterate entries of a ``clears: dict`` table sorted by entry recorded
From 56f736e7ca02776571ba67dee803362037d8acd6 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 15 Mar 2023 11:43:04 -0400
Subject: [PATCH 018/294] Drop use of `Symbol.brokers` everywhere

---
 piker/data/feed.py     | 2 +-
 piker/ui/_display.py   | 2 +-
 piker/ui/order_mode.py | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/piker/data/feed.py b/piker/data/feed.py
index 5e1a1aec..530bed92 100644
--- a/piker/data/feed.py
+++ b/piker/data/feed.py
@@ -1635,7 +1635,7 @@ async def open_feed(
         # apply `brokerd`-common stream to each flume
         # tracking a symbol from that provider.
         for fqsn, flume in feed.flumes.items():
-            if brokermod.name in flume.symbol.brokers:
+            if brokermod.name == flume.symbol.broker:
                 flume.stream = stream
 
     assert len(feed.mods) == len(feed.portals) == len(feed.streams)
diff --git a/piker/ui/_display.py b/piker/ui/_display.py
index 3da33809..a6f8e6c2 100644
--- a/piker/ui/_display.py
+++ b/piker/ui/_display.py
@@ -1288,7 +1288,7 @@ async def display_symbol_data(
     hist_ohlcv: ShmArray = flume.hist_shm
 
     symbol = flume.symbol
-    brokername = symbol.brokers[0]
+    brokername = symbol.broker
     fqsn = symbol.fqsn
 
     hist_chart = hist_linked.plot_ohlc_main(
diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index ab89798b..43598d9b 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -292,7 +292,7 @@ class OrderMode:
             account=self.current_pp.alloc.account,
             size=0,
             symbol=symbol,
-            brokers=symbol.brokers,
+            brokers=[symbol.broker],
             oid='',  # filled in on submit
             exec_mode=trigger_type,  # dark or live
         )
@@ -709,7 +709,7 @@ async def open_order_mode(
 
     # load account names from ``brokers.toml``
     accounts_def = config.load_accounts(
-        providers=symbol.brokers
+        providers=[symbol.broker],
     )
 
     # XXX: ``brokerd`` delivers a set of account names that it
From 85ddfc0f2de77aa7651daaa1f67ab7656293e31e Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 15 Mar 2023 13:16:08 -0400
Subject: [PATCH 019/294] Drop use of `mk_fqsn()`

---
 piker/clearing/_ems.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py
index b2c4c614..4a735a4e 100644
--- a/piker/clearing/_ems.py
+++ b/piker/clearing/_ems.py
@@ -45,7 +45,6 @@ from ..log import get_logger
 from ..data._normalize import iterticks
 from ..accounting._mktinfo import (
     unpack_fqsn,
-    mk_fqsn,
     float_digits,
 )
 from ..data.feed import (
@@ -948,7 +947,7 @@ async def translate_and_relay_brokerd_events(
 
         # NOTE: be sure to pack an fqsn for the client side!
         order = Order(**status_msg.req)
-        order.symbol = mk_fqsn(broker, order.symbol)
+        order.symbol = f'{order.symbol}.{broker}'
 
         assert order.price and order.size
         status_msg.req = order
From cf9442f4d561d34fedf686ecce640bbb7963a2ea Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 15 Mar 2023 19:31:44 -0400
Subject: [PATCH 020/294] Further refinement and shimming of `MktPair`

Prepping to entirely replace `Symbol`; this adds a buncha docs/comments
and a better implementation for representing and parsing the FQME: "fully
qualified market endpoint".

Deatz:
- make `.src` an optional field until we figure out how we're going to
  support loading source assets from all backends sensibly..
- implement `MktPair.fqme: str` (what was previously called `fqsn`)
  using a new util func: `maybe_cons_tokens()`.
- drop `Symbol.brokers` and expect only `.broker` usage.
- remap anything with `fqsn` in the name to `fqme` with aliases from
  the old name.
- implement `unpack_fqme()` with `match:` syntax B)
- add `MktPair.tick_size_digits`, `.lot_size_digits`, `.fqsn`, `.key` for
  backward compat.
- make all fqme-generation related fields empty `str`s by default.
- add `MktPair.resolved: bool`, a flag indicating whether or not `.dst`
  is an `Asset` instance or just a string, and `.bs_mktid`, the field to
  hold the "backend system market id" per broker.
---
 piker/accounting/_mktinfo.py | 278 +++++++++++++++++++++--------------
 1 file changed, 166 insertions(+), 112 deletions(-)

diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index dde8ce27..18a6209c 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -43,8 +43,8 @@ from ..data.types import Struct
 _underlyings: list[str] = [
     'stock',
     'bond',
-    'crypto_currency',
-    'fiat_currency',
+    'crypto',
+    'fiat',
     'commodity',
 ]
 
@@ -102,7 +102,8 @@ def digits_to_dec(
 
 class Asset(Struct, frozen=True):
     '''
-    Container type describing any transactable asset's technology.
+    Container type describing any transactable asset and its
+    contract-like and/or underlying technology meta-info.
 
     '''
     name: str
@@ -116,6 +117,9 @@ class Asset(Struct, frozen=True):
     # should not be explicitly required in our generic API.
     info: dict = {}  # make it frozen?
 
+    # TODO?
+    # _to_dict_skip = {'info'}
+
     def __str__(self) -> str:
         return self.name
 
@@ -137,6 +141,18 @@ class Asset(Struct, frozen=True):
         )
 
 
+def maybe_cons_tokens(
+    tokens: list[Any],
+    delim_char: str = '.',
+) -> str:
+    '''
+    Construct `str` output from a maybe-concatenation of input
+    sequence of elements in ``tokens``.
+
+    '''
+    return '.'.join(filter(bool, tokens)).lower()
+
+
 class MktPair(Struct, frozen=True):
     '''
     Market description for a pair of assets which are tradeable:
@@ -144,52 +160,50 @@ class MktPair(Struct, frozen=True):
         buy: source asset -> destination asset
         sell: destination asset -> source asset
 
-    The main intention of this type is for a cross-asset, venue, broker
-    normalized descriptive data type from which all market-auctions can
-    be mapped, simply.
+    The main intention of this type is for a **simple** cross-asset
+    venue/broker normalized description type from which all
+    market-auctions can be mapped from FQME identifiers.
+
+    TODO: our eventual target fqme format/schema is:
+    <dst>/<src>.<expiry>.<con_info_1>.<con_info_2>. -> .<venue>.<broker>
+          ^ -- optional tokens ------------------------------- ^
 
     '''
-    # "source asset" (name) used to buy *from*
-    # (or used to sell *to*)
-    src: str | Asset
+    dst: str | Asset
     # "destination asset" (name) used to buy *to*
     # (or used to sell *from*)
-    dst: str | Asset
-
-    @property
-    def key(self) -> str:
-        '''
-        The "endpoint key" for this market.
-
-        In most other tina platforms this is referred to as the
-        "symbol".
-
-        '''
-        return f'{self.src}{self.dst}'
+    price_tick: Decimal  # minimum price increment
+    size_tick: Decimal  # minimum size (aka vlm) increment
 
     # the tick size is the number describing the smallest step in value
     # available in this market between the source and destination
     # assets.
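    # eg. hypothetically, a price_tick of Decimal('0.01') with a
    # size_tick of Decimal('1') describes a market quoting in
    # one-cent price steps and clearing in whole units.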
# https://en.wikipedia.org/wiki/Tick_size # https://en.wikipedia.org/wiki/Commodity_tick # https://en.wikipedia.org/wiki/Percentage_in_point - price_tick: Decimal # minimum price increment value increment - size_tick: Decimal # minimum size (aka vlm) increment value increment - # @property - # def size_tick_digits(self) -> int: - # return float_digits(self.size_tick) + # unique "broker id" since every market endpoint provider + # has their own nomenclature and schema for market maps. + bs_mktid: str + broker: str # the middle man giving access - broker: str | None = None # the middle man giving access - venue: str | None = None # market venue provider name - expiry: str | None = None # for derivs, expiry datetime parseable str + # NOTE: to start this field is optional but should eventually be + # required; the reason is for backward compat since more positioning + # calculations were not originally stored with a src asset.. + + src: str | Asset | None = None + # "source asset" (name) used to buy *from* + # (or used to sell *to*). + + venue: str = '' # market venue provider name + expiry: str = '' # for derivs, expiry datetime parseable str # destination asset's financial type/classification name # NOTE: this is required for the order size allocator system, # since we use different default settings based on the type # of the destination asset, eg. futes use a units limits vs. # equities a $limit. - dst_type: AssetTypeName | None = None + # dst_type: AssetTypeName | None = None # source asset's financial type/classification name # TODO: is a src type required for trading? @@ -211,21 +225,101 @@ class MktPair(Struct, frozen=True): Constructor for a received msg-dict normally received over IPC. ''' - ... + raise NotImplementedError - # fqa, fqma, .. etc. see issue: - # https://github.com/pikers/piker/issues/467 + @property + def resolved(self) -> bool: + return isinstance(self.dst, Asset) + + @classmethod + def from_fqme( + cls, + fqme: str, + price_tick: float | str, + size_tick: float | str, + bs_mktid: str, + + ) -> MktPair: + + broker, key, suffix = unpack_fqme(fqme) + + # XXX: loading from a fqme string will + # leave this pair as "un resolved" meaning + # we don't yet have `.dst` set as an `Asset` + # which we expect to be filled in by some + # backend client with access to that data-info. + return cls( + dst=key, # not resolved + price_tick=price_tick, + size_tick=size_tick, + bs_mktid=bs_mktid, + broker=broker, + ) + + @property + def key(self) -> str: + ''' + The "endpoint key" for this market. + + Eg. mnq/usd or btc/usdt or xmr/btc + + In most other tina platforms this is referred to as the + "symbol". + + ''' + return maybe_cons_tokens([self.dst, self.src]) + + # NOTE: the main idea behind an fqme is to map a "market address" + # to some endpoint from a transaction provider (eg. a broker) such + # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker + # market address" maps 1-to-1 to some broker trading endpoint. + # @cached_property @property def fqme(self) -> str: ''' Return the fully qualified market endpoint-address for the pair of transacting assets. - Yes, you can pronounce it colloquially as "f#$%-me".. + fqme = "fully qualified market endpoint" + + And yes, you pronounce it colloquially as read.. 
+
+        Basically the idea here is for all client code (consumers of piker's
+        APIs which query the data/broker-provider agnostic layer(s)) should be
+        able to tell which backend / venue / derivative each data feed/flow is
+        from by an explicit string-key of the current form:
+
+        <market-instrument-name>.<venue>.<expiry>.<broker>
+
+        eg. for an explicit daq mini futes contract: mnq.cme.20230317.ib
+
+        TODO: I have thoughts that we should actually change this to be
+        more like an "attr lookup" (like how the web should have done
+        urls, but marketing peeps ruined it etc. etc.)
+
+        ...
+
+        TODO:
+        See community discussion on naming and nomenclature, order
+        of addressing hierarchy, general schema, internal representation:
+
+        https://github.com/pikers/piker/issues/467
 
         '''
+        return maybe_cons_tokens([
+            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
+            self.venue,
+            self.expiry,
+            self.broker,
+        ])
 
-        # fqsn = fqme
+    @property
+    def fqsn(self) -> str:
+        return self.fqme
 
     def quantize(
         self,
@@ -250,35 +344,27 @@ class MktPair(Struct, frozen=True):
             rounding=ROUND_HALF_EVEN
         )
 
-    # TODO: remove this?
+    # @property
+    # def size_tick_digits(self) -> int:
+    #     return float_digits(self.size_tick)
+
+    # TODO: BACKWARD COMPAT, TO REMOVE?
     @property
     def type_key(self) -> str:
-        return list(self.broker_info.values())[0]['asset_type']
+        return str(self.dst.atype)
 
-    # @classmethod
-    # def from_fqme(
-    #     cls,
-    #     fqme: str,
-    #     **kwargs,
+    @property
+    def tick_size_digits(self) -> int:
+        return float_digits(self.price_tick)
 
-    # ) -> MktPair:
-    #     broker, key, suffix = unpack_fqme(fqme)
+    @property
+    def lot_size_digits(self) -> int:
+        return float_digits(self.size_tick)
 
 
-def mk_fqsn(
-    provider: str,
-    symbol: str,
-
-) -> str:
-    '''
-    Generate a "fully qualified symbol name" which is
-    a reverse-hierarchical cross broker/provider symbol
-
-    '''
-    return '.'.join([symbol, provider]).lower()
-
-
-def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
+def unpack_fqme(
+    fqme: str,
+) -> tuple[str, str, str]:
     '''
     Unpack a fully-qualified-symbol-name to ``tuple``.
 
@@ -287,37 +373,38 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     suffix = ''
 
     # TODO: probably reverse the order of all this XD
-    tokens = fqsn.split('.')
-    if len(tokens) < 3:
-        # probably crypto
-        symbol, broker = tokens
-        return (
-            broker,
-            symbol,
-            '',
-        )
+    tokens = fqme.split('.')
 
-    elif len(tokens) > 3:
-        symbol, venue, suffix, broker = tokens
-    else:
-        symbol, venue, broker = tokens
-        suffix = ''
+    match tokens:
+        case [mkt_ep, broker]:
+            # probably crypto
+            # mkt_ep, broker = tokens
+            return (
+                broker,
+                mkt_ep,
+                '',
+            )
+
+        # TODO: swap venue and suffix/deriv-info here?
+        case [mkt_ep, venue, suffix, broker]:
+            pass
+
+        case [mkt_ep, venue, broker]:
+            suffix = ''
+
+        case _:
+            raise ValueError(f'Invalid fqme: {fqme}')
 
-    # head, _, broker = fqsn.rpartition('.')
-    # symbol, _, suffix = head.rpartition('.')
     return (
         broker,
-        '.'.join([symbol, venue]),
+        '.'.join([mkt_ep, venue]),
         suffix,
     )
 
 
-unpack_fqme = unpack_fqsn
+unpack_fqsn = unpack_fqme
 
 
-# TODO: rework the below `Symbol` (which was originally inspired and
-# derived from stuff in quantdom) into a simpler, ipc msg ready, market
-# endpoint meta-data container type as per the drafted interace above.
class Symbol(Struct):
     '''
     I guess this is some kinda container thing for dealing with
@@ -343,13 +430,8 @@ class Symbol(Struct):
 
         return Symbol(
             key=key,
-
             tick_size=tick_size,
             lot_tick_size=lot_size,
-
-            # tick_size_digits=float_digits(tick_size),
-            # lot_size_digits=float_digits(lot_size),
-
             suffix=suffix,
             broker_info={broker: info},
         )
@@ -361,10 +443,6 @@ class Symbol(Struct):
     def type_key(self) -> str:
         return list(self.broker_info.values())[0]['asset_type']
 
-    @property
-    def brokers(self) -> list[str]:
-        return list(self.broker_info.keys())
-
     @property
     def tick_size_digits(self) -> int:
         return float_digits(self.lot_tick_size)
@@ -379,23 +457,6 @@ class Symbol(Struct):
 
     @property
     def fqsn(self) -> str:
-        '''
-        fqsn = "fully qualified symbol name"
-
-        Basically the idea here is for all client-ish code (aka programs/actors
-        that ask the provider agnostic layers in the stack for data) should be
-        able to tell which backend / venue / derivative each data feed/flow is
-        from by an explicit string key of the current form:
-
-        ...
-
-        TODO: I have thoughts that we should actually change this to be
-        more like an "attr lookup" (like how the web should have done
-        urls, but marketting peeps ruined it etc. etc.):
-
-        ...
-
-        '''
         broker = self.broker
         key = self.key
         if self.suffix:
@@ -410,14 +471,7 @@ class Symbol(Struct):
     def quantize(
         self,
         size: float,
-    ) -> Decimal:
-        '''
-        Truncate input ``size: float`` using ``Decimal``
-        quantized form of the digit precision defined
-        by ``self.lot_tick_size``.
-
-        '''
+    ) -> Decimal:
         digits = float_digits(self.lot_tick_size)
         return Decimal(size).quantize(
             Decimal(f'1.{"0".ljust(digits, "0")}'),
From 7b28c7a43f013b67e4cf42dea0e46f2a6e625174 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 15 Mar 2023 21:59:16 -0400
Subject: [PATCH 021/294] Prep for dropping `Transaction.sym`

Instead let's name it `.sys` for "system", the thing we use to conduct
the "transactions" ..

Also rename `.bsuid` -> `.bs_mktid` for "backend system market id"
which is more explicit, easier to remember and read.

---
 piker/accounting/_ledger.py | 49 ++++++++++++++++++-------------------
 1 file changed, 24 insertions(+), 25 deletions(-)

diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index 73df9064..1718dda1 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -38,6 +38,7 @@ from ..log import get_logger
 from ._mktinfo import (
     Symbol,  # legacy
     MktPair,
+    Asset,
 )
 
 log = get_logger(__name__)
@@ -98,7 +99,18 @@ class Transaction(Struct, frozen=True):
     # once we have that as a required field,
     # we don't really need the fqsn any more..
     fqsn: str
-    sym: Symbol | MktPair
+
+    # TODO: drop the Symbol type
+
+    # the underlying "transaction system", normally one of a ``MktPair``
+    # (a description of a tradable double auction) or a ledger-recorded
+    # ("ledger" in any sense as long as you can record transfers) of any
+    # sort) ``Asset``.
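+    #
+    # eg. a kraken withdrawal ledger entry records just the bare
+    # ``Asset`` being transferred while a trade fill instead carries
+    # the full ``MktPair`` it cleared on.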
+ sym: MktPair | Asset | Symbol + + @property + def sys(self) -> Symbol: + return self.sym tid: Union[str, int] # unique transaction id size: float @@ -107,35 +119,22 @@ class Transaction(Struct, frozen=True): dt: datetime expiry: datetime | None = None - @property - def mktpair(self) -> MktPair: - sym = self.sym - - if isinstance(sym, MktPair): - # presume it's already set as our desired - # ``MktPair`` type: - return sym - - # cast to new type - return MktPair.from_fqme( - sym.fqme, - price_tick=digits_to_dec( - Decimal(str(sym.tick_size)), - ), - size_tick=digits_to_dec( - Decimal(str(sym.lot_tick_size)), - ), - ) - # remap for back-compat @property def fqme(self) -> str: return self.fqsn - # optional key normally derived from the broker - # backend which ensures the instrument-symbol this record - # is for is truly unique. - bsuid: Union[str, int] | None = None + # (optional) key-id defined by the broker-service backend which + # ensures the instrument-symbol market key for this record is unique + # in the "their backend/system" sense; i.e. this uid for the market + # as defined (internally) in some namespace defined by the broker + # service. + bsuid: str | int | None = None + + @property + def bs_mktid(self) -> str | int | None: + print(f'STOP USING .bsuid` for {self.fqme}') + return self.bs_mktid # XXX NOTE: this will come from the `MktPair` # instead of defined here right? From 72c97d4672e40642ae7b85eb960ea7e328b3ac29 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 17 Mar 2023 19:45:43 -0400 Subject: [PATCH 022/294] Handle read and write of `pps.toml` using `MktPair` Add a logic branch for now that switches on an instance check. Generally swap over all `Position.symbol` and `Transaction.sym` refs to `MktPair`. Do a wholesale rename of all `.bsuid` var names to `.bs_mktid`. --- piker/accounting/__init__.py | 10 +-- piker/accounting/_ledger.py | 7 +- piker/accounting/_pos.py | 150 +++++++++++++++++++------------- piker/brokers/ib/README.rst | 2 +- piker/brokers/ib/broker.py | 24 ++--- piker/clearing/_allocate.py | 2 +- piker/clearing/_paper_engine.py | 4 +- piker/ui/order_mode.py | 2 +- 8 files changed, 112 insertions(+), 89 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index a371f7c2..7d7fbb85 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -48,7 +48,7 @@ __all__ = [ def get_likely_pair( src: str, dst: str, - bsuid: str, + bs_mktid: str, ) -> str: ''' @@ -57,7 +57,7 @@ def get_likely_pair( ''' try: - src_name_start = bsuid.rindex(src) + src_name_start = bs_mktid.rindex(src) except ( ValueError, # substr not found ): @@ -66,13 +66,13 @@ def get_likely_pair( # buy some other dst which was furhter used # to buy another dst..) log.warning( - f'No src fiat {src} found in {bsuid}?' + f'No src fiat {src} found in {bs_mktid}?' ) return - likely_dst = bsuid[:src_name_start] + likely_dst = bs_mktid[:src_name_start] if likely_dst == dst: - return bsuid + return bs_mktid if __name__ == '__main__': diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 1718dda1..14fca94c 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -129,12 +129,7 @@ class Transaction(Struct, frozen=True): # in the "their backend/system" sense; i.e. this uid for the market # as defined (internally) in some namespace defined by the broker # service. 
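+    #
+    # eg. `ib` uses a contract's numeric "conid" while `kraken`
+    # uses the pair's altname (cf. the `515416577` and `"XXMRZEUR"`
+    # key values shown in each backend's README example).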
- bsuid: str | int | None = None - - @property - def bs_mktid(self) -> str | int | None: - print(f'STOP USING .bsuid` for {self.fqme}') - return self.bs_mktid + bs_mktid: str | int | None = None # XXX NOTE: this will come from the `MktPair` # instead of defined here right? diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 43f62ed9..288a702e 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -45,7 +45,8 @@ from ._ledger import ( ) from ._mktinfo import ( Symbol, - # MktPair, + MktPair, + Asset, unpack_fqsn, ) from .. import config @@ -63,7 +64,7 @@ class Position(Struct): transaction history. ''' - symbol: Symbol # | MktPair + symbol: Symbol | MktPair # can be +ve or -ve for long/short size: float @@ -72,17 +73,17 @@ class Position(Struct): # zero for the entirety of the current "trade state". ppu: float - # unique backend symbol id - bsuid: str + # unique "backend system market id" + bs_mktid: str - split_ratio: Optional[int] = None + split_ratio: int | None = None # ordered record of known constituent trade messages clears: dict[ Union[str, int, Status], # trade id dict[str, Any], # transaction history summaries ] = {} - first_clear_dt: Optional[datetime] = None + first_clear_dt: datetime | None = None expiry: Optional[datetime] = None @@ -117,19 +118,34 @@ class Position(Struct): fqsn = s.fqme broker, key, suffix = unpack_fqsn(fqsn) - sym_info = s.broker_info[broker] - d['asset_type'] = sym_info['asset_type'] - d['price_tick_size'] = ( - sym_info.get('price_tick_size') - or - s.tick_size - ) - d['lot_tick_size'] = ( - sym_info.get('lot_tick_size') - or - s.lot_tick_size - ) + if isinstance(s, Symbol): + sym_info = s.broker_info[broker] + d['asset_type'] = sym_info['asset_type'] + d['price_tick'] = ( + sym_info.get('price_tick_size') + or + s.tick_size + ) + d['size_tick'] = ( + sym_info.get('lot_tick_size') + or + s.lot_tick_size + ) + + # the newwww wayyy B) + else: + mkt = s + assert isinstance(mkt, MktPair) + + # an asset resolved mkt where we have ``Asset`` info about + # each tradeable asset in the market. + if mkt.resolved: + dst: Asset = mkt.dst + d['asset_type'] = dst.atype + + d['price_tick'] = mkt.price_tick + d['size_tick'] = mkt.size_tick if self.expiry is None: d.pop('expiry', None) @@ -217,14 +233,19 @@ class Position(Struct): # XXX: better place to do this? symbol = self.symbol - lot_size_digits = symbol.lot_size_digits + # TODO: switch to new fields..? + # .size_tick_digits, .price_tick_digits + size_tick_digits = symbol.lot_size_digits + price_tick_digits = symbol.tick_size_digits + self.ppu = round( + # TODO: change this to ppu? msg['avg_price'], - ndigits=symbol.tick_size_digits, + ndigits=price_tick_digits, ) self.size = round( msg['size'], - ndigits=lot_size_digits, + ndigits=size_tick_digits, ) @property @@ -490,7 +511,7 @@ class PpTable(Struct): reverse=True, ): pp = pps.setdefault( - t.bsuid, + t.bs_mktid, # if no existing pp, allocate fresh one. Position( @@ -500,7 +521,7 @@ class PpTable(Struct): ) if not t.sym else t.sym, size=0.0, ppu=0.0, - bsuid=t.bsuid, + bs_mktid=t.bs_mktid, expiry=t.expiry, ) ) @@ -526,10 +547,10 @@ class PpTable(Struct): # update clearing table pp.add_clear(t) - updated[t.bsuid] = pp + updated[t.bs_mktid] = pp # minimize clears tables and update sizing. 
- for bsuid, pp in updated.items(): + for bs_mktid, pp in updated.items(): pp.ensure_state() # deliver only the position entries that were actually updated @@ -557,14 +578,8 @@ class PpTable(Struct): open_pp_objs: dict[str, Position] = {} pp_objs = self.pps - for bsuid in list(pp_objs): - pp = pp_objs[bsuid] - - # XXX: debug hook for size mismatches - # qqqbsuid = 320227571 - # if bsuid == qqqbsuid: - # breakpoint() - + for bs_mktid in list(pp_objs): + pp = pp_objs[bs_mktid] pp.ensure_state() if ( @@ -583,10 +598,10 @@ class PpTable(Struct): # ignored; the closed positions won't be written to the # ``pps.toml`` since ``pp_active_entries`` above is what's # written. - closed_pp_objs[bsuid] = pp + closed_pp_objs[bs_mktid] = pp else: - open_pp_objs[bsuid] = pp + open_pp_objs[bs_mktid] = pp return open_pp_objs, closed_pp_objs @@ -600,7 +615,7 @@ class PpTable(Struct): # we don't store in the ``pps.toml``. to_toml_dict = {} - for bsuid, pos in active.items(): + for bs_mktid, pos in active.items(): # keep the minimal amount of clears that make up this # position since the last net-zero state. @@ -674,7 +689,7 @@ def load_pps_from_ledger( Open a ledger file by broker name and account and read in and process any trade records into our normalized ``Transaction`` form and then update the equivalent ``Pptable`` and deliver the two - bsuid-mapped dict-sets of the transactions and pps. + bs_mktid-mapped dict-sets of the transactions and pps. ''' with ( @@ -690,9 +705,9 @@ def load_pps_from_ledger( if filter_by: records = {} - bsuids = set(filter_by) + bs_mktids = set(filter_by) for tid, r in src_records.items(): - if r.bsuid in bsuids: + if r.bs_mktid in bs_mktids: records[tid] = r else: records = src_records @@ -868,22 +883,35 @@ def open_pps( # unmarshal/load ``pps.toml`` config entries into object form # and update `PpTable` obj entries. - for fqsn, entry in pps.items(): - bsuid = entry['bsuid'] - symbol = Symbol.from_fqsn( - fqsn, + for fqme, entry in pps.items(): - # NOTE & TODO: right now we fill in the defaults from - # `.data._source.Symbol` but eventually these should always - # either be already written to the pos table or provided at - # write time to ensure always having these values somewhere - # and thus allowing us to get our pos sizing precision - # correct! - info={ - 'asset_type': entry.get('asset_type', ''), - 'price_tick_size': entry.get('price_tick_size', 0.01), - 'lot_tick_size': entry.get('lot_tick_size', 0.0), - } + # atype = entry.get('asset_type', '') + + # unique broker market id + bs_mktid = ( + entry.get('bsuid') + or entry.get('bs_mktid') + ) + price_tick = ( + entry.get('price_tick_size') + or entry.get('price_tick') + or 0.01 + ) + size_tick = ( + entry.get('lot_tick_size') + or entry.get('size_tick') + or 0.0 + ) + + # load the pair using the fqme which + # will make the pair "unresolved" until + # the backend broker actually loads + # the market and position info. + mkt = MktPair.from_fqme( + fqme, + price_tick=price_tick, + size_tick=size_tick, + bs_mktid=bs_mktid ) # convert clears sub-tables (only in this form @@ -893,7 +921,7 @@ def open_pps( # index clears entries in "object" form by tid in a top # level dict instead of a list (as is presented in our # ``pps.toml``). - clears = pp_objs.setdefault(bsuid, {}) + clears = pp_objs.setdefault(bs_mktid, {}) # TODO: should be make a ``Struct`` for clear/event entries? 
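        # (a hypothetical shape for such a struct, mirroring the
        # per-clear fields already serialized to ``pps.toml``:
        #
        #     class Clear(Struct):
        #         tid: str
        #         dt: datetime
        #         price: float
        #         size: float
        #         cost: float
        # )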
# convert "clear events table" from the toml config (list of @@ -908,9 +936,9 @@ def open_pps( clears_table['dt'] = dt trans.append(Transaction( - fqsn=bsuid, - sym=symbol, - bsuid=bsuid, + fqsn=bs_mktid, + sym=mkt, + bs_mktid=bs_mktid, tid=tid, size=clears_table['size'], price=clears_table['price'], @@ -933,13 +961,13 @@ def open_pps( if expiry: expiry = pendulum.parse(expiry) - pp = pp_objs[bsuid] = Position( - symbol, + pp = pp_objs[bs_mktid] = Position( + mkt, size=size, ppu=ppu, split_ratio=split_ratio, expiry=expiry, - bsuid=entry['bsuid'], + bs_mktid=bs_mktid, ) # XXX: super critical, we need to be sure to include diff --git a/piker/brokers/ib/README.rst b/piker/brokers/ib/README.rst index c8661317..d56b52ca 100644 --- a/piker/brokers/ib/README.rst +++ b/piker/brokers/ib/README.rst @@ -127,7 +127,7 @@ your ``pps.toml`` file will have position entries like, [ib.algopaper."mnq.globex.20221216"] size = -1.0 ppu = 12423.630576923071 - bsuid = 515416577 + bs_mktid = 515416577 expiry = "2022-12-16T00:00:00+00:00" clears = [ { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" }, diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 0944bf86..66dfe212 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -335,12 +335,12 @@ async def update_and_audit_msgs( msgs: list[BrokerdPosition] = [] for p in pps: - bsuid = p.bsuid + bs_mktid = p.bs_mktid # retreive equivalent ib reported position message # for comparison/audit versus the piker equivalent # breakeven pp calcs. - ibppmsg = cids2pps.get((acctid, bsuid)) + ibppmsg = cids2pps.get((acctid, bs_mktid)) if ibppmsg: msg = BrokerdPosition( @@ -555,18 +555,18 @@ async def trades_dialogue( # collect all ib-pp reported positions so that we can be # sure know which positions to update from the ledger if # any are missing from the ``pps.toml`` - bsuid, msg = pack_position(pos) + bs_mktid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] acctid = acctid.strip('ib.') - cids2pps[(acctid, bsuid)] = msg + cids2pps[(acctid, bs_mktid)] = msg assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') ledger = ledgers[acctid] table = tables[acctid] - pp = table.pps.get(bsuid) + pp = table.pps.get(bs_mktid) if ( not pp or pp.size != msg.size @@ -605,12 +605,12 @@ async def trades_dialogue( # the updated output (maybe this is a bug?) but # if you create a pos from TWS and then load it # from the api trades it seems we get a key - # error from ``update[bsuid]`` ? - pp = table.pps.get(bsuid) + # error from ``update[bs_mktid]`` ? + pp = table.pps.get(bs_mktid) if not pp: log.error( f'The contract id for {msg} may have ' - f'changed to {bsuid}\nYou may need to ' + f'changed to {bs_mktid}\nYou may need to ' 'adjust your ledger for this, skipping ' 'for now.' ) @@ -620,8 +620,8 @@ async def trades_dialogue( # the updated output (maybe this is a bug?) but # if you create a pos from TWS and then load it # from the api trades it seems we get a key - # error from ``update[bsuid]`` ? - pp = table.pps[bsuid] + # error from ``update[bs_mktid]`` ? + pp = table.pps[bs_mktid] pairinfo = pp.symbol if msg.size != pp.size: log.error( @@ -760,7 +760,7 @@ async def emit_pp_update( # re-formatted pps as msgs to the ems. 
for pos in filter( bool, - [active.get(r.bsuid), closed.get(r.bsuid)] + [active.get(r.bs_mktid), closed.get(r.bs_mktid)] ): msgs = await update_and_audit_msgs( acctid, @@ -1225,7 +1225,7 @@ def norm_trade_records( cost=comms, dt=dt, expiry=expiry, - bsuid=conid, + bs_mktid=conid, ), key=lambda t: t.dt ) diff --git a/piker/clearing/_allocate.py b/piker/clearing/_allocate.py index 023d1e92..657ba8e1 100644 --- a/piker/clearing/_allocate.py +++ b/piker/clearing/_allocate.py @@ -206,7 +206,7 @@ class Allocator(Struct): symbol=sym, size=order_size, ppu=price, - bsuid=sym, + bs_mktid=sym, ) ) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 1726aed6..bfec7260 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -258,7 +258,7 @@ class PaperBoi(Struct): price=price, cost=0, # TODO: cost model dt=pendulum.from_timestamp(fill_time_s), - bsuid=key, + bs_mktid=key, ) with ( diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 43598d9b..e6c4ed33 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -737,7 +737,7 @@ async def open_order_mode( ppu=0, # XXX: BLEH, do we care about this on the client side? - bsuid=symbol, + bs_mktid=symbol.key, ) # allocator config From 71fc8b95dd7899d1ab9537dbcc039ccf29557fb3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 17 Mar 2023 20:18:46 -0400 Subject: [PATCH 023/294] Flip to `.bs_mktid` in `ib` and `kraken` --- piker/brokers/kraken/README.rst | 2 +- piker/brokers/kraken/api.py | 38 ++++++++++++++------------------- piker/brokers/kraken/broker.py | 18 ++++++++-------- 3 files changed, 26 insertions(+), 32 deletions(-) diff --git a/piker/brokers/kraken/README.rst b/piker/brokers/kraken/README.rst index 80e56913..b85c0c3e 100644 --- a/piker/brokers/kraken/README.rst +++ b/piker/brokers/kraken/README.rst @@ -58,7 +58,7 @@ your ``pps.toml`` file will have position entries like, [kraken.spot."xmreur.kraken"] size = 4.80907954 ppu = 103.97000000 - bsuid = "XXMRZEUR" + bs_mktid = "XXMRZEUR" clears = [ { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" }, ] diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index a7415b34..013d8b01 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -255,14 +255,14 @@ class Client: 'Balance', {}, ) - by_bsuid = resp['result'] + by_bsmktid = resp['result'] # TODO: we need to pull out the "asset" decimals # data and return a `decimal.Decimal` instead here! 
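        # eg. an asset info msg with 'decimals': 10 would map to a
        # tx size tick of Decimal('0.0000000001') via `digits_to_dec()`,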
# using the underlying Asset return { self._atable[sym].lower(): float(bal) - for sym, bal in by_bsuid.items() + for sym, bal in by_bsmktid.items() } async def get_assets(self) -> dict[str, dict]: @@ -292,12 +292,12 @@ class Client: ''' assets = await self.get_assets() - for bsuid, info in assets.items(): + for bs_mktid, info in assets.items(): - aname = self._atable[bsuid] = info['altname'] + aname = self._atable[bs_mktid] = info['altname'] aclass = info['aclass'] - self.assets[bsuid] = Asset( + self.assets[bs_mktid] = Asset( name=aname.lower(), atype=f'crypto_{aclass}', tx_tick=digits_to_dec(info['decimals']), @@ -398,31 +398,25 @@ class Client: fqsn = asset_key + '.kraken' # pair = MktPair( - # src=Asset( - # name=asset_key, - # type='crypto_currency', - # tx_tick=asset_info['decimals'] - - # tx_tick= - # info=asset_info, - # ) + # src=asset, + # dst=asset, # broker='kraken', # ) - pairinfo = Symbol.from_fqsn( - fqsn, - info={ - 'asset_type': 'crypto', - 'lot_tick_size': asset.tx_tick, - }, - ) + # pairinfo = Symbol.from_fqsn( + # fqsn, + # info={ + # 'asset_type': 'crypto', + # 'lot_tick_size': asset.tx_tick, + # }, + # ) tran = Transaction( fqsn=fqsn, - sym=pairinfo, + sym=asset, tid=entry['txid'], dt=pendulum.from_timestamp(entry['time']), - bsuid=f'{asset_key}{src_asset}', + bs_mktid=f'{asset_key}{src_asset}', size=-1*( float(entry['amount']) + diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 5509968a..5a153381 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -518,7 +518,7 @@ async def trades_dialogue( if ( dst == src_fiat or not any( - dst in bsuid for bsuid in table.pps + dst in bs_mktid for bs_mktid in table.pps ) ): log.warning( @@ -534,11 +534,11 @@ async def trades_dialogue( src2dst: dict[str, str] = {} - for bsuid in table.pps: + for bs_mktid in table.pps: likely_pair = get_likely_pair( src_fiat, dst, - bsuid, + bs_mktid, ) if likely_pair: src2dst[src_fiat] = dst @@ -558,7 +558,7 @@ async def trades_dialogue( ): log.warning( f'`kraken` account says you have a ZERO ' - f'balance for {bsuid}:{pair}\n' + f'balance for {bs_mktid}:{pair}\n' f'but piker seems to think `{pp.size}`\n' 'This is likely a discrepancy in piker ' 'accounting if the above number is' @@ -594,11 +594,11 @@ async def trades_dialogue( # in the ``pps.toml`` for the necessary pair # yet and thus this likely pair grabber will # likely fail. - for bsuid in table.pps: + for bs_mktid in table.pps: likely_pair = get_likely_pair( src_fiat, dst, - bsuid, + bs_mktid, ) if likely_pair: break @@ -1198,10 +1198,10 @@ def norm_trade_records( }[record['type']] # we normalize to kraken's `altname` always.. - bsuid, pair_info = Client.normalize_symbol( + bs_mktid, pair_info = Client.normalize_symbol( record['pair'] ) - fqsn = f'{bsuid}.kraken' + fqsn = f'{bs_mktid}.kraken' dst, src = pair_info.wsname.lower().split('/') # mkpair = MktPair( @@ -1236,7 +1236,7 @@ def norm_trade_records( price=float(record['price']), cost=float(record['fee']), dt=pendulum.from_timestamp(float(record['time'])), - bsuid=bsuid, + bs_mktid=bs_mktid, # XXX: there are no derivs on kraken right? 
# expiry=expiry, From 9e2eff507e5647e5e7604d51d5eee4a755905643 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 17 Mar 2023 20:20:41 -0400 Subject: [PATCH 024/294] Drop shm logging levels to debug over warning --- piker/data/_sharedmem.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/data/_sharedmem.py b/piker/data/_sharedmem.py index bd40ad7e..00865731 100644 --- a/piker/data/_sharedmem.py +++ b/piker/data/_sharedmem.py @@ -649,7 +649,7 @@ def maybe_open_shm_array( token = _known_tokens[key] return attach_shm_array(token=token, **kwargs), False except KeyError: - log.warning(f"Could not find {key} in shms cache") + log.debug(f"Could not find {key} in shms cache") if dtype: token = _make_token( key, @@ -659,7 +659,7 @@ def maybe_open_shm_array( try: return attach_shm_array(token=token, **kwargs), False except FileNotFoundError: - log.warning(f"Could not attach to shm with token {token}") + log.debug(f"Could not attach to shm with token {token}") # This actor does not know about memory # associated with the provided "key". From da10422160b186d223d389ff9a9deac9ee6126f3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 19:55:04 -0400 Subject: [PATCH 025/294] `kraken`: add `Client.mkt_info()` Allows building a `MktPair` from the backend specific `Pair` for eventual use in the data feed layer. Also adds `Pair.price/tick_size` to get to the expected tick precision info format. --- piker/brokers/kraken/api.py | 64 ++++++++++++++++++++++--------------- 1 file changed, 39 insertions(+), 25 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 013d8b01..e9acc4a7 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -24,7 +24,6 @@ from decimal import Decimal import itertools from typing import ( Any, - Optional, Union, ) import time @@ -44,9 +43,8 @@ from piker import config from piker.data.types import Struct from piker.accounting._mktinfo import ( Asset, - digits_to_dec, MktPair, - Symbol, + digits_to_dec, ) from piker.brokers._util import ( resproc, @@ -161,6 +159,14 @@ class Pair(Struct): short_position_limit: float = 0 long_position_limit: float = float('inf') + @property + def price_tick(self) -> Decimal: + return digits_to_dec(self.pair_decimals) + + @property + def size_tick(self) -> Decimal: + return digits_to_dec(self.lot_decimals) + class Client: @@ -395,24 +401,10 @@ class Client: # quite the same as a commisions cost necessarily..) 
cost = float(entry['fee'])
 
-            fqsn = asset_key + '.kraken'
+            fqme = asset_key + '.kraken'
 
-            # pair = MktPair(
-            #     src=asset,
-            #     dst=asset,
-            #     broker='kraken',
-            # )
-
-            # pairinfo = Symbol.from_fqsn(
-            #     fqsn,
-            #     info={
-            #         'asset_type': 'crypto',
-            #         'lot_tick_size': asset.tx_tick,
-            #     },
-            # )
-
-            tran = Transaction(
-                fqsn=fqsn,
+            tx = Transaction(
+                fqsn=fqme,
                 sym=asset,
                 tid=entry['txid'],
                 dt=pendulum.from_timestamp(entry['time']),
@@ -429,7 +421,7 @@ class Client:
                 # XXX: see note above
                 cost=cost,
             )
-            trans[tran.tid] = tran
+            trans[tx.tid] = tx
 
         return trans
 
@@ -478,9 +470,9 @@ class Client:
         # txid is a transaction id given by kraken
         return await self.endpoint('CancelOrder', {"txid": reqid})
 
-    async def symbol_info(
+    async def pair_info(
         self,
-        pair: Optional[str] = None,
+        pair: str | None = None,
 
     ) -> dict[str, Pair] | Pair:
 
@@ -501,7 +493,29 @@ class Client:
             _, data = next(iter(pairs.items()))
             return Pair(**data)
         else:
-            return {key: Pair(**data) for key, data in pairs.items()}
+            return {
+                key: Pair(**data)
+                for key, data in pairs.items()
+            }
+
+    async def mkt_info(
+        self,
+        pair_str: str,
+
+    ) -> MktPair:
+
+        pair_info: Pair  # = await self.pair_info(pair)
+        bs_mktid: str
+        bs_mktid, pair_info = Client.normalize_symbol(pair_str)
+        dst_asset = self.assets[pair_info.base]
+
+        return MktPair(
+            dst=dst_asset,
+            price_tick=pair_info.price_tick,
+            size_tick=pair_info.size_tick,
+            bs_mktid=bs_mktid,
+            broker='kraken',
+        )
 
     async def cache_symbols(self) -> dict:
         '''
@@ -514,7 +528,7 @@ class Client:
 
         '''
         if not self._pairs:
-            self._pairs.update(await self.symbol_info())
+            self._pairs.update(await self.pair_info())
 
         # table of all ws and rest keys to their alt-name values.
         ntable: dict[str, str] = {}
From 7aba2905414c173d6922ea8c4ee981bc24da8266 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 20 Mar 2023 20:01:26 -0400
Subject: [PATCH 026/294] `kraken`: use `MktPair` in transactions

---
 piker/brokers/kraken/broker.py | 38 +++++++---------------------------
 1 file changed, 8 insertions(+), 30 deletions(-)

diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py
index 5a153381..d32b6321 100644
--- a/piker/brokers/kraken/broker.py
+++ b/piker/brokers/kraken/broker.py
@@ -49,9 +49,7 @@ from piker.accounting import (
     get_likely_pair,
 )
 from piker.accounting._mktinfo import (
-    Symbol,
     MktPair,
-    digits_to_dec,
 )
 from piker.clearing._messages import (
     Order,
@@ -1201,45 +1199,25 @@ def norm_trade_records(
         bs_mktid, pair_info = Client.normalize_symbol(
             record['pair']
         )
-        fqsn = f'{bs_mktid}.kraken'
+        fqme = f'{bs_mktid}.kraken'
 
         dst, src = pair_info.wsname.lower().split('/')
 
-        mktpair = Symbol.from_fqsn(
-            fqsn,
-            info={
-                'lot_size_digits': pair_info.lot_decimals,
-                'lot_tick_size': digits_to_dec(
-                    pair_info.lot_decimals,
-                ),
-                'tick_size_digits': pair_info.pair_decimals,
-                'price_tick_size': digits_to_dec(
-                    pair_info.pair_decimals,
-                ),
-                'asset_type': 'crypto',
-            },
+        mkt = MktPair.from_fqme(
+            fqme,
+            price_tick=pair_info.price_tick,
+            size_tick=pair_info.size_tick,
+            bs_mktid=bs_mktid,
         )
 
         records[tid] = Transaction(
-            fqsn=fqsn,
-            sym=mktpair,
+            fqsn=fqme,
+            sym=mkt,
             tid=tid,
             size=size,
             price=float(record['price']),
             cost=float(record['fee']),
             dt=pendulum.from_timestamp(float(record['time'])),
             bs_mktid=bs_mktid,
-
-            # XXX: there are no derivs on kraken
right? - # expiry=expiry, ) return records From cfbba9e0b3e3acd1f2d794526ebccdc09b5856fe Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 20:02:27 -0400 Subject: [PATCH 027/294] Add `MktPair._atype` for back-compat, always `str(.dst)` --- piker/accounting/_mktinfo.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 18a6209c..dc6ee4a6 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -215,6 +215,8 @@ class MktPair(Struct, frozen=True): # strike price, call or put, swap type, exercise model, etc. contract_info: str | None = None + _atype: str = '' + @classmethod def from_msg( self, @@ -239,6 +241,8 @@ class MktPair(Struct, frozen=True): size_tick: float | str, bs_mktid: str, + **kwargs, + ) -> MktPair: broker, key, suffix = unpack_fqme(fqme) @@ -254,6 +258,8 @@ class MktPair(Struct, frozen=True): size_tick=size_tick, bs_mktid=bs_mktid, broker=broker, + + **kwargs, ) @property @@ -267,7 +273,7 @@ class MktPair(Struct, frozen=True): "symbol". ''' - return maybe_cons_tokens([self.dst, self.src]) + return maybe_cons_tokens([str(self.dst), self.src]) # NOTE: the main idea behind an fqme is to map a "market address" # to some endpoint from a transaction provider (eg. a broker) such @@ -351,7 +357,10 @@ class MktPair(Struct, frozen=True): # TODO: BACKWARD COMPAT, TO REMOVE? @property def type_key(self) -> str: - return str(self.dst.atype) + if isinstance(self.dst, Asset): + return str(self.dst.atype) + + return self._atype @property def tick_size_digits(self) -> int: From 66782d29d10393af3ae9404fe38af0122903d1ad Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 20:04:42 -0400 Subject: [PATCH 028/294] `kraken`: use `Client.mkt_info()` in quotes feed init msg --- piker/brokers/kraken/feed.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index b8228a55..ae42485e 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -35,6 +35,9 @@ from trio_util import trio_async_generator import tractor import trio +from piker.accounting._mktinfo import ( + MktPair, +) from piker._cacheables import open_cached_client from piker.brokers._util import ( BrokerError, @@ -287,29 +290,26 @@ async def stream_quotes( get_console_log(loglevel or tractor.current_actor().loglevel) ws_pairs = {} - sym_infos = {} - - async with open_cached_client('kraken') as client, send_chan as send_chan: + mkt_infos = {} + async with ( + open_cached_client('kraken') as client, + send_chan as send_chan, + ): # keep client cached for real-time section for sym in symbols: # transform to upper since piker style is always lower sym = sym.upper() - si: Pair = await client.symbol_info(sym) - # try: - # si = Pair(**sym_info) # validation - # except TypeError: - # fields_diff = set(sym_info) - set(Pair.__struct_fields__) - # raise TypeError( - # f'Missing msg fields {fields_diff}' - # ) - syminfo = si.to_dict() - syminfo['price_tick_size'] = 1. / 10**si.pair_decimals - syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals - syminfo['asset_type'] = 'crypto' - sym_infos[sym] = syminfo - ws_pairs[sym] = si.wsname + pair: Pair = await client.pair_info(sym) + mkt: MktPair = await client.mkt_info(sym) + mktinfo = mkt.to_dict() + mkt_infos[sym] = mktinfo + + # TODO: remove this once we drop ``Symbol``!! 
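+            # (these legacy entries just back-fill what the old
+            # `Symbol`-based init-msg consumers still expect, using
+            # the new `MktPair`-derived values)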
+ mktinfo['asset_type'] = mkt.dst.atype + + ws_pairs[sym] = pair.wsname symbol = symbols[0].lower() @@ -317,7 +317,7 @@ async def stream_quotes( # pass back token, and bool, signalling if we're the writer # and that history has been written symbol: { - 'symbol_info': sym_infos[sym], + 'symbol_info': mkt_infos[sym], 'shm_write_opts': {'sum_tick_vml': False}, 'fqsn': sym, }, From 8fdff8769d029ce4afd0cdf65580871ffe55aaa1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:20:46 -0400 Subject: [PATCH 029/294] Ensure `Symbol` tick sizes are decoded as `Decimal`.. --- piker/accounting/_mktinfo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index dc6ee4a6..63b89f75 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -421,8 +421,8 @@ class Symbol(Struct): ''' key: str - tick_size: float = 0.01 - lot_tick_size: float = 0.0 # "volume" precision as min step value + tick_size: Decimal = 0.01 + lot_tick_size: Decimal = 0.0 # "volume" precision as min step value suffix: str = '' broker_info: dict[str, dict[str, Any]] = {} From 6431071b2a737fdfc3b90d896e1781b41fe68315 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:21:57 -0400 Subject: [PATCH 030/294] Pass old fields in sym info init msg section --- piker/brokers/kraken/feed.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index ae42485e..55ce4f76 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -308,6 +308,8 @@ async def stream_quotes( # TODO: remove this once we drop ``Symbol``!! mktinfo['asset_type'] = mkt.dst.atype + mktinfo['price_tick_size'] = mkt.price_tick + mktinfo['lot_tick_size'] = mkt.size_tick ws_pairs[sym] = pair.wsname From 335e8d10d4bf83e2f812e07d0f1e05bc622cc8c0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:22:21 -0400 Subject: [PATCH 031/294] Cast back to float from decimal for cursor y-increment --- piker/ui/_cursor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/ui/_cursor.py b/piker/ui/_cursor.py index 79df305b..d8aa2cf7 100644 --- a/piker/ui/_cursor.py +++ b/piker/ui/_cursor.py @@ -363,7 +363,7 @@ class Cursor(pg.GraphicsObject): # value used for rounding y-axis discreet tick steps # computing once, up front, here cuz why not - self._y_incr_mult = 1 / self.linked._symbol.tick_size + self._y_incr_mult = float(1 / self.linked._symbol.tick_size) # line width in view coordinates self._lw = self.pixelWidth() * self.lines_pen.width() From 7a8e615fa6353644dae899c27dc814fda3f5ab67 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:25:42 -0400 Subject: [PATCH 032/294] Explicitly decode tick sizes as decimal for symbol loading in `Flume` --- piker/data/flows.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/piker/data/flows.py b/piker/data/flows.py index 19615f61..d7594c01 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -22,6 +22,7 @@ real-time data processing data-structures. """ from __future__ import annotations +from decimal import Decimal from typing import ( TYPE_CHECKING, ) @@ -172,7 +173,11 @@ class Flume(Struct): # TODO: get native msgspec decoding for these workinn def to_msg(self) -> dict: + msg = self.to_dict() + + # TODO: do we even need to convert to dict + # first now? 
msg['symbol'] = msg['symbol'].to_dict() # can't serialize the stream or feed objects, it's expected @@ -184,7 +189,14 @@ class Flume(Struct): @classmethod def from_msg(cls, msg: dict) -> dict: + + # XXX NOTE: ``msgspec`` can encode `Decimal` + # but it doesn't decide to it by default since + # we aren't spec-cing these msgs as structs... symbol = Symbol(**msg.pop('symbol')) + symbol.tick_size = Decimal(symbol.tick_size) + symbol.lot_tick_size = Decimal(symbol.lot_tick_size) + return cls( symbol=symbol, **msg, From 17b976eb88ef520b476e8a731244ab01164447bf Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:54:37 -0400 Subject: [PATCH 033/294] Use `MktPair` building `Position` objects in `PpTable.update_from_trans()` --- piker/accounting/_pos.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 288a702e..d3ac3acf 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -47,7 +47,7 @@ from ._mktinfo import ( Symbol, MktPair, Asset, - unpack_fqsn, + unpack_fqme, ) from .. import config from ..brokers import get_brokermod @@ -117,7 +117,7 @@ class Position(Struct): s = d.pop('symbol') fqsn = s.fqme - broker, key, suffix = unpack_fqsn(fqsn) + broker, key, suffix = unpack_fqme(fqsn) if isinstance(s, Symbol): sym_info = s.broker_info[broker] @@ -510,18 +510,29 @@ class PpTable(Struct): key=lambda t: t.dt, reverse=True, ): + fqme = t.fqme + bs_mktid = t.bs_mktid + + # template the mkt-info presuming a legacy market ticks + # if no info exists in the transactions.. + mkt = t.sys + if not mkt: + mkt = MktPair.from_fqme( + fqme, + price_tick='0.01', + size_tick='0.0', + bs_mktid=bs_mktid, + ) + pp = pps.setdefault( - t.bs_mktid, + bs_mktid, # if no existing pp, allocate fresh one. Position( - Symbol.from_fqsn( - t.fqsn, - info={}, - ) if not t.sym else t.sym, + mkt, size=0.0, ppu=0.0, - bs_mktid=t.bs_mktid, + bs_mktid=bs_mktid, expiry=t.expiry, ) ) From 55b6cba31efdc4152fdb55d92aa54d7d6ef06192 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 20 Mar 2023 21:55:39 -0400 Subject: [PATCH 034/294] Encode a `mktpair` field if passed in msg by caller --- piker/data/flows.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/data/flows.py b/piker/data/flows.py index d7594c01..da9a73ba 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -32,6 +32,7 @@ import pendulum import numpy as np from ..accounting._mktinfo import ( + MktPair, Symbol, ) from .types import Struct @@ -90,7 +91,7 @@ class Flume(Struct): queuing properties. ''' - symbol: Symbol + symbol: Symbol | MktPair first_quote: dict _rt_shm_token: _Token @@ -178,7 +179,11 @@ class Flume(Struct): # TODO: do we even need to convert to dict # first now? + # TODO: drop the former. 
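+        # ("the former" being the legacy `symbol` entry, which can
+        # go away once all IPC consumers read the `mktpair` field
+        # instead)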
msg['symbol'] = msg['symbol'].to_dict()
+ mktpair = msg.get('mktpair')
+ if mktpair:
+ msg['mktpair'] = mktpair.to_dict()

 # can't serialize the stream or feed objects, it's expected
 # you'll have a ref to it since this msg should be rxed on

From 99199905b6647341c3c042a4c027aac3e5ab8ea4 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 21 Mar 2023 00:36:16 -0400
Subject: [PATCH 035/294] Add parity mapping from altnames back to themselves
 in `Client._ntable`

--- piker/brokers/kraken/api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py
index e9acc4a7..1223924a 100644
--- a/piker/brokers/kraken/api.py
+++ b/piker/brokers/kraken/api.py
@@ -538,7 +538,7 @@ class Client:
 pair: Pair = self._pairs[rest_key]
 altname = pair.altname
 wsname = pair.wsname
- ntable[rest_key] = ntable[wsname] = altname
+ ntable[altname] = ntable[rest_key] = ntable[wsname] = altname

 # register the pair under all monikers, a giant flat
 # surjection of all possible names to each info obj.

From 677a6fc11386f699c6de42b758e216906ac1a89d Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 21 Mar 2023 00:39:23 -0400
Subject: [PATCH 036/294] Cast to float from decimal for level line y-increment

Qt only accepts `float` to its APIs obvs..
--- piker/ui/_cursor.py | 11 +++++++++--
 piker/ui/_lines.py | 2 +-
 2 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/piker/ui/_cursor.py b/piker/ui/_cursor.py
index d8aa2cf7..83986762 100644
--- a/piker/ui/_cursor.py
+++ b/piker/ui/_cursor.py
@@ -363,7 +363,8 @@ class Cursor(pg.GraphicsObject):

 # value used for rounding y-axis discreet tick steps
 # computing once, up front, here cuz why not
- self._y_incr_mult = float(1 / self.linked._symbol.tick_size)
+ mkt = self.linked._symbol
+ self._y_tick_mult = 1/float(mkt.price_tick)

 # line width in view coordinates
 self._lw = self.pixelWidth() * self.lines_pen.width()
@@ -571,9 +572,15 @@ class Cursor(pg.GraphicsObject):
 line_offset = self._lw / 2

 # round y value to nearest tick step
- m = self._y_incr_mult
+ m = self._y_tick_mult
 iy = round(y * m) / m
 vl_y = iy - line_offset
+ # print(
+ # f'tick: {self._y_tick}\n'
+ # f'y: {y}\n'
+ # f'iy: {iy}\n'
+ # f'vl_y: {vl_y}\n'
+ # )

 # update y-range items
 if iy != last_iy:
diff --git a/piker/ui/_lines.py b/piker/ui/_lines.py
index 4469a673..59796d4f 100644
--- a/piker/ui/_lines.py
+++ b/piker/ui/_lines.py
@@ -126,7 +126,7 @@ class LevelLine(pg.InfiniteLine):
 self._on_drag_start = lambda l: None
 self._on_drag_end = lambda l: None

- self._y_incr_mult = 1 / chart.linked.symbol.tick_size
+ self._y_incr_mult = float(1 / chart.linked.symbol.size_tick)

 self._right_end_sc: float = 0

 # use px caching

From 76cd5519b3a429c4aa7283d75a3244b8833e7f3c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 21 Mar 2023 10:33:38 -0400
Subject: [PATCH 037/294] Fix `Symbol.tick_size_digits`, add `.price/size_tick` props

--- piker/accounting/_mktinfo.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index 63b89f75..fa606fcb 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -454,12 +454,20 @@ class Symbol(Struct):

 @property
 def tick_size_digits(self) -> int:
- return float_digits(self.lot_tick_size)
+ return float_digits(self.tick_size)

 @property
 def lot_size_digits(self) -> int:
 return float_digits(self.lot_tick_size)

+ @property
+ def price_tick(self) -> Decimal:
+ return Decimal(str(self.tick_size))
+
+ @property
+ def size_tick(self) -> Decimal: + return Decimal(str(self.lot_tick_size)) + @property def broker(self) -> str: return list(self.broker_info.keys())[0] From ea9ea4a6d74d71a62fea2a2adf37f0c8a660ad5d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 11:33:16 -0400 Subject: [PATCH 038/294] Rename `float_digits()` -> `dec_digits()`, since decimal. --- piker/accounting/_mktinfo.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index fa606fcb..5d9edbd6 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -69,8 +69,9 @@ AssetTypeName: Literal[ # egs. stock, futer, option, bond etc. -def float_digits( - value: float, +def dec_digits( + value: float | str | Decimal, + ) -> int: ''' Return the number of precision digits read from a decimal or float @@ -85,6 +86,9 @@ def float_digits( ) +float_digits = dec_digits + + def digits_to_dec( ndigits: int, ) -> Decimal: From b9c7e1b0c74f05380bc811d64022849fdb870f48 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 13:38:54 -0400 Subject: [PATCH 039/294] `binance`: deliver mkt precision info as `Decimal` --- piker/brokers/binance.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 37377136..e4818f9b 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -20,6 +20,7 @@ Binance backend """ from contextlib import asynccontextmanager as acm from datetime import datetime +from decimal import Decimal from typing import ( Any, Union, Optional, AsyncGenerator, Callable, @@ -173,10 +174,10 @@ class Client: ) return resproc(resp, log) - async def symbol_info( + async def mkt_info( self, - sym: Optional[str] = None, + sym: str | None = None, ) -> dict[str, Any]: '''Get symbol info for the exchange. 
@@ -208,11 +209,13 @@ class Client: else: return syms + symbol_info = mkt_info + async def cache_symbols( self, ) -> dict: if not self._pairs: - self._pairs = await self.symbol_info() + self._pairs = await self.mkt_info() return self._pairs @@ -224,7 +227,7 @@ class Client: if self._pairs is not None: data = self._pairs else: - data = await self.symbol_info() + data = await self.mkt_info() matches = fuzzy.extractBests( pattern, @@ -476,11 +479,11 @@ async def stream_quotes( # XXX: after manually inspecting the response format we # just directly pick out the info we need - si['price_tick_size'] = float( - filters['PRICE_FILTER']['tickSize'] + si['price_tick_size'] = Decimal( + filters['PRICE_FILTER']['tickSize'].rstrip('0') ) - si['lot_tick_size'] = float( - filters['LOT_SIZE']['stepSize'] + si['lot_tick_size'] = Decimal( + filters['LOT_SIZE']['stepSize'].rstrip('0') ) si['asset_type'] = 'crypto' @@ -585,7 +588,7 @@ async def open_symbol_search( async with ctx.open_stream() as stream: async for pattern in stream: - # results = await client.symbol_info(sym=pattern.upper()) + # results = await client.mkt_info(sym=pattern.upper()) matches = fuzzy.extractBests( pattern, From 25363ebd2e2ec15882b73ccf5c7afdaeb65e260c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 13:40:20 -0400 Subject: [PATCH 040/294] `ib`: deliver mkt precision info as `Decimal` --- piker/brokers/ib/feed.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 358bb066..d16a02a9 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -20,6 +20,7 @@ Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``. from __future__ import annotations import asyncio from contextlib import asynccontextmanager as acm +from decimal import Decimal from dataclasses import asdict from datetime import datetime from functools import partial @@ -765,15 +766,19 @@ async def stream_quotes( }: syminfo['no_vlm'] = True + # XXX: pretty sure we don't need this any more right? # for stocks it seems TWS reports too small a tick size # such that you can't submit orders with that granularity? - min_tick = 0.01 if atype == 'stock' else 0 + # min_price_tick = Decimal('0.01') if atype == 'stock' else 0 + # price_tick = max(price_tick, min_tick) - syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick) + price_tick: Decimal = Decimal(str(syminfo['minTick'])) + size_tick: Decimal = Decimal(str(syminfo['minSize']).rstrip('0')) - # for "legacy" assets, volume is normally discreet, not - # a float - syminfo['lot_tick_size'] = 0.0 + syminfo['price_tick_size'] = price_tick + # NOTE: as you'd expect for "legacy" assets, the "volume + # precision" is normally discreet. + syminfo['lot_tick_size'] = size_tick ibclient = proxy._aio_ns.ib.client host, port = ibclient.host, ibclient.port From 6d5d9731edfc2fb6470383bf651920d527ffab0d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 13:59:06 -0400 Subject: [PATCH 041/294] Implement `MktPair.from_msg()` constructor Handle case where `'dst'` field is just a `str` (in which case delegate to `.from_fqme()`) as well as do `Asset` loading and use our `Struct.copy()` to enforce type-casting to (for eg. `Decimal`s) such that we'll now capture typing errors despite IPC transport. Change `Symbol.tick_size` and `.lot_tick_size` defaults to decimal for proper casting and type `MktPair.atype: str` since `msgspec` can't cast to `AssetTypeName` without special handling.. 
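
To illustrate the `.copy()` type-cast idea, a minimal sketch (NOT part of
this diff: the struct name and field are made up, and it assumes `msgspec`'s
typed decoding of `Decimal`-annotated fields):

    from decimal import Decimal
    from piker.data.types import Struct

    class MktMsg(Struct):
        price_tick: Decimal

    # values decoded from IPC msgs often arrive as raw builtins..
    msg = MktMsg(price_tick='0.01')

    # ..so `.copy()` (an encode/decode round-trip spec'd against the
    # struct's own type) re-casts each field to its annotation,
    # raising on invalid wire values instead of silently passing
    # through a `str`/`float`:
    assert isinstance(msg.copy().price_tick, Decimal)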
--- piker/accounting/_mktinfo.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 5d9edbd6..fcb6c0f8 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -111,7 +111,7 @@ class Asset(Struct, frozen=True): ''' name: str - atype: AssetTypeName + atype: str # AssetTypeName # minimum transaction size / precision. # eg. for buttcoin this is a "satoshi". @@ -223,7 +223,7 @@ class MktPair(Struct, frozen=True): @classmethod def from_msg( - self, + cls, msg: dict[str, Any], ) -> MktPair: @@ -231,7 +231,17 @@ class MktPair(Struct, frozen=True): Constructor for a received msg-dict normally received over IPC. ''' - raise NotImplementedError + dst_asset_msg = msg.pop('dst') + if isinstance(dst_asset_msg, str): + return cls.from_fqme( + dst_asset_msg, + **msg, + ) + + # NOTE: we call `.copy()` here to ensure + # type casting! + dst = Asset(**dst_asset_msg).copy() + return cls(dst=dst, **msg).copy() @property def resolved(self) -> bool: @@ -264,7 +274,7 @@ class MktPair(Struct, frozen=True): broker=broker, **kwargs, - ) + ).copy() @property def key(self) -> str: @@ -425,8 +435,11 @@ class Symbol(Struct): ''' key: str - tick_size: Decimal = 0.01 - lot_tick_size: Decimal = 0.0 # "volume" precision as min step value + + # precision descriptors for price and vlm + tick_size: Decimal = Decimal('0.01') + lot_tick_size: Decimal = Decimal('0.0') + suffix: str = '' broker_info: dict[str, dict[str, Any]] = {} From 589b3f420166405872696f81732f54ae01ad3f45 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 14:03:30 -0400 Subject: [PATCH 042/294] Default `pps.toml` precision fields to `Decimal` For `price_tick` and `size_tick` we read in `str` and decimal-ize and now correctly fail over to default values of the same type.. Also, always treat `bs_mktid` field as a `str` in TOML form. Drop the strange `clears: dict` var from the loading code (not sure why that was left in smh) and better name `toml_clears_list` for the TOML-loaded-pre-transaction sequence. --- piker/accounting/_pos.py | 41 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index d3ac3acf..724b357f 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -24,6 +24,7 @@ that doesn't try to cuk most humans who prefer to not lose their moneys.. ''' from __future__ import annotations from contextlib import contextmanager as cm +from decimal import Decimal from math import copysign import re from typing import ( @@ -152,7 +153,7 @@ class Position(Struct): elif expiry: d['expiry'] = str(expiry) - toml_clears_list = [] + toml_clears_list: list[dict[str, Any]] = [] # reverse sort so latest clears are at top of section? 
for tid, data in iter_by_dt(clears): @@ -864,6 +865,7 @@ def open_pps( brokername: str, acctid: str, write_on_exit: bool = False, + ) -> Generator[PpTable, None, None]: ''' Read out broker-specific position entries from @@ -899,20 +901,20 @@ def open_pps( # atype = entry.get('asset_type', '') # unique broker market id - bs_mktid = ( + bs_mktid = str( entry.get('bsuid') or entry.get('bs_mktid') ) - price_tick = ( + price_tick = Decimal(str( entry.get('price_tick_size') or entry.get('price_tick') - or 0.01 - ) - size_tick = ( + or '0.01' + )) + size_tick = Decimal(str( entry.get('lot_tick_size') or entry.get('size_tick') - or 0.0 - ) + or '0.0' + )) # load the pair using the fqme which # will make the pair "unresolved" until @@ -925,22 +927,20 @@ def open_pps( bs_mktid=bs_mktid ) + # TODO: RE: general "events" instead of just "clears": + # - make this an `events` field and support more event types + # such as 'split', 'name_change', 'mkt_info', etc.. + # - should be make a ``Struct`` for clear/event entries? convert + # "clear events table" from the toml config (list of a dicts) + # and load it into object form for use in position processing of + # new clear events. + # convert clears sub-tables (only in this form # for toml re-presentation) back into a master table. - clears_list = entry['clears'] - - # index clears entries in "object" form by tid in a top - # level dict instead of a list (as is presented in our - # ``pps.toml``). - clears = pp_objs.setdefault(bs_mktid, {}) - - # TODO: should be make a ``Struct`` for clear/event entries? - # convert "clear events table" from the toml config (list of - # a dicts) and load it into object form for use in position - # processing of new clear events. + toml_clears_list: list[dict[str, Any]] = entry['clears'] trans: list[Transaction] = [] + for clears_table in toml_clears_list: - for clears_table in clears_list: tid = clears_table.pop('tid') dtstr = clears_table['dt'] dt = pendulum.parse(dtstr) @@ -956,7 +956,6 @@ def open_pps( cost=clears_table['cost'], dt=dt, )) - clears[tid] = clears_table size = entry['size'] From 7eb0b1d2492e52ab53efa4da057aeefc37e03661 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 14:08:54 -0400 Subject: [PATCH 043/294] Comment about `Struct.typecast()` conflict with frozen instances --- piker/data/types.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/piker/data/types.py b/piker/data/types.py index 1359526c..c37de233 100644 --- a/piker/data/types.py +++ b/piker/data/types.py @@ -80,6 +80,8 @@ class Struct( msgspec.msgpack.Encoder().encode(self) ) + # NOTE XXX: this won't work on frozen types! + # use ``.copy()`` above in such cases. def typecast( self, # fields: Optional[list[str]] = None, From 2cc80d53ca54b01b1dd249aed34a59c5650f898b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 14:09:57 -0400 Subject: [PATCH 044/294] First stage port of `.data.feed` to `MktPair` Add `MktPair` handling block for when a backend delivers a `mkt_info`-field containing init msg. Adjust the original `Symbol`-style `'symbol_info'` msg processing to do `Decimal` defaults and convert to `MktPair` including slapping in a hacky `_atype: str` field XD General initial name changes to `bs_mktid` and `_fqme` throughout! 
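
For orientation, the two per-mkt init msg shapes the feed allocator now
discriminates between look roughly like this (a sketch only; the key names
match the diff below but all values are illustrative):

    from decimal import Decimal
    from piker.accounting._mktinfo import MktPair

    # legacy, `Symbol`-style entry from an unported backend:
    legacy_entry = {
        'fqsn': 'xbtusdt',
        'symbol_info': {
            'price_tick_size': Decimal('0.1'),
            'lot_tick_size': Decimal('0.00000001'),
            'asset_type': 'crypto',
        },
    }

    # new-style entry delivering an already-built `MktPair`:
    new_entry = {
        'fqsn': 'xbtusdt',
        'mkt_info': MktPair.from_fqme(
            'xbtusdt.kraken',
            price_tick='0.1',
            size_tick='0.00000001',
            bs_mktid='XBTUSDT',
        ),
    }

Either shape ends up as a `MktPair` before shm allocation; the legacy path
just templates one via `MktPair.from_fqme()` as per the diff.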
--- piker/data/feed.py | 111 ++++++++++++++++++++++++++++++--------------- 1 file changed, 74 insertions(+), 37 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 530bed92..13072acf 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -26,6 +26,7 @@ from collections import ( Counter, ) from contextlib import asynccontextmanager as acm +from decimal import Decimal from datetime import datetime from functools import partial import time @@ -71,8 +72,10 @@ from ._sharedmem import ( from .ingest import get_ingestormod from .types import Struct from ..accounting._mktinfo import ( + Asset, + MktPair, + unpack_fqme, Symbol, - unpack_fqsn, ) from ._source import base_iohlc_dtype from ..ui import _search @@ -565,7 +568,7 @@ async def tsdb_backfill( timeframe=timeframe, ) - broker, symbol, expiry = unpack_fqsn(fqsn) + broker, symbol, expiry = unpack_fqme(fqsn) try: ( latest_start_dt, @@ -1009,17 +1012,44 @@ async def allocate_persistent_feed( # the broker-specific fully qualified symbol name, # but ensure it is lower-cased for external use. - bfqsn = msg['fqsn'].lower() + bs_mktid = msg['fqsn'].lower() - # true fqsn including broker/provider suffix - fqsn = '.'.join((bfqsn, brokername)) - # msg['fqsn'] = bfqsn + # true fqme including broker/provider suffix + fqme = '.'.join((bs_mktid, brokername)) - symbol = Symbol.from_fqsn( - fqsn=fqsn, - info=msg['symbol_info'], - ) - assert symbol.type_key + mktinfo = msg.get('mkt_info') + if not mktinfo: + + mktinfo = msg['symbol_info'] + + # TODO: read out renamed/new tick size fields in block below! + price_tick = mktinfo.get( + 'price_tick_size', + Decimal('0.01'), + ) + size_tick = mktinfo.get( + 'lot_tick_size', + Decimal('0.0'), + ) + + log.warning(f'FQME: {fqme} -> backend needs port to `MktPair`') + mkt = MktPair.from_fqme( + fqme, + price_tick=price_tick, + size_tick=size_tick, + bs_mktid=bs_mktid, + + _atype=mktinfo['asset_type'] + ) + + else: + # the new msg-protocol is to expect an already packed + # ``Asset`` and ``MktPair`` object from the backend + mkt = mktinfo + assert isinstance(mkt, MktPair) + assert isinstance(mkt.dst, Asset) + + assert mkt.type_key # HISTORY storage, run 2 tasks: # - a history loader / maintainer @@ -1040,17 +1070,24 @@ async def allocate_persistent_feed( manage_history, mod, bus, - fqsn, + fqme, some_data_ready, feed_is_live, ) # yield back control to starting nursery once we receive either # some history or a real-time quote. - log.info(f'waiting on history to load: {fqsn}') + log.info(f'waiting on history to load: {fqme}') await some_data_ready.wait() + symbol = Symbol.from_fqsn( + fqsn=fqme, + info=msg['symbol_info'], + ) flume = Flume( + # TODO: we have to use this for now since currently the + # MktPair above doesn't render the correct output key it seems + # when we provide the `MktInfo` here?..? symbol=symbol, first_quote=first_quote, _rt_shm_token=rt_shm.token, @@ -1061,7 +1098,7 @@ async def allocate_persistent_feed( # for ambiguous names we simply apply the retreived # feed to that name (for now). - bus.feeds[symstr] = bus.feeds[bfqsn] = flume + bus.feeds[symstr] = bus.feeds[bs_mktid] = flume task_status.started() @@ -1104,7 +1141,7 @@ async def allocate_persistent_feed( # start sample loop and shm incrementer task for OHLC style sampling # at the above registered step periods. 
try: - log.info(f'Starting sampler task for {fqsn}') + log.info(f'Starting sampler task for {fqme}') await sample_and_broadcast( bus, rt_shm, @@ -1114,7 +1151,7 @@ async def allocate_persistent_feed( sum_tick_vlm ) finally: - log.warning(f'{fqsn} feed task terminated') + log.warning(f'{fqme} feed task terminated') @tractor.context @@ -1197,22 +1234,22 @@ async def open_feed_bus( # subscriber flume = bus.feeds[symbol] sym = flume.symbol - bfqsn = sym.key + bs_mktid = sym.key fqsn = sym.fqsn # true fqsn - assert bfqsn in fqsn and brokername in fqsn + assert bs_mktid in fqsn and brokername in fqsn if sym.suffix: - bfqsn = fqsn.removesuffix(f'.{brokername}') - log.warning(f'{brokername} expanded symbol {symbol} -> {bfqsn}') + bs_mktid = fqsn.removesuffix(f'.{brokername}') + log.warning(f'{brokername} expanded symbol {symbol} -> {bs_mktid}') # pack for ``.started()`` sync msg flumes[fqsn] = flume - # we use the broker-specific fqsn (bfqsn) for - # the sampler subscription since the backend isn't (yet) - # expected to append it's own name to the fqsn, so we filter - # on keys which *do not* include that name (e.g .ib) . - bus._subscribers.setdefault(bfqsn, set()) + # we use the broker-specific market id (bs_mktid) for the + # sampler subscription since the backend isn't (yet) expected to + # append it's own name to the fqsn, so we filter on keys which + # *do not* include that name (e.g .ib) . + bus._subscribers.setdefault(bs_mktid, set()) # sync feed subscribers with flume handles await ctx.started( @@ -1276,9 +1313,9 @@ async def open_feed_bus( # maybe use the current task-id to key the sub list that's # added / removed? Or maybe we can add a general # pause-resume by sub-key api? - bfqsn = fqsn.removesuffix(f'.{brokername}') - local_subs.setdefault(bfqsn, set()).add(sub) - bus.add_subs(bfqsn, {sub}) + bs_mktid = fqsn.removesuffix(f'.{brokername}') + local_subs.setdefault(bs_mktid, set()).add(sub) + bus.add_subs(bs_mktid, {sub}) # sync caller with all subs registered state sub_registered.set() @@ -1291,16 +1328,16 @@ async def open_feed_bus( async for msg in stream: if msg == 'pause': - for bfqsn, subs in local_subs.items(): + for bs_mktid, subs in local_subs.items(): log.info( - f'Pausing {bfqsn} feed for {uid}') - bus.remove_subs(bfqsn, subs) + f'Pausing {bs_mktid} feed for {uid}') + bus.remove_subs(bs_mktid, subs) elif msg == 'resume': - for bfqsn, subs in local_subs.items(): + for bs_mktid, subs in local_subs.items(): log.info( - f'Resuming {bfqsn} feed for {uid}') - bus.add_subs(bfqsn, subs) + f'Resuming {bs_mktid} feed for {uid}') + bus.add_subs(bs_mktid, subs) else: raise ValueError(msg) @@ -1314,8 +1351,8 @@ async def open_feed_bus( cs.cancel() # drop all subs for this task from the bus - for bfqsn, subs in local_subs.items(): - bus.remove_subs(bfqsn, subs) + for bs_mktid, subs in local_subs.items(): + bus.remove_subs(bs_mktid, subs) class Feed(Struct): @@ -1512,7 +1549,7 @@ async def open_feed( feed = Feed() for fqsn in fqsns: - brokername, key, suffix = unpack_fqsn(fqsn) + brokername, key, suffix = unpack_fqme(fqsn) bfqsn = fqsn.replace('.' 
+ brokername, '') try: From 452cd7db8aa036a500b4e99a3d52ff7b2471b5db Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 14:21:36 -0400 Subject: [PATCH 045/294] Optionally load `MktPair` in `Flume`s --- piker/data/flows.py | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/piker/data/flows.py b/piker/data/flows.py index da9a73ba..34adf876 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -22,7 +22,7 @@ real-time data processing data-structures. """ from __future__ import annotations -from decimal import Decimal +# from decimal import Decimal from typing import ( TYPE_CHECKING, ) @@ -193,19 +193,30 @@ class Flume(Struct): return msg @classmethod - def from_msg(cls, msg: dict) -> dict: + def from_msg( + cls, + msg: dict, - # XXX NOTE: ``msgspec`` can encode `Decimal` - # but it doesn't decide to it by default since - # we aren't spec-cing these msgs as structs... - symbol = Symbol(**msg.pop('symbol')) - symbol.tick_size = Decimal(symbol.tick_size) - symbol.lot_tick_size = Decimal(symbol.lot_tick_size) + ) -> dict: + ''' + Load from an IPC msg presumably in either `dict` or + `msgspec.Struct` form. - return cls( - symbol=symbol, - **msg, - ) + ''' + sym_msg = msg.pop('symbol') + + if 'dst' in sym_msg: + mkt = MktPair.from_msg(sym_msg) + + else: + # XXX NOTE: ``msgspec`` can encode `Decimal` + # but it doesn't decide to it by default since + # we aren't spec-cing these msgs as structs, SO + # we have to ensure we do a struct type case (which `.copy()` + # does) to ensure we get the right type! + mkt = Symbol(**sym_msg).copy() + + return cls(symbol=mkt, **msg) def get_index( self, From d4a5a3057c836ff45ade820300233890f3a536ea Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 16:40:33 -0400 Subject: [PATCH 046/294] Add `MktPair.suffix: str` read from contract info To be compat with the `Symbol` (for now) and generally allow for reading the (derivative) contract specific part of the fqme. Adjust `contract_info: list[str]` and make `src: str = ''` by default. --- piker/accounting/_mktinfo.py | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index fcb6c0f8..4c94962d 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -154,7 +154,7 @@ def maybe_cons_tokens( sequence of elements in ``tokens``. ''' - return '.'.join(filter(bool, tokens)).lower() + return delim_char.join(filter(bool, tokens)).lower() class MktPair(Struct, frozen=True): @@ -195,7 +195,7 @@ class MktPair(Struct, frozen=True): # required; the reason is for backward compat since more positioning # calculations were not originally stored with a src asset.. - src: str | Asset | None = None + src: str | Asset = '' # "source asset" (name) used to buy *from* # (or used to sell *to*). @@ -217,7 +217,7 @@ class MktPair(Struct, frozen=True): # for derivs, info describing contract, egs. # strike price, call or put, swap type, exercise model, etc. - contract_info: str | None = None + contract_info: list[str] | None = None _atype: str = '' @@ -287,7 +287,32 @@ class MktPair(Struct, frozen=True): "symbol". ''' - return maybe_cons_tokens([str(self.dst), self.src]) + return maybe_cons_tokens( + [str(self.dst), self.src], + delim_char='', + ) + + @property + def suffix(self) -> str: + ''' + The "contract suffix" for this market. + + Eg. 
mnq/usd.20230616.cme.ib + ^ ----- ^ + or tsla/usd.20230324.200c.cboe.ib + ^ ---------- ^ + + In most other tina platforms they only show you these details in + some kinda "meta data" format, we have FQMEs so we do this up + front and explicit. + + ''' + field_strs = [self.expiry] + con_info = self.contract_info + if con_info is not None: + field_strs.extend(con_info) + + return maybe_cons_tokens(field_strs) # NOTE: the main idea behind an fqme is to map a "market address" # to some endpoint from a transaction provider (eg. a broker) such @@ -387,6 +412,7 @@ class MktPair(Struct, frozen=True): def unpack_fqme( fqme: str, + ) -> tuple[str, str, str]: ''' Unpack a fully-qualified-symbol-name to ``tuple``. From 1d08ee6d011e0ca9e8752f7d6a206105d3784082 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 16:59:45 -0400 Subject: [PATCH 047/294] `.clearing`: broad rename of `fqsn` -> `fqme` --- piker/clearing/_client.py | 10 ++--- piker/clearing/_ems.py | 91 ++++++++++++++++++++------------------- 2 files changed, 51 insertions(+), 50 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index ee176f87..01196f41 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -27,7 +27,7 @@ import trio import tractor from tractor.trionics import broadcast_receiver -from ..accounting._mktinfo import unpack_fqsn +from ..accounting._mktinfo import unpack_fqme from ..log import get_logger from ..data.types import Struct from ..service import maybe_open_emsd @@ -177,7 +177,7 @@ async def relay_order_cmds_from_sync_code( @acm async def open_ems( - fqsn: str, + fqme: str, mode: str = 'live', loglevel: str = 'error', @@ -229,7 +229,7 @@ async def open_ems( # ready for order commands book = get_orders() - broker, symbol, suffix = unpack_fqsn(fqsn) + broker, symbol, suffix = unpack_fqme(fqme) async with maybe_open_emsd(broker) as portal: @@ -246,7 +246,7 @@ async def open_ems( portal.open_context( _emsd_main, - fqsn=fqsn, + fqme=fqme, exec_mode=mode, loglevel=loglevel, @@ -266,7 +266,7 @@ async def open_ems( async with trio.open_nursery() as n: n.start_soon( relay_order_cmds_from_sync_code, - fqsn, + fqme, trades_stream ) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 4a735a4e..24d491c5 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -44,7 +44,7 @@ import tractor from ..log import get_logger from ..data._normalize import iterticks from ..accounting._mktinfo import ( - unpack_fqsn, + unpack_fqme, float_digits, ) from ..data.feed import ( @@ -156,7 +156,7 @@ async def clear_dark_triggers( brokerd_orders_stream: tractor.MsgStream, quote_stream: tractor.ReceiveMsgStream, # noqa broker: str, - fqsn: str, + fqme: str, book: DarkBook, @@ -231,7 +231,7 @@ async def clear_dark_triggers( account=account, size=size, ): - bfqsn: str = symbol.replace(f'.{broker}', '') + bfqme: str = symbol.replace(f'.{broker}', '') submit_price = price + abs_diff_away resp = 'triggered' # hidden on client-side @@ -244,7 +244,7 @@ async def clear_dark_triggers( oid=oid, account=account, time_ns=time.time_ns(), - symbol=bfqsn, + symbol=bfqme, price=submit_price, size=size, ) @@ -287,14 +287,14 @@ async def clear_dark_triggers( # send response to client-side await router.client_broadcast( - fqsn, + fqme, status, ) else: # condition scan loop complete log.debug(f'execs are {execs}') if execs: - book.triggers[fqsn] = execs + book.triggers[fqme] = execs # print(f'execs scan took: {time.time() - start}') @@ -335,7 +335,7 @@ class Router(Struct): # sets 
of clients mapped from subscription keys subscribers: defaultdict[ - str, # sub key, default fqsn + str, # sub key, default fqme set[tractor.MsgStream], # unique client streams ] = defaultdict(set) @@ -424,7 +424,7 @@ class Router(Struct): # actor to simulate the real IPC load it'll have when also # pulling data from feeds open_trades_endpoint = paper.open_paperboi( - fqsn='.'.join([symbol, broker]), + fqme='.'.join([symbol, broker]), loglevel=loglevel, ) @@ -506,7 +506,7 @@ class Router(Struct): async def open_trade_relays( self, - fqsn: str, + fqme: str, exec_mode: str, loglevel: str, @@ -520,24 +520,24 @@ class Router(Struct): none already exists. ''' - broker, symbol, suffix = unpack_fqsn(fqsn) + broker, symbol, suffix = unpack_fqme(fqme) async with ( maybe_open_feed( - [fqsn], + [fqme], loglevel=loglevel, ) as feed, ): - brokername, _, _ = unpack_fqsn(fqsn) + brokername, _, _ = unpack_fqme(fqme) brokermod = feed.mods[brokername] broker = brokermod.name portal = feed.portals[brokermod] # XXX: this should be initial price quote from target provider - flume = feed.flumes[fqsn] + flume = feed.flumes[fqme] first_quote: dict = flume.first_quote book: DarkBook = self.get_dark_book(broker) - book.lasts[fqsn]: float = first_quote['last'] + book.lasts[fqme]: float = first_quote['last'] async with self.maybe_open_brokerd_dialog( brokermod=brokermod, @@ -556,7 +556,7 @@ class Router(Struct): relay.brokerd_stream, flume.stream, broker, - fqsn, # form: ... + fqme, # form: ... book ) @@ -945,7 +945,7 @@ async def translate_and_relay_brokerd_events( # may end up with collisions? status_msg = Status(**brokerd_msg) - # NOTE: be sure to pack an fqsn for the client side! + # NOTE: be sure to pack an fqme for the client side! order = Order(**status_msg.req) order.symbol = f'{order.symbol}.{broker}' @@ -1022,7 +1022,7 @@ async def process_client_order_cmds( client_order_stream: tractor.MsgStream, brokerd_order_stream: tractor.MsgStream, - fqsn: str, + fqme: str, flume: Flume, dark_book: DarkBook, router: Router, @@ -1049,11 +1049,11 @@ async def process_client_order_cmds( # backend can be routed and relayed to subscribed clients. subs = router.dialogs[oid] - # add all subscribed clients for this fqsn (should eventually be + # add all subscribed clients for this fqme (should eventually be # a more generalize subscription system) to received order msg # updates (and thus show stuff in the UI). 
subs.add(client_order_stream) - subs.update(router.subscribers[fqsn]) + subs.update(router.subscribers[fqme]) reqid = dark_book._ems2brokerd_ids.inverse.get(oid) @@ -1111,7 +1111,7 @@ async def process_client_order_cmds( and status.resp == 'dark_open' ): # remove from dark book clearing - entry = dark_book.triggers[fqsn].pop(oid, None) + entry = dark_book.triggers[fqme].pop(oid, None) if entry: ( pred, @@ -1127,7 +1127,7 @@ async def process_client_order_cmds( status.req = cmd await router.client_broadcast( - fqsn, + fqme, status, ) @@ -1137,7 +1137,7 @@ async def process_client_order_cmds( dark_book._active.pop(oid) else: - log.exception(f'No dark order for {fqsn}?') + log.exception(f'No dark order for {fqme}?') # TODO: eventually we should be receiving # this struct on the wire unpacked in a scoped protocol @@ -1146,7 +1146,7 @@ async def process_client_order_cmds( # LIVE order REQUEST case { 'oid': oid, - 'symbol': fqsn, + 'symbol': fqme, 'price': trigger_price, 'size': size, 'action': ('buy' | 'sell') as action, @@ -1159,7 +1159,7 @@ async def process_client_order_cmds( # remove the broker part before creating a message # to send to the specific broker since they probably # aren't expectig their own name, but should they? - sym = fqsn.replace(f'.{broker}', '') + sym = fqme.replace(f'.{broker}', '') if status is not None: # if we already had a broker order id then @@ -1216,7 +1216,7 @@ async def process_client_order_cmds( # DARK-order / alert REQUEST case { 'oid': oid, - 'symbol': fqsn, + 'symbol': fqme, 'price': trigger_price, 'size': size, 'exec_mode': exec_mode, @@ -1238,7 +1238,7 @@ async def process_client_order_cmds( # price received from the feed, instead of being # like every other shitty tina platform that makes # the user choose the predicate operator. - last = dark_book.lasts[fqsn] + last = dark_book.lasts[fqme] # sometimes the real-time feed hasn't come up # so just pull from the latest history. 
@@ -1280,7 +1280,7 @@ async def process_client_order_cmds( # NOTE: this may result in an override of an existing # dark book entry if the order id already exists dark_book.triggers.setdefault( - fqsn, {} + fqme, {} )[oid] = ( pred, tickfilter, @@ -1305,7 +1305,7 @@ async def process_client_order_cmds( # broadcast status to all subscribed clients await router.client_broadcast( - fqsn, + fqme, status, ) @@ -1316,35 +1316,36 @@ async def process_client_order_cmds( @acm async def maybe_open_trade_relays( router: Router, - fqsn: str, + fqme: str, exec_mode: str, # ('paper', 'live') loglevel: str = 'info', ) -> tuple: - def cache_on_fqsn_unless_paper( + def cache_on_fqme_unless_paper( router: Router, - fqsn: str, + fqme: str, exec_mode: str, # ('paper', 'live') loglevel: str = 'info', ) -> Hashable: if exec_mode == 'paper': - return f'paper_{fqsn}' + return f'paper_{fqme}' else: - return fqsn + return fqme # XXX: closure to enable below use of # ``tractor.trionics.maybe_open_context()`` @acm async def cached_mngr( router: Router, - fqsn: str, + fqme: str, exec_mode: str, # ('paper', 'live') loglevel: str = 'info', ): + relay, feed, client_ready = await _router.nursery.start( _router.open_trade_relays, - fqsn, + fqme, exec_mode, loglevel, ) @@ -1354,11 +1355,11 @@ async def maybe_open_trade_relays( acm_func=cached_mngr, kwargs={ 'router': _router, - 'fqsn': fqsn, + 'fqme': fqme, 'exec_mode': exec_mode, 'loglevel': loglevel, }, - key=cache_on_fqsn_unless_paper, + key=cache_on_fqme_unless_paper, ) as ( cache_hit, (relay, feed, client_ready) @@ -1369,7 +1370,7 @@ async def maybe_open_trade_relays( @tractor.context async def _emsd_main( ctx: tractor.Context, - fqsn: str, + fqme: str, exec_mode: str, # ('paper', 'live') loglevel: str = 'info', @@ -1426,7 +1427,7 @@ async def _emsd_main( global _router assert _router - broker, symbol, suffix = unpack_fqsn(fqsn) + broker, symbol, suffix = unpack_fqme(fqme) # TODO: would be nice if in tractor we can require either a ctx arg, # or a named arg with ctx in it and a type annotation of @@ -1443,7 +1444,7 @@ async def _emsd_main( # few duplicate streams as necessary per ems actor. async with maybe_open_trade_relays( _router, - fqsn, + fqme, exec_mode, loglevel, ) as (relay, feed, client_ready): @@ -1466,28 +1467,28 @@ async def _emsd_main( # register the client side before starting the # brokerd-side relay task to ensure the client is # delivered all exisiting open orders on startup. - # TODO: instead of by fqsn we need a subscription + # TODO: instead of by fqme we need a subscription # system/schema here to limit what each new client is # allowed to see in terms of broadcasted order flow # updates per dialog. - _router.subscribers[fqsn].add(client_stream) + _router.subscribers[fqme].add(client_stream) client_ready.set() # start inbound (from attached client) order request processing # main entrypoint, run here until cancelled. 
try: - flume = feed.flumes[fqsn] + flume = feed.flumes[fqme] await process_client_order_cmds( client_stream, brokerd_stream, - fqsn, + fqme, flume, dark_book, _router, ) finally: # try to remove client from subscription registry - _router.subscribers[fqsn].remove(client_stream) + _router.subscribers[fqme].remove(client_stream) for oid, client_streams in _router.dialogs.items(): client_streams.discard(client_stream) From 0f3041724b95cc4bbae9e08e5a9b0e29d417e835 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 21:50:35 -0400 Subject: [PATCH 048/294] Use `MktPair` for `Flume.symbol` when used by backend Initial attempt at getting the sampling and shm layer to use the new mkt info meta-data type. Draft out a potential `BackendInitMsg: msgspec.Struct` for validating the init msg returned from the `stream_quotes()` start value; obvs don't actually use it yet. --- piker/data/feed.py | 55 ++++++++++++++++++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 12 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 13072acf..405a8f57 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -933,6 +933,24 @@ async def manage_history( await trio.sleep_forever() +class BackendInitMsg(Struct, frozen=True): + ''' + A stringent data provider startup msg schema validator. + + The fields defined here are matched with those absolutely required + from each backend broker/data provider. + + ''' + fqsn: str + symbol_info: dict | None = None + mkt_info: MktPair | None = None + shm_write_opts: dict[str, Any] | None = None + + +def validate_init_msg() -> None: + ... + + async def allocate_persistent_feed( bus: _FeedsBus, sub_registered: trio.Event, @@ -977,7 +995,10 @@ async def allocate_persistent_feed( # establish broker backend quote stream by calling # ``stream_quotes()``, which is a required broker backend endpoint. - init_msg, first_quote = await bus.nursery.start( + ( + init_msg, + first_quote, + ) = await bus.nursery.start( partial( mod.stream_quotes, send_chan=send, @@ -1008,19 +1029,24 @@ async def allocate_persistent_feed( # a small streaming machine around the remote feed which can then # do the normal work of sampling and writing shm buffers # (depending on if we want sampling done on the far end or not?) - msg = init_msg[symstr] + per_mkt_init_msg = init_msg[symstr] # the broker-specific fully qualified symbol name, # but ensure it is lower-cased for external use. - bs_mktid = msg['fqsn'].lower() + bs_mktid = per_mkt_init_msg['fqsn'].lower() # true fqme including broker/provider suffix fqme = '.'.join((bs_mktid, brokername)) - mktinfo = msg.get('mkt_info') + mktinfo = per_mkt_init_msg.get('mkt_info') if not mktinfo: - mktinfo = msg['symbol_info'] + log.warning( + f'BACKEND {brokername} is using old `Symbol` style API\n' + 'IT SHOULD BE PORTED TO THE NEW `.accounting._mktinfo.MktPair`\n' + 'STATTTTT!!!\n' + ) + mktinfo = per_mkt_init_msg['symbol_info'] # TODO: read out renamed/new tick size fields in block below! 
price_tick = mktinfo.get( @@ -1042,10 +1068,15 @@ async def allocate_persistent_feed( _atype=mktinfo['asset_type'] ) + symbol = Symbol.from_fqsn( + fqsn=fqme, + info=mktinfo, + ) + else: # the new msg-protocol is to expect an already packed # ``Asset`` and ``MktPair`` object from the backend - mkt = mktinfo + symbol = mkt = mktinfo assert isinstance(mkt, MktPair) assert isinstance(mkt.dst, Asset) @@ -1080,15 +1111,13 @@ async def allocate_persistent_feed( log.info(f'waiting on history to load: {fqme}') await some_data_ready.wait() - symbol = Symbol.from_fqsn( - fqsn=fqme, - info=msg['symbol_info'], - ) flume = Flume( + # TODO: we have to use this for now since currently the # MktPair above doesn't render the correct output key it seems # when we provide the `MktInfo` here?..? symbol=symbol, + first_quote=first_quote, _rt_shm_token=rt_shm.token, _hist_shm_token=hist_shm.token, @@ -1109,6 +1138,8 @@ async def allocate_persistent_feed( # the backend will indicate when real-time quotes have begun. await feed_is_live.wait() + # NOTE: if not configured otherwise, we always sum tick volume + # values in the OHLCV sampler. sum_tick_vlm: bool = init_msg.get( 'shm_write_opts', {} ).get('sum_tick_vlm', True) @@ -1132,7 +1163,7 @@ async def allocate_persistent_feed( rt_shm.array['time'][1] = ts + 1 elif hist_shm.array.size == 0: - await tractor.breakpoint() + raise RuntimeError(f'History (1m) Shm for {fqme} is empty!?') # wait the spawning parent task to register its subscriber # send-stream entry before we start the sample loop. @@ -1235,7 +1266,7 @@ async def open_feed_bus( flume = bus.feeds[symbol] sym = flume.symbol bs_mktid = sym.key - fqsn = sym.fqsn # true fqsn + fqsn = sym.fqme # true fqsn assert bs_mktid in fqsn and brokername in fqsn if sym.suffix: From 580165f2f408cc4a58f7489a9a3a778cec8d8fd0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 21:56:39 -0400 Subject: [PATCH 049/294] Expect new `MktPair.tick_size: Decimal` attr in ems --- piker/clearing/_ems.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 24d491c5..ffe63292 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -1248,7 +1248,7 @@ async def process_client_order_cmds( pred = mk_check(trigger_price, last, action) spread_slap: float = 5 - min_tick = flume.symbol.tick_size + min_tick = flume.symbol.size_tick min_tick_digits = float_digits(min_tick) if action == 'buy': From a9778e40017c3bd568ba3177356c1a9ff226c18f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 21:58:40 -0400 Subject: [PATCH 050/294] Always cast `Order.symbol: str` for now To make nested `msgspec.Struct`s work we need to tell the codec that the `.symbol` is some struct def, since we don't really need to enforce that (yet) we're just going to enc/dec as `str` until we further formalize and/or need something more complex. --- piker/clearing/_messages.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index 0d97ee3f..61579787 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -29,7 +29,6 @@ from typing import ( from msgspec import field -from ..accounting._mktinfo import Symbol from ..data.types import Struct @@ -94,7 +93,8 @@ class Order(Struct): # internal ``emdsd`` unique "order id" oid: str # uuid4 - symbol: str | Symbol + # TODO: figure out how to optionally typecast this to `MktPair`? 
+ symbol: str # | MktPair account: str # should we set a default as '' ? price: float From d62fb655eb24142e61b6464a2198c127becce26a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 22:08:43 -0400 Subject: [PATCH 051/294] `kraken`: parse our source asset key and set on `MktPair.src: str` --- piker/brokers/kraken/api.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 1223924a..4ce05322 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -509,11 +509,16 @@ class Client: bs_mktid, pair_info = Client.normalize_symbol(pair_str) dst_asset = self.assets[pair_info.base] + # NOTE XXX parse out the src asset name until we figure out + # how to get the src asset's `Pair` info from kraken.. + src_key = pair_str.lstrip(dst_asset.name.upper()).lower() + return MktPair( dst=dst_asset, price_tick=pair_info.price_tick, size_tick=pair_info.size_tick, bs_mktid=bs_mktid, + src=src_key, broker='kraken', ) From b6df83a0e9ef864cc3d31e8b88f40f7fa363146d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 22:28:26 -0400 Subject: [PATCH 052/294] Typecast `OrderMode.staged.symbol: str` before `.copy()`! --- piker/ui/order_mode.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index e6c4ed33..3125fefe 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -349,12 +349,22 @@ class OrderMode: ''' if not order: staged: Order = self._staged_order + # apply order fields for ems oid = str(uuid.uuid4()) - order = staged.copy() - order.oid = oid - order.symbol = order.symbol.fqme + # we have to copy and slap in the `MktPair` first + # since we can't cast to it without being mega explicit + # with `msgspec.Struct`, which we're not yet.. + fqme = staged.symbol + if not isinstance(fqme, str): + mkt = staged.symbol.copy() + fqme = mkt.fqme + staged.symbol = fqme + + order = staged.copy() + order.symbol = fqme + order.oid = oid lines = self.lines_from_order( order, From 7be85a882b51fa42fc8443d01becc0d0520436e1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 22:29:26 -0400 Subject: [PATCH 053/294] Drop use of legacy `Symbol.broker_info` in display startup --- piker/ui/_display.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/piker/ui/_display.py b/piker/ui/_display.py index a6f8e6c2..227e4a66 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -1221,7 +1221,6 @@ async def display_symbol_data( # use expanded contract symbols passed back from feed layer. 
fqsns = list(feed.flumes.keys()) - # step_size_s = 1 # tf_key = tf_in_1s[step_size_s] godwidget.window.setWindowTitle( @@ -1288,7 +1287,6 @@ async def display_symbol_data( hist_ohlcv: ShmArray = flume.hist_shm symbol = flume.symbol - brokername = symbol.broker fqsn = symbol.fqsn hist_chart = hist_linked.plot_ohlc_main( @@ -1337,8 +1335,7 @@ async def display_symbol_data( None | ChartPlotWidget ] = {}.fromkeys(feed.flumes) if ( - not symbol.broker_info[brokername].get('no_vlm', False) - and has_vlm(ohlcv) + has_vlm(ohlcv) and vlm_chart is None ): vlm_chart = vlm_charts[fqsn] = await ln.start( @@ -1497,13 +1494,13 @@ async def display_symbol_data( ) # boot order-mode - order_ctl_symbol: str = fqsns[0] + order_ctl_fqme: str = fqsns[0] mode: OrderMode async with ( open_order_mode( feed, godwidget, - fqsns[0], + order_ctl_fqme, order_mode_started, loglevel=loglevel ) as mode @@ -1511,7 +1508,7 @@ async def display_symbol_data( rt_linked.mode = mode - rt_viz = rt_chart.get_viz(order_ctl_symbol) + rt_viz = rt_chart.get_viz(order_ctl_fqme) rt_viz.plot.setFocus() # default view adjuments and sidepane alignment From dc2332c980d0dd98d4d81a76f4cec8b604edd668 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 22:32:24 -0400 Subject: [PATCH 054/294] '`kraken`: finally, use new `MktPair` in `'mkt_info'` init msg field!' --- piker/brokers/kraken/feed.py | 32 ++++++++++++++------------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 55ce4f76..e37fdb49 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -290,7 +290,7 @@ async def stream_quotes( get_console_log(loglevel or tractor.current_actor().loglevel) ws_pairs = {} - mkt_infos = {} + mkt_infos: dict[str, MktPair] = {} async with ( open_cached_client('kraken') as client, @@ -299,29 +299,25 @@ async def stream_quotes( # keep client cached for real-time section for sym in symbols: - # transform to upper since piker style is always lower - sym = sym.upper() - pair: Pair = await client.pair_info(sym) - mkt: MktPair = await client.mkt_info(sym) - mktinfo = mkt.to_dict() - mkt_infos[sym] = mktinfo + # uppercase since piker style is always lowercase. + sym_str = sym.upper() + pair: Pair = await client.pair_info(sym_str) + mkt: MktPair = await client.mkt_info(sym_str) + mkt_infos[sym_str] = mkt - # TODO: remove this once we drop ``Symbol``!! - mktinfo['asset_type'] = mkt.dst.atype - mktinfo['price_tick_size'] = mkt.price_tick - mktinfo['lot_tick_size'] = mkt.size_tick - - ws_pairs[sym] = pair.wsname + ws_pairs[sym_str] = pair.wsname symbol = symbols[0].lower() + # sync with `.data.feed` caller + # TODO: should we make this init msg a `Struct`? init_msgs = { - # pass back token, and bool, signalling if we're the writer - # and that history has been written symbol: { - 'symbol_info': mkt_infos[sym], - 'shm_write_opts': {'sum_tick_vml': False}, - 'fqsn': sym, + 'fqsn': sym_str, + 'mkt_info': mkt_infos[sym_str], + 'shm_write_opts': { + 'sum_tick_vml': False, + }, }, } From 6272cae8d45e284a3f2700e9dbf186da96a1fe14 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 21 Mar 2023 22:33:54 -0400 Subject: [PATCH 055/294] Drop more `Optional` usage on our `Struct` --- piker/data/types.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/piker/data/types.py b/piker/data/types.py index c37de233..7a3bc6bb 100644 --- a/piker/data/types.py +++ b/piker/data/types.py @@ -19,7 +19,6 @@ Built-in (extension) types. 
""" import sys -from typing import Optional from pprint import pformat import msgspec @@ -59,7 +58,7 @@ class Struct( def copy( self, - update: Optional[dict] = None, + update: dict | None = None, ) -> msgspec.Struct: ''' @@ -84,7 +83,7 @@ class Struct( # use ``.copy()`` above in such cases. def typecast( self, - # fields: Optional[list[str]] = None, + # fields: list[str] | None = None, ) -> None: for fname, ftype in self.__annotations__.items(): setattr(self, fname, ftype(getattr(self, fname))) From 406565f74d2ab87ed25b94f497ee6c2fe79260c2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 08:23:01 -0400 Subject: [PATCH 056/294] Rename `fqsn` -> `fqme` in paper engine --- piker/clearing/_paper_engine.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index bfec7260..c4c6108d 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -46,7 +46,7 @@ from ..accounting import ( open_pps, ) from ..data._normalize import iterticks -from ..data._source import unpack_fqsn +from ..accounting._mktinfo import unpack_fqme from ..log import get_logger from ._messages import ( BrokerdCancel, @@ -195,7 +195,7 @@ class PaperBoi(Struct): async def fake_fill( self, - fqsn: str, + fqme: str, price: float, size: float, action: str, # one of {'buy', 'sell'} @@ -249,10 +249,10 @@ class PaperBoi(Struct): await self.ems_trades_stream.send(msg) # lookup any existing position - key = fqsn.rstrip(f'.{self.broker}') + key = fqme.rstrip(f'.{self.broker}') t = Transaction( - fqsn=fqsn, - sym=self._syms[fqsn], + fqsn=fqme, + sym=self._syms[fqme], tid=oid, size=size, price=price, @@ -275,7 +275,7 @@ class PaperBoi(Struct): pp_msg = BrokerdPosition( broker=self.broker, account='paper', - symbol=fqsn, + symbol=fqme, # TODO: we need to look up the asset currency from # broker info. i guess for crypto this can be # inferred from the pair? @@ -419,7 +419,7 @@ async def simulate_fills( # clearing price would have filled entirely await client.fake_fill( - fqsn=sym, + fqme=sym, # todo slippage to determine fill price price=tick_price, size=size, @@ -518,7 +518,7 @@ async def trades_dialogue( ctx: tractor.Context, broker: str, - fqsn: str, + fqme: str, loglevel: str = None, ) -> None: @@ -527,7 +527,7 @@ async def trades_dialogue( async with ( data.open_feed( - [fqsn], + [fqme], loglevel=loglevel, ) as feed, @@ -571,8 +571,8 @@ async def trades_dialogue( # TODO: load postions from ledger file _trade_ledger={}, _syms={ - fqsn: flume.symbol - for fqsn, flume in feed.flumes.items() + fqme: flume.symbol + for fqme, flume in feed.flumes.items() } ) @@ -588,7 +588,7 @@ async def trades_dialogue( @asynccontextmanager async def open_paperboi( - fqsn: str, + fqme: str, loglevel: str, ) -> Callable: @@ -597,7 +597,7 @@ async def open_paperboi( its context. 
''' - broker, symbol, expiry = unpack_fqsn(fqsn) + broker, symbol, expiry = unpack_fqme(fqme) service_name = f'paperboi.{broker}' async with ( @@ -617,7 +617,7 @@ async def open_paperboi( async with portal.open_context( trades_dialogue, broker=broker, - fqsn=fqsn, + fqme=fqme, loglevel=loglevel, ) as (ctx, first): From fd9e484b55db61526fab218ed76b8537a18d054a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 10:36:52 -0400 Subject: [PATCH 057/294] Add `.__str__()` to mktpair and symbol types, fix `MktPair.fqme` token order --- piker/accounting/_mktinfo.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 4c94962d..19657541 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -221,6 +221,10 @@ class MktPair(Struct, frozen=True): _atype: str = '' + # NOTE: when cast to `str` return fqme + def __str__(self) -> str: + return self.fqme + @classmethod def from_msg( cls, @@ -358,7 +362,7 @@ class MktPair(Struct, frozen=True): return maybe_cons_tokens([ self.key, # final "pair name" (eg. qqq[/usd], btcusdt) self.venue, - self.expiry, + self.suffix, # includes expiry and other con info self.broker, ]) @@ -537,3 +541,7 @@ class Symbol(Struct): Decimal(f'1.{"0".ljust(digits, "0")}'), rounding=ROUND_HALF_EVEN ) + + # NOTE: when cast to `str` return fqme + def __str__(self) -> str: + return self.fqme From 069466218e867bf750715dcb8370a31eda6ce7c4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 10:40:48 -0400 Subject: [PATCH 058/294] Use `str(cmd.symbol)` for fqme on cancels, add `_nowait()` method names --- piker/clearing/_client.py | 32 +++++++++++++++++++++++++++----- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 01196f41..cbf5add9 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -64,7 +64,7 @@ class OrderBook(Struct): _from_order_book: trio.abc.ReceiveChannel _sent_orders: dict[str, Order] = {} - def send( + def send_nowait( self, msg: Order | dict, @@ -73,6 +73,15 @@ class OrderBook(Struct): self._to_ems.send_nowait(msg) return msg + # TODO: make this an async version.. + def send( + self, + msg: Order | dict, + + ) -> dict: + log.warning('USE `.send_nowait()` instead!') + return self.send_nowait(msg) + def send_update( self, @@ -86,10 +95,14 @@ class OrderBook(Struct): self._to_ems.send_nowait(msg) return cmd - def cancel(self, uuid: str) -> bool: - """Cancel an order (or alert) in the EMS. + def cancel_nowait( + self, + uuid: str, + ) -> bool: + ''' + Cancel an order (or alert) in the EMS. - """ + ''' cmd = self._sent_orders.get(uuid) if not cmd: log.error( @@ -97,12 +110,21 @@ class OrderBook(Struct): f'Maybe there is a stale entry or line?\n' f'You should report this as a bug!' ) + fqme = str(cmd.symbol) msg = Cancel( oid=uuid, - symbol=cmd.symbol, + symbol=fqme, ) self._to_ems.send_nowait(msg) + # TODO: make this an async version.. 
+ def cancel( + self, + uuid: str, + ) -> bool: + log.warning('USE `.cancel_nowait()` instead!') + return self.cancel_nowait(uuid) + _orders: OrderBook = None From 581782800d9f24b6c04f2d8ae34ced83d244a725 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 10:50:10 -0400 Subject: [PATCH 059/294] Rename `Client.send_update()` -> `.update_nowait()` --- piker/clearing/_client.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index cbf5add9..7c79c3ea 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -82,7 +82,7 @@ class OrderBook(Struct): log.warning('USE `.send_nowait()` instead!') return self.send_nowait(msg) - def send_update( + def update_nowait( self, uuid: str, @@ -95,6 +95,14 @@ class OrderBook(Struct): self._to_ems.send_nowait(msg) return cmd + # TODO: async meth for this! + # def update( + # self, + # uuid: str, + # **data: dict, + # ) -> dict: + # ... + def cancel_nowait( self, uuid: str, From 7498cbb5f4d86c48634af3b9d5ce497718bbf6a4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 11:41:11 -0400 Subject: [PATCH 060/294] Use `Struct.copy()` with update dict for `Order` from staged --- piker/ui/order_mode.py | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 3125fefe..6f1477f7 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -353,18 +353,13 @@ class OrderMode: # apply order fields for ems oid = str(uuid.uuid4()) - # we have to copy and slap in the `MktPair` first - # since we can't cast to it without being mega explicit - # with `msgspec.Struct`, which we're not yet.. - fqme = staged.symbol - if not isinstance(fqme, str): - mkt = staged.symbol.copy() - fqme = mkt.fqme - staged.symbol = fqme - - order = staged.copy() - order.symbol = fqme - order.oid = oid + # NOTE: we have to str-ify `MktPair` first since we can't + # cast to it without being mega explicit with + # `msgspec.Struct`, which we're not yet.. 
+ order = staged.copy({ + 'symbol': str(staged.symbol), + 'oid': oid, + }) lines = self.lines_from_order( order, @@ -411,7 +406,7 @@ class OrderMode: # send order cmd to ems if send_msg: - self.book.send(order) + self.book.send_nowait(order) else: # just register for control over this order # TODO: some kind of mini-perms system here based on @@ -445,7 +440,7 @@ class OrderMode: size = dialog.order.size # NOTE: sends modified order msg to EMS - self.book.send_update( + self.book.update_nowait( uuid=line.dialog.uuid, price=level, size=size, @@ -617,7 +612,7 @@ class OrderMode: dialog.last_status_close = cancel_status_close ids.append(oid) - self.book.cancel(uuid=oid) + self.book.cancel_nowait(uuid=oid) return ids From 2454dda18f62f1595e384bd32da459bdc5cd40e9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 11:41:39 -0400 Subject: [PATCH 061/294] Use `MktPair` attr `.size_tick` in charting --- piker/ui/_chart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index 9fffeee2..464fb8b7 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -291,7 +291,7 @@ class GodWidget(QWidget): if symbol is not None: self.window.setWindowTitle( f'{symbol.fqme} ' - f'tick:{symbol.tick_size}' + f'tick:{symbol.size_tick}' ) return order_mode_started From 3bf48ab5971cf0f89005d314fb14caeeff0257cc Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 11:48:35 -0400 Subject: [PATCH 062/294] Use a single log for entire `.clearing` subsys --- piker/clearing/__init__.py | 3 +++ piker/clearing/_client.py | 7 +++---- piker/clearing/_ems.py | 7 +++---- piker/clearing/_paper_engine.py | 6 +++--- piker/clearing/_util.py | 33 +++++++++++++++++++++++++++++++++ 5 files changed, 45 insertions(+), 11 deletions(-) create mode 100644 piker/clearing/_util.py diff --git a/piker/clearing/__init__.py b/piker/clearing/__init__.py index 06a9212e..bd95a8ab 100644 --- a/piker/clearing/__init__.py +++ b/piker/clearing/__init__.py @@ -18,9 +18,12 @@ Market machinery for order executions, book, management. """ +from ..log import get_logger from ._client import open_ems __all__ = [ 'open_ems', ] + +log = get_logger(__name__) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 7c79c3ea..14c77d54 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -27,8 +27,10 @@ import trio import tractor from tractor.trionics import broadcast_receiver +from ._util import ( + log, # sub-sys logger +) from ..accounting._mktinfo import unpack_fqme -from ..log import get_logger from ..data.types import Struct from ..service import maybe_open_emsd from ._messages import ( @@ -44,9 +46,6 @@ if TYPE_CHECKING: ) -log = get_logger(__name__) - - class OrderBook(Struct): '''EMS-client-side order book ctl and tracking. 
diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index ffe63292..429c1935 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -41,7 +41,9 @@ import trio from trio_typing import TaskStatus import tractor -from ..log import get_logger +from ._util import ( + log, # sub-sys logger +) from ..data._normalize import iterticks from ..accounting._mktinfo import ( unpack_fqme, @@ -68,9 +70,6 @@ from ._messages import ( ) -log = get_logger(__name__) - - # TODO: numba all of this def mk_check( diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index c4c6108d..0fadfeb6 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -47,7 +47,9 @@ from ..accounting import ( ) from ..data._normalize import iterticks from ..accounting._mktinfo import unpack_fqme -from ..log import get_logger +from ._util import ( + log, # sub-sys logger +) from ._messages import ( BrokerdCancel, BrokerdOrder, @@ -58,8 +60,6 @@ from ._messages import ( BrokerdError, ) -log = get_logger(__name__) - class PaperBoi(Struct): ''' diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py new file mode 100644 index 00000000..ec93512d --- /dev/null +++ b/piker/clearing/_util.py @@ -0,0 +1,33 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +""" +Sub-sys module commons. + +""" +from functools import partial + +from ..log import ( + get_logger, + get_console_log, +) +subsys: str = 'piker.clearing' + +log = get_logger(subsys) + +get_console_log = partial( + get_console_log, + name=subsys, +) From a462de6f2d4d2ee42081db4a29de0cca218cc5d1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 12:07:08 -0400 Subject: [PATCH 063/294] Use a single log for entire `.service` subsys --- piker/service/__init__.py | 1 + piker/service/_actor_runtime.py | 6 ++---- piker/service/_ahab.py | 6 ++---- piker/service/_daemon.py | 6 ++---- piker/service/_mngr.py | 6 ++---- piker/service/_registry.py | 7 ++----- piker/service/_util.py | 33 +++++++++++++++++++++++++++++++++ piker/service/elastic.py | 14 +++++--------- piker/service/marketstore.py | 10 +++++----- 9 files changed, 54 insertions(+), 35 deletions(-) create mode 100644 piker/service/_util.py diff --git a/piker/service/__init__.py b/piker/service/__init__.py index 3b9767cd..a885bc39 100644 --- a/piker/service/__init__.py +++ b/piker/service/__init__.py @@ -20,6 +20,7 @@ Actor-runtime service orchestration machinery. 
""" from __future__ import annotations +from ._util import log from ._mngr import Services from ._registry import ( # noqa _tractor_kwargs, diff --git a/piker/service/_actor_runtime.py b/piker/service/_actor_runtime.py index b92ad221..db727e2e 100644 --- a/piker/service/_actor_runtime.py +++ b/piker/service/_actor_runtime.py @@ -34,8 +34,8 @@ from contextlib import ( import tractor import trio -from ..log import ( - get_logger, +from ._util import ( + log, # sub-sys logger get_console_log, ) from ._mngr import ( @@ -47,8 +47,6 @@ from ._registry import ( # noqa open_registry, ) -log = get_logger(__name__) - def get_tractor_runtime_kwargs() -> dict[str, Any]: ''' diff --git a/piker/service/_ahab.py b/piker/service/_ahab.py index 7c3133e1..0629aeda 100644 --- a/piker/service/_ahab.py +++ b/piker/service/_ahab.py @@ -48,14 +48,12 @@ from requests.exceptions import ( ReadTimeout, ) -from ..log import ( - get_logger, +from ._util import ( + log, # sub-sys logger get_console_log, ) from .. import config -log = get_logger(__name__) - class DockerNotStarted(Exception): 'Prolly you dint start da daemon bruh' diff --git a/piker/service/_daemon.py b/piker/service/_daemon.py index 45d6cb81..e2581081 100644 --- a/piker/service/_daemon.py +++ b/piker/service/_daemon.py @@ -30,8 +30,8 @@ from contextlib import ( import tractor -from ..log import ( - get_logger, +from ._util import ( + log, # sub-sys logger get_console_log, ) from ..brokers import get_brokermod @@ -41,8 +41,6 @@ from ._mngr import ( from ._actor_runtime import maybe_open_pikerd from ._registry import find_service -log = get_logger(__name__) - # `brokerd` enabled modules # NOTE: keeping this list as small as possible is part of our caps-sec # model and should be treated with utmost care! diff --git a/piker/service/_mngr.py b/piker/service/_mngr.py index 04f396af..80a84487 100644 --- a/piker/service/_mngr.py +++ b/piker/service/_mngr.py @@ -28,12 +28,10 @@ import trio from trio_typing import TaskStatus import tractor -from ..log import ( - get_logger, +from ._util import ( + log, # sub-sys logger ) -log = get_logger(__name__) - # TODO: factor this into a ``tractor.highlevel`` extension # pack for the library. diff --git a/piker/service/_registry.py b/piker/service/_registry.py index f487e2a4..7ae11937 100644 --- a/piker/service/_registry.py +++ b/piker/service/_registry.py @@ -28,13 +28,10 @@ from typing import ( import tractor - -from ..log import ( - get_logger, +from ._util import ( + log, # sub-sys logger ) -log = get_logger(__name__) - _default_registry_host: str = '127.0.0.1' _default_registry_port: int = 6116 _default_reg_addr: tuple[str, int] = ( diff --git a/piker/service/_util.py b/piker/service/_util.py new file mode 100644 index 00000000..bdf23dab --- /dev/null +++ b/piker/service/_util.py @@ -0,0 +1,33 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+""" +Sub-sys module commons. + +""" +from functools import partial + +from ..log import ( + get_logger, + get_console_log, +) +subsys: str = 'piker.service' + +log = get_logger(subsys) + +get_console_log = partial( + get_console_log, + name=subsys, +) diff --git a/piker/service/elastic.py b/piker/service/elastic.py index 31221d57..71097dcb 100644 --- a/piker/service/elastic.py +++ b/piker/service/elastic.py @@ -20,21 +20,17 @@ from typing import ( TYPE_CHECKING, ) +import asks if TYPE_CHECKING: import docker from ._ahab import DockerContainer -from piker.log import ( - get_logger, - get_console_log +from . import log # sub-sys logger +from ._util import ( + get_console_log, ) -import asks - - -log = get_logger(__name__) - # container level config _config = { @@ -92,7 +88,7 @@ def start_elasticsearch( 'http://localhost:19200/_cat/health', params={'format': 'json'} )).json() - kog.info( + log.info( 'ElasticSearch cntr health:\n' f'{health}' ) diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py index 5c4f90db..8d99b7cd 100644 --- a/piker/service/marketstore.py +++ b/piker/service/marketstore.py @@ -54,14 +54,14 @@ if TYPE_CHECKING: import docker from ._ahab import DockerContainer +from ._util import ( + log, # sub-sys logger + get_console_log, +) from ..data.feed import maybe_open_feed -from ..log import get_logger, get_console_log from .._profile import Profiler -log = get_logger(__name__) - - # ahabd-supervisor and container level config _config = { 'grpc_listen_port': 5995, @@ -703,7 +703,7 @@ async def open_tsdb_client( # profiler('Finished db arrays diffs') - syms = await storage.client.list_symbols() + _ = await storage.client.list_symbols() # log.info(f'Existing tsdb symbol set:\n{pformat(syms)}') # profiler(f'listed symbols {syms}') yield storage From 2ae9576cd8e761e645692bd37a3b8ea4bc6c9ded Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 12:28:33 -0400 Subject: [PATCH 064/294] Add common logger instance for `.brokers` --- piker/brokers/_util.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/piker/brokers/_util.py b/piker/brokers/_util.py index d1b2aac5..ba123156 100644 --- a/piker/brokers/_util.py +++ b/piker/brokers/_util.py @@ -15,13 +15,28 @@ # along with this program. If not, see . """ -Handy utils. +Handy cross-broker utils. 
+ """ +from functools import partial + import json import asks import logging -from ..log import colorize_json +from ..log import ( + get_logger, + get_console_log, + colorize_json, +) +subsys: str = 'piker.brokers' + +log = get_logger(subsys) + +get_console_log = partial( + get_console_log, + name=subsys, +) class BrokerError(Exception): @@ -69,7 +84,6 @@ class DataThrottle(BrokerError): # TODO: add in throttle metrics/feedback - def resproc( resp: asks.response_objects.Response, log: logging.Logger, From ea42f66b545712005cb744096b60474db78cdc90 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 12:32:21 -0400 Subject: [PATCH 065/294] Use common `.brokers` logger in most backends --- piker/brokers/cli.py | 12 ++++++++---- piker/brokers/core.py | 5 +---- piker/brokers/data.py | 8 ++++---- piker/brokers/ib/_util.py | 4 +--- piker/brokers/ib/api.py | 4 +--- piker/brokers/questrade.py | 11 +++++++---- piker/brokers/robinhood.py | 15 +++++++++------ 7 files changed, 31 insertions(+), 28 deletions(-) diff --git a/piker/brokers/cli.py b/piker/brokers/cli.py index f86c679e..1bfb05d6 100644 --- a/piker/brokers/cli.py +++ b/piker/brokers/cli.py @@ -28,7 +28,13 @@ import tractor from ..cli import cli from .. import watchlists as wl -from ..log import get_console_log, colorize_json, get_logger +from ..log import ( + colorize_json, +) +from ._util import ( + log, + get_console_log, +) from ..service import ( maybe_spawn_brokerd, maybe_open_pikerd, @@ -38,9 +44,7 @@ from ..brokers import ( get_brokermod, data, ) - -log = get_logger('cli') -DEFAULT_BROKER = 'questrade' +DEFAULT_BROKER = 'binance' _config_dir = click.get_app_dir('piker') _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') diff --git a/piker/brokers/core.py b/piker/brokers/core.py index 3e9e1614..b3651c1d 100644 --- a/piker/brokers/core.py +++ b/piker/brokers/core.py @@ -26,15 +26,12 @@ from typing import List, Dict, Any, Optional import trio -from ..log import get_logger +from ._util import log from . import get_brokermod from ..service import maybe_spawn_brokerd from .._cacheables import open_cached_client -log = get_logger(__name__) - - async def api(brokername: str, methname: str, **kwargs) -> dict: """Make (proxy through) a broker API call by name and return its result. """ diff --git a/piker/brokers/data.py b/piker/brokers/data.py index 5183d2c4..6d178b51 100644 --- a/piker/brokers/data.py +++ b/piker/brokers/data.py @@ -41,13 +41,13 @@ import tractor from tractor.experimental import msgpub from async_generator import asynccontextmanager -from ..log import get_logger, get_console_log +from ._util import ( + log, + get_console_log, +) from . 
import get_brokermod
 
 
-log = get_logger(__name__)
-
-
 async def wait_for_network(
     net_func: Callable,
     sleep: int = 1
diff --git a/piker/brokers/ib/_util.py b/piker/brokers/ib/_util.py
index 14fd4d0b..a94f77f1 100644
--- a/piker/brokers/ib/_util.py
+++ b/piker/brokers/ib/_util.py
@@ -24,9 +24,7 @@ import subprocess
 
 import tractor
 
-from piker.log import get_logger
-
-log = get_logger(__name__)
+from .._util import log
 
 
 _reset_tech: Literal[
diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py
index c6513204..62c0adeb 100644
--- a/piker/brokers/ib/api.py
+++ b/piker/brokers/ib/api.py
@@ -68,12 +68,10 @@ import numpy as np
 
 from piker import config
 from piker.log import get_logger
+from piker.brokers._util import log
 from piker.data._source import base_ohlc_dtype
 
 
-log = get_logger(__name__)
-
-
 _time_units = {
     's': ' sec',
     'm': ' mins',
diff --git a/piker/brokers/questrade.py b/piker/brokers/questrade.py
index a3b5cfe0..b7042bdf 100644
--- a/piker/brokers/questrade.py
+++ b/piker/brokers/questrade.py
@@ -43,10 +43,13 @@ from ..calc import humanize, percent_change
 from .._cacheables import open_cached_client, async_lifo_cache
 from .. import config
 from ._util import resproc, BrokerError, SymbolNotFound
-from ..log import get_logger, colorize_json, get_console_log
-
-
-log = get_logger(__name__)
+from ..log import (
+    colorize_json,
+)
+from ._util import (
+    log,
+    get_console_log,
+)
 
 _use_practice_account = False
 _refresh_token_ep = 'https://{}login.questrade.com/oauth2/'
diff --git a/piker/brokers/robinhood.py b/piker/brokers/robinhood.py
index 71b21055..8fc5739f 100644
--- a/piker/brokers/robinhood.py
+++ b/piker/brokers/robinhood.py
@@ -27,12 +27,13 @@ from typing import List
 from async_generator import asynccontextmanager
 import asks
 
-from ..log import get_logger
-from ._util import resproc, BrokerError
+from ._util import (
+    resproc,
+    BrokerError,
+    log,
+)
 from ..calc import percent_change
 
-log = get_logger(__name__)
-
 _service_ep = 'https://api.robinhood.com'
 
 
@@ -65,8 +66,10 @@ class Client:
         self.api = _API(self._sess)
 
     def _zip_in_order(self, symbols: [str], quotes: List[dict]):
-        return {quote.get('symbol', sym) if quote else sym: quote
-                for sym, quote in zip(symbols, results_dict)}
+        return {
+            quote.get('symbol', sym) if quote else sym: quote
+            for sym, quote in zip(symbols, quotes)
+        }
 
     async def quote(self, symbols: [str]):
         """Retrieve quotes for a list of ``symbols``.

From 1c576d72d1d5de82bf335c639eeac2644b2296cf Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 22 Mar 2023 13:22:24 -0400
Subject: [PATCH 066/294] Dump `Position`s as pformatted dicts for now..
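(Illustrative only — a sketch of the intended rendering with made up
field values, not captured output:

    >>> print(pos)    # `Position.__repr__()` now delegates to `pformat()`
    {'expiry': None,
     'size': 10.0,
     'symbol': <Symbol ...>,
     ...}
)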
--- piker/accounting/_pos.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 724b357f..e7f83d98 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -26,6 +26,7 @@ from __future__ import annotations from contextlib import contextmanager as cm from decimal import Decimal from math import copysign +from pprint import pformat import re from typing import ( Any, @@ -88,6 +89,9 @@ class Position(Struct): expiry: Optional[datetime] = None + def __repr__(self) -> str: + return pformat(self.to_dict()) + def to_dict(self) -> dict: return { f: getattr(self, f) @@ -657,7 +661,7 @@ class PpTable(Struct): if pp_entries: log.info( f'Updating ``pps.toml``:\n' - f'Current positions:\n{pp_entries}' + f'Current positions:\n{pformat(pp_entries)}' ) self.conf[self.brokername][self.acctid] = pp_entries From 3e5da64571c8d6ebf71f63f5c81cf821ed3dea03 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 13:23:23 -0400 Subject: [PATCH 067/294] Cache contract lookups from `Client.get_con()` --- piker/brokers/ib/api.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 62c0adeb..7eac4bcc 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -24,7 +24,10 @@ from contextlib import asynccontextmanager as acm from contextlib import AsyncExitStack from dataclasses import asdict, astuple from datetime import datetime -from functools import partial +from functools import ( + partial, + lru_cache, +) import itertools from math import isnan from typing import ( @@ -328,7 +331,7 @@ class Client: self.ib.RaiseRequestErrors = True # contract cache - self._feeds: dict[str, trio.abc.SendChannel] = {} + self._cons: dict[str, Contract] = {} # NOTE: the ib.client here is "throttled" to 45 rps by default @@ -612,13 +615,20 @@ class Client: return con + # TODO: make this work with our `MethodProxy`.. + # @lru_cache(maxsize=None) async def get_con( self, conid: int, ) -> Contract: - return await self.ib.qualifyContractsAsync( - ibis.Contract(conId=conid) - ) + try: + return self._cons[conid] + except KeyError: + con: Contract = await self.ib.qualifyContractsAsync( + ibis.Contract(conId=conid) + ) + self._cons[conid] = con + return con def parse_patt2fqsn( self, From c59ec77d9cdf37259293f374414fab5b116c7c97 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 14:09:23 -0400 Subject: [PATCH 068/294] WIP: refactor ib pp load init --- piker/brokers/ib/broker.py | 276 ++++++++++++++++++++++--------------- 1 file changed, 163 insertions(+), 113 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 66dfe212..7cd857d7 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -59,7 +59,7 @@ from piker.accounting import ( open_pps, PpTable, ) -from piker.log import get_console_log +from .._util import get_console_log from piker.clearing._messages import ( Order, Status, @@ -281,18 +281,21 @@ async def recv_trade_updates( async def update_ledger_from_api_trades( trade_entries: list[dict[str, Any]], client: Union[Client, MethodProxy], + accounts_def_inv: bidict[str, str], ) -> tuple[ dict[str, Transaction], dict[str, dict], ]: - # XXX; ERRGGG.. # pack in the "primary/listing exchange" value from a # contract lookup since it seems this isn't available by # default from the `.fills()` method endpoint... 
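     # (each `entry` is a dict-ified ``ib_insync`` fill; the loop
     # below resolves the missing venue info per contract and stamps
     # ``Contract.secType`` on as the entry's `asset_type` so the
     # ledger normalizer can consume it directly.)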
for entry in trade_entries: condict = entry['contract'] + # print( + # f"{condict['symbol']}: GETTING CONTRACT INFO!\n" + # ) conid = condict['conId'] pexch = condict['primaryExchange'] @@ -310,9 +313,8 @@ async def update_ledger_from_api_trades( # pack in the ``Contract.secType`` entry['asset_type'] = condict['secType'] - conf = get_config() entries = api_trades_to_ledger_entries( - conf['accounts'].inverse, + accounts_def_inv, trade_entries, ) # normalize recent session's trades to the `Transaction` type @@ -340,9 +342,16 @@ async def update_and_audit_msgs( # retreive equivalent ib reported position message # for comparison/audit versus the piker equivalent # breakeven pp calcs. + # if ( + # acctid == 'reg' + # and bs_mktid == 36285627 + # ): + # await tractor.breakpoint() + ibppmsg = cids2pps.get((acctid, bs_mktid)) if ibppmsg: + symbol = ibppmsg.symbol msg = BrokerdPosition( broker='ib', @@ -353,7 +362,7 @@ async def update_and_audit_msgs( # table.. account=ibppmsg.account, # XXX: the `.ib` is stripped..? - symbol=ibppmsg.symbol, + symbol=symbol, currency=ibppmsg.currency, size=p.size, avg_price=p.ppu, @@ -432,6 +441,81 @@ async def update_and_audit_msgs( return msgs +async def aggr_open_orders( + order_msgs: list[Status], + client: Client, + proxy: MethodProxy, + accounts_def: bidict[str, str], + +) -> None: + ''' + Collect all open orders from client and fill in `order_msgs: list`. + + ''' + trades: list[Trade] = client.ib.openTrades() + for trade in trades: + order = trade.order + quant = trade.order.totalQuantity + action = order.action.lower() + size = { + 'sell': -1, + 'buy': 1, + }[action] * quant + con = trade.contract + + # TODO: in the case of the SMART venue (aka ib's + # router-clearing sys) we probably should handle + # showing such orders overtop of the fqsn for the + # primary exchange, how to map this easily is going + # to be a bit tricky though? + deats = await proxy.con_deats(contracts=[con]) + fqsn = list(deats)[0] + + reqid = order.orderId + + # TODO: maybe embed a ``BrokerdOrder`` instead + # since then we can directly load it on the client + # side in the order mode loop? + msg = Status( + time_ns=time.time_ns(), + resp='open', + oid=str(reqid), + reqid=reqid, + + # embedded order info + req=Order( + action=action, + exec_mode='live', + oid=str(reqid), + symbol=fqsn, + account=accounts_def.inverse[order.account], + price=order.lmtPrice, + size=size, + ), + src='ib', + ) + order_msgs.append(msg) + + return order_msgs + + +# proxy wrapper for starting trade event stream +async def open_trade_event_stream( + client: Client, + task_status: TaskStatus[ + trio.abc.ReceiveChannel + ] = trio.TASK_STATUS_IGNORED, +): + # each api client has a unique event stream + async with tractor.to_asyncio.open_channel_from( + recv_trade_updates, + client=client, + ) as (first, trade_event_stream): + + task_status.started(trade_event_stream) + await trio.sleep_forever() + + @tractor.context async def trades_dialogue( @@ -465,7 +549,10 @@ async def trades_dialogue( # we might also want to delegate a specific actor for # ledger writing / reading for speed? async with ( - open_client_proxies() as (proxies, aioclients), + open_client_proxies() as ( + proxies, + aioclients, + ), ): # Open a trade ledgers stack for appending trade records over # multiple accounts. 
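(A quick sketch of the `bidict` accounts table the helpers above lean
on — the account alias and number here are made up:

    from bidict import bidict

    accounts_def = bidict({'ib.algopaper': 'DU1234567'})
    accounts_def['ib.algopaper']         # -> 'DU1234567'
    accounts_def.inverse['DU1234567']    # -> 'ib.algopaper'

i.e. `accounts_def.inverse` provides the account-number ->
fully-qualified-alias mapping used when normalizing fills back to
piker account names.)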
@@ -473,6 +560,9 @@ async def trades_dialogue( ledgers: dict[str, dict] = {} tables: dict[str, PpTable] = {} order_msgs: list[Status] = [] + conf = get_config() + accounts_def_inv = conf['accounts'].inverse + with ( ExitStack() as lstack, ): @@ -491,7 +581,17 @@ async def trades_dialogue( acctid, ) ) - table = tables[acctid] = lstack.enter_context( + + # load all positions from `pps.toml`, cross check with + # ib's positions data, and relay re-formatted pps as + # msgs to the ems. + # __2 cases__: + # - new trades have taken place this session that we want to + # always reprocess indempotently, + # - no new trades yet but we want to reload and audit any + # positions reported by ib's sys that may not yet be in + # piker's ``pps.toml`` state-file. + tables[acctid] = lstack.enter_context( open_pps( 'ib', acctid, @@ -501,57 +601,54 @@ async def trades_dialogue( for account, proxy in proxies.items(): client = aioclients[account] - trades: list[Trade] = client.ib.openTrades() - for trade in trades: - order = trade.order - quant = trade.order.totalQuantity - action = order.action.lower() - size = { - 'sell': -1, - 'buy': 1, - }[action] * quant - con = trade.contract - # TODO: in the case of the SMART venue (aka ib's - # router-clearing sys) we probably should handle - # showing such orders overtop of the fqsn for the - # primary exchange, how to map this easily is going - # to be a bit tricky though? - deats = await proxy.con_deats(contracts=[con]) - fqsn = list(deats)[0] + # order_msgs is filled in by this helper + await aggr_open_orders( + order_msgs, + client, + proxy, + accounts_def, + ) + acctid: str = account.strip('ib.') + ledger: dict = ledgers[acctid] + table: PpTable = tables[acctid] - reqid = order.orderId - - # TODO: maybe embed a ``BrokerdOrder`` instead - # since then we can directly load it on the client - # side in the order mode loop? - msg = Status( - time_ns=time.time_ns(), - resp='open', - oid=str(reqid), - reqid=reqid, - - # embedded order info - req=Order( - action=action, - exec_mode='live', - oid=str(reqid), - symbol=fqsn, - account=accounts_def.inverse[order.account], - price=order.lmtPrice, - size=size, - ), - src='ib', + # update trades ledgers for all accounts from connected + # api clients which report trades for **this session**. + trades = await proxy.trades() + if trades: + ( + trans_by_acct, + api_to_ledger_entries, + ) = await update_ledger_from_api_trades( + trades, + proxy, + accounts_def_inv, ) - order_msgs.append(msg) - # process pp value reported from ib's system. we only use these - # to cross-check sizing since average pricing on their end uses - # the so called (bs) "FIFO" style which more or less results in - # a price that's not useful for traders who want to not lose - # money.. xb + # if new trades are detected from the API, prepare + # them for the ledger file and update the pptable. + if api_to_ledger_entries: + trade_entries = api_to_ledger_entries.get(acctid) + + if trade_entries: + # write ledger with all new trades + # **AFTER** we've updated the + # `pps.toml` from the original + # ledger state! (i.e. this is + # currently done on exit) + ledger.update(trade_entries) + + trans = trans_by_acct.get(acctid) + if trans: + table.update_from_trans(trans) + + # process pp value reported from ib's system. we only + # use these to cross-check sizing since average pricing + # on their end uses the so called (bs) "FIFO" style + # which more or less results in a price that's not + # useful for traders who want to not lose money.. 
xb for pos in client.positions(): - # collect all ib-pp reported positions so that we can be # sure know which positions to update from the ledger if # any are missing from the ``pps.toml`` @@ -560,13 +657,14 @@ async def trades_dialogue( acctid = msg.account = accounts_def.inverse[msg.account] acctid = acctid.strip('ib.') cids2pps[(acctid, bs_mktid)] = msg + assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') - ledger = ledgers[acctid] - table = tables[acctid] + ledger: dict = ledgers[acctid] + table: PpTable = tables[acctid] + pp: Position = table.pps.get(bs_mktid) - pp = table.pps.get(bs_mktid) if ( not pp or pp.size != msg.size @@ -574,33 +672,6 @@ async def trades_dialogue( trans = norm_trade_records(ledger) table.update_from_trans(trans) - # update trades ledgers for all accounts from connected - # api clients which report trades for **this session**. - trades = await proxy.trades() - ( - trans_by_acct, - api_to_ledger_entries, - ) = await update_ledger_from_api_trades( - trades, - proxy, - ) - - # if new trades are detected from the API, prepare - # them for the ledger file and update the pptable. - if api_to_ledger_entries: - trade_entries = api_to_ledger_entries.get(acctid) - - if trade_entries: - # write ledger with all new trades **AFTER** - # we've updated the `pps.toml` from the - # original ledger state! (i.e. this is - # currently done on exit) - ledger.update(trade_entries) - - trans = trans_by_acct.get(acctid) - if trans: - table.update_from_trans(trans) - # XXX: not sure exactly why it wouldn't be in # the updated output (maybe this is a bug?) but # if you create a pos from TWS and then load it @@ -630,17 +701,12 @@ async def trades_dialogue( f'piker: {pp.size}\n' ) + # iterate all (newly) updated pps tables for every + # client-account and build out position msgs to deliver to + # EMS. + for acctid, table in tables.items(): active_pps, closed_pps = table.dump_active() - # load all positions from `pps.toml`, cross check with - # ib's positions data, and relay re-formatted pps as - # msgs to the ems. - # __2 cases__: - # - new trades have taken place this session that we want to - # always reprocess indempotently, - # - no new trades yet but we want to reload and audit any - # positions reported by ib's sys that may not yet be in - # piker's ``pps.toml`` state-file. 
for pps in [active_pps, closed_pps]: msgs = await update_and_audit_msgs( acctid, @@ -661,22 +727,6 @@ async def trades_dialogue( tuple(name for name in accounts_def if name in accounts), )) - # proxy wrapper for starting trade event stream - async def open_trade_event_stream( - client: Client, - task_status: TaskStatus[ - trio.abc.ReceiveChannel - ] = trio.TASK_STATUS_IGNORED, - ): - # each api client has a unique event stream - async with tractor.to_asyncio.open_channel_from( - recv_trade_updates, - client=client, - ) as (first, trade_event_stream): - - task_status.started(trade_event_stream) - await trio.sleep_forever() - async with ( ctx.open_stream() as ems_stream, trio.open_nursery() as n, @@ -723,7 +773,7 @@ async def trades_dialogue( async def emit_pp_update( ems_stream: tractor.MsgStream, trade_entry: dict, - accounts_def: bidict, + accounts_def: bidict[str, str], proxies: dict, cids2pps: dict, @@ -733,16 +783,16 @@ async def emit_pp_update( ) -> None: # compute and relay incrementally updated piker pp - acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']] + accounts_def_inv: bidict[str, str] = accounts_def.inverse + acctid = accounts_def_inv[trade_entry['execution']['acctNumber']] proxy = proxies[acctid] - - acctid = acctid.strip('ib.') ( records_by_acct, api_to_ledger_entries, ) = await update_ledger_from_api_trades( [trade_entry], proxy, + accounts_def_inv, ) trans = records_by_acct[acctid] r = list(trans.values())[0] @@ -1244,7 +1294,7 @@ def parse_flex_dt( def api_trades_to_ledger_entries( - accounts: bidict, + accounts: bidict[str, str], # TODO: maybe we should just be passing through the # ``ib_insync.order.Trade`` instance directly here From 59b095b2d5e710371547140a251e1fd84aa9b93e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 22 Mar 2023 19:15:13 -0400 Subject: [PATCH 069/294] `kraken`: heh, use `trio_util` for trades streamz tooo XD --- piker/brokers/kraken/broker.py | 5 +- piker/brokers/kraken/feed.py | 100 +++++++++++++++++---------------- 2 files changed, 52 insertions(+), 53 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index d32b6321..1c551343 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -34,7 +34,6 @@ from typing import ( Union, ) -from async_generator import aclosing from bidict import bidict import pendulum import trio @@ -672,11 +671,9 @@ async def trades_dialogue( token=token, ), ) as ws, - aclosing(stream_messages(ws)) as stream, + stream_messages(ws) as stream, trio.open_nursery() as nurse, ): - stream = stream_messages(ws) - # task for processing inbound requests from ems nurse.start_soon( handle_order_requests, diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index e37fdb49..5ea96e28 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -78,6 +78,7 @@ class OHLC(Struct): ticks: list[Any] = [] +@trio_async_generator async def stream_messages( ws: NoBsWs, ): @@ -133,63 +134,64 @@ async def process_data_feed_msgs( Parse and pack data feed messages. 
''' - async for msg in stream_messages(ws): - match msg: - case { - 'errorMessage': errmsg - }: - raise BrokerError(errmsg) + async with stream_messages(ws) as ws_stream: + async for msg in ws_stream: + match msg: + case { + 'errorMessage': errmsg + }: + raise BrokerError(errmsg) - case { - 'event': 'subscriptionStatus', - } as sub: - log.info( - 'WS subscription is active:\n' - f'{sub}' - ) - continue - - case [ - chan_id, - *payload_array, - chan_name, - pair - ]: - if 'ohlc' in chan_name: - ohlc = OHLC( - chan_id, - chan_name, - pair, - *payload_array[0] + case { + 'event': 'subscriptionStatus', + } as sub: + log.info( + 'WS subscription is active:\n' + f'{sub}' ) - ohlc.typecast() - yield 'ohlc', ohlc + continue - elif 'spread' in chan_name: + case [ + chan_id, + *payload_array, + chan_name, + pair + ]: + if 'ohlc' in chan_name: + ohlc = OHLC( + chan_id, + chan_name, + pair, + *payload_array[0] + ) + ohlc.typecast() + yield 'ohlc', ohlc - bid, ask, ts, bsize, asize = map( - float, payload_array[0]) + elif 'spread' in chan_name: - # TODO: really makes you think IB has a horrible API... - quote = { - 'symbol': pair.replace('/', ''), - 'ticks': [ - {'type': 'bid', 'price': bid, 'size': bsize}, - {'type': 'bsize', 'price': bid, 'size': bsize}, + bid, ask, ts, bsize, asize = map( + float, payload_array[0]) - {'type': 'ask', 'price': ask, 'size': asize}, - {'type': 'asize', 'price': ask, 'size': asize}, - ], - } - yield 'l1', quote + # TODO: really makes you think IB has a horrible API... + quote = { + 'symbol': pair.replace('/', ''), + 'ticks': [ + {'type': 'bid', 'price': bid, 'size': bsize}, + {'type': 'bsize', 'price': bid, 'size': bsize}, - # elif 'book' in msg[-2]: - # chan_id, *payload_array, chan_name, pair = msg - # print(msg) + {'type': 'ask', 'price': ask, 'size': asize}, + {'type': 'asize', 'price': ask, 'size': asize}, + ], + } + yield 'l1', quote - case _: - print(f'UNHANDLED MSG: {msg}') - # yield msg + # elif 'book' in msg[-2]: + # chan_id, *payload_array, chan_name, pair = msg + # print(msg) + + case _: + print(f'UNHANDLED MSG: {msg}') + # yield msg def normalize( From 199a5e8b38a650f4a25644b965bdf2219835d1c8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Mar 2023 10:10:34 -0400 Subject: [PATCH 070/294] `ib`: stick exc handler around client connection erros --- piker/brokers/ib/feed.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index d16a02a9..ead4b8e4 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -736,9 +736,19 @@ async def stream_quotes( sym = symbols[0] log.info(f'request for real-time quotes: {sym}') + proxy: MethodProxy async with open_data_client() as proxy: - con, first_ticker, details = await proxy.get_sym_details(symbol=sym) + try: + ( + con, + first_ticker, + details, + ) = await proxy.get_sym_details(symbol=sym) + except ConnectionError: + log.exception(f'Proxy is ded {proxy._aio_ns}') + raise + first_quote = normalize(first_ticker) # print(f'first quote: {first_quote}') @@ -825,7 +835,7 @@ async def stream_quotes( await trio.sleep_forever() return # we never expect feed to come up? 
- cs: Optional[trio.CancelScope] = None + cs: trio.CancelScope | None = None startup: bool = True while ( startup From 16e11d447cc9c84d5b384b6c249648ab9d83897c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Mar 2023 10:23:29 -0400 Subject: [PATCH 071/294] Move toml table decoder to separate mod --- piker/accounting/_pos.py | 137 +-------------------------------- piker/accounting/_toml.py | 157 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 161 insertions(+), 133 deletions(-) create mode 100644 piker/accounting/_toml.py diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index e7f83d98..5bab2394 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -27,7 +27,6 @@ from contextlib import contextmanager as cm from decimal import Decimal from math import copysign from pprint import pformat -import re from typing import ( Any, Iterator, @@ -38,8 +37,11 @@ from typing import ( import pendulum from pendulum import datetime, now -import toml +from ._toml import ( + toml, + PpsEncoder, +) from ._ledger import ( Transaction, iter_by_dt, @@ -733,137 +735,6 @@ def load_pps_from_ledger( return records, updated -# TODO: instead see if we can hack tomli and tomli-w to do the same: -# - https://github.com/hukkin/tomli -# - https://github.com/hukkin/tomli-w -class PpsEncoder(toml.TomlEncoder): - ''' - Special "styled" encoder that makes a ``pps.toml`` redable and - compact by putting `.clears` tables inline and everything else - flat-ish. - - ''' - separator = ',' - - def dump_list(self, v): - ''' - Dump an inline list with a newline after every element and - with consideration for denoted inline table types. - - ''' - retval = "[\n" - for u in v: - if isinstance(u, toml.decoder.InlineTableDict): - out = self.dump_inline_table(u) - else: - out = str(self.dump_value(u)) - - retval += " " + out + "," + "\n" - retval += "]" - return retval - - def dump_inline_table(self, section): - """Preserve inline table in its compact syntax instead of expanding - into subsection. - https://github.com/toml-lang/toml#user-content-inline-table - """ - val_list = [] - for k, v in section.items(): - # if isinstance(v, toml.decoder.InlineTableDict): - if isinstance(v, dict): - val = self.dump_inline_table(v) - else: - val = str(self.dump_value(v)) - - val_list.append(k + " = " + val) - - retval = "{ " + ", ".join(val_list) + " }" - return retval - - def dump_sections(self, o, sup): - retstr = "" - if sup != "" and sup[-1] != ".": - sup += '.' - retdict = self._dict() - arraystr = "" - for section in o: - qsection = str(section) - value = o[section] - - if not re.match(r'^[A-Za-z0-9_-]+$', section): - qsection = toml.encoder._dump_str(section) - - # arrayoftables = False - if ( - self.preserve - and isinstance(value, toml.decoder.InlineTableDict) - ): - retstr += ( - qsection - + - " = " - + - self.dump_inline_table(o[section]) - + - '\n' # only on the final terminating left brace - ) - - # XXX: this code i'm pretty sure is just blatantly bad - # and/or wrong.. - # if isinstance(o[section], list): - # for a in o[section]: - # if isinstance(a, dict): - # arrayoftables = True - # if arrayoftables: - # for a in o[section]: - # arraytabstr = "\n" - # arraystr += "[[" + sup + qsection + "]]\n" - # s, d = self.dump_sections(a, sup + qsection) - # if s: - # if s[0] == "[": - # arraytabstr += s - # else: - # arraystr += s - # while d: - # newd = self._dict() - # for dsec in d: - # s1, d1 = self.dump_sections(d[dsec], sup + - # qsection + "." 
+ - # dsec) - # if s1: - # arraytabstr += ("[" + sup + qsection + - # "." + dsec + "]\n") - # arraytabstr += s1 - # for s1 in d1: - # newd[dsec + "." + s1] = d1[s1] - # d = newd - # arraystr += arraytabstr - - elif isinstance(value, dict): - retdict[qsection] = o[section] - - elif o[section] is not None: - retstr += ( - qsection - + - " = " - + - str(self.dump_value(o[section])) - ) - - # if not isinstance(value, dict): - if not isinstance(value, toml.decoder.InlineTableDict): - # inline tables should not contain newlines: - # https://toml.io/en/v1.0.0#inline-table - retstr += '\n' - - else: - raise ValueError(value) - - retstr += arraystr - return (retstr, retdict) - - @cm def open_pps( brokername: str, diff --git a/piker/accounting/_toml.py b/piker/accounting/_toml.py new file mode 100644 index 00000000..9b02970d --- /dev/null +++ b/piker/accounting/_toml.py @@ -0,0 +1,157 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License + +# along with this program. If not, see . + +''' +TOML codec hacks to make position tables look decent. + +(looking at you "`toml`-lib"..) + +''' +import re + +import toml + + +# TODO: instead see if we can hack tomli and tomli-w to do the same: +# - https://github.com/hukkin/tomli +# - https://github.com/hukkin/tomli-w +class PpsEncoder(toml.TomlEncoder): + ''' + Special "styled" encoder that makes a ``pps.toml`` redable and + compact by putting `.clears` tables inline and everything else + flat-ish. + + ''' + separator = ',' + + def dump_list(self, v): + ''' + Dump an inline list with a newline after every element and + with consideration for denoted inline table types. + + ''' + retval = "[\n" + for u in v: + if isinstance(u, toml.decoder.InlineTableDict): + out = self.dump_inline_table(u) + else: + out = str(self.dump_value(u)) + + retval += " " + out + "," + "\n" + retval += "]" + return retval + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + https://github.com/toml-lang/toml#user-content-inline-table + """ + val_list = [] + for k, v in section.items(): + # if isinstance(v, toml.decoder.InlineTableDict): + if isinstance(v, dict): + val = self.dump_inline_table(v) + else: + val = str(self.dump_value(v)) + + val_list.append(k + " = " + val) + + retval = "{ " + ", ".join(val_list) + " }" + return retval + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' 
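+        # walk each key in this section: preserved inline-tables are
+        # dumped compactly via `dump_inline_table()`, nested dicts are
+        # deferred into `retdict` for the caller to recurse on, and
+        # scalars are emitted as flat `key = value` lines.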
+ retdict = self._dict() + arraystr = "" + for section in o: + qsection = str(section) + value = o[section] + + if not re.match(r'^[A-Za-z0-9_-]+$', section): + qsection = toml.encoder._dump_str(section) + + # arrayoftables = False + if ( + self.preserve + and isinstance(value, toml.decoder.InlineTableDict) + ): + retstr += ( + qsection + + + " = " + + + self.dump_inline_table(o[section]) + + + '\n' # only on the final terminating left brace + ) + + # XXX: this code i'm pretty sure is just blatantly bad + # and/or wrong.. + # if isinstance(o[section], list): + # for a in o[section]: + # if isinstance(a, dict): + # arrayoftables = True + # if arrayoftables: + # for a in o[section]: + # arraytabstr = "\n" + # arraystr += "[[" + sup + qsection + "]]\n" + # s, d = self.dump_sections(a, sup + qsection) + # if s: + # if s[0] == "[": + # arraytabstr += s + # else: + # arraystr += s + # while d: + # newd = self._dict() + # for dsec in d: + # s1, d1 = self.dump_sections(d[dsec], sup + + # qsection + "." + + # dsec) + # if s1: + # arraytabstr += ("[" + sup + qsection + + # "." + dsec + "]\n") + # arraytabstr += s1 + # for s1 in d1: + # newd[dsec + "." + s1] = d1[s1] + # d = newd + # arraystr += arraytabstr + + elif isinstance(value, dict): + retdict[qsection] = o[section] + + elif o[section] is not None: + retstr += ( + qsection + + + " = " + + + str(self.dump_value(o[section])) + ) + + # if not isinstance(value, dict): + if not isinstance(value, toml.decoder.InlineTableDict): + # inline tables should not contain newlines: + # https://toml.io/en/v1.0.0#inline-table + retstr += '\n' + + else: + raise ValueError(value) + + retstr += arraystr + return (retstr, retdict) From f3049016d680bfa1c215ef5a4ccf87db47729dc0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Mar 2023 12:16:21 -0400 Subject: [PATCH 072/294] `ib`: drop use of `_account2clients` in `load_clients_for_trio()` Instead adjust `load_aio_clients()` to only reload clients detected as non-loaded or disconnected (2 birds), and avoid use of the global module table which could result in stale disconnected clients persisting on multiple `brokerd` client reconnects, resulting in error. --- piker/brokers/ib/api.py | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 7eac4bcc..2281fa25 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -387,8 +387,7 @@ class Client: bar_size, duration, dt_duration = _samplings[sample_period_s] global _enters - # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}') - print( + log.info( f"REQUESTING {duration}'s worth {bar_size} BARS\n" f'{_enters} @ end={end_dt}"' ) @@ -730,7 +729,7 @@ class Client: ) elif ( - exch in ('IDEALPRO') + exch in {'IDEALPRO'} or sectype == 'CASH' ): # if '/' in symbol: @@ -1199,9 +1198,14 @@ async def load_aio_clients( for host, port in combos: sockaddr = (host, port) + + maybe_client = _client_cache.get(sockaddr) if ( - sockaddr in _client_cache - or sockaddr in _scan_ignore + sockaddr in _scan_ignore + or ( + maybe_client + and maybe_client.ib.isConnected() + ) ): continue @@ -1307,19 +1311,13 @@ async def load_clients_for_trio( a ``tractor.to_asyncio.open_channel_from()``. ''' - global _accounts2clients + async with load_aio_clients() as accts2clients: - if _accounts2clients: - to_trio.send_nowait(_accounts2clients) + to_trio.send_nowait(accts2clients) + + # TODO: maybe a sync event to wait on instead? 
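+        # (parking the asyncio side here keeps the loaded clients
+        # alive for as long as the parent `tractor.to_asyncio`
+        # channel stays open)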
await asyncio.sleep(float('inf'))
 
-    else:
-        async with load_aio_clients() as accts2clients:
-            to_trio.send_nowait(accts2clients)
-
-            # TODO: maybe a sync event to wait on instead?
-            await asyncio.sleep(float('inf'))
-
 
 @acm
 async def open_client_proxies() -> tuple[
@@ -1517,7 +1515,8 @@ async def open_client_proxy(
 
     # mock all remote methods on ib ``Client``.
     for name, method in inspect.getmembers(
-        Client, predicate=inspect.isfunction
+        Client,
+        predicate=inspect.isfunction,
     ):
         if '_' == name[0]:
             continue

From aa5f25231a1b3b360788721d8f06d5741201f959 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 23 Mar 2023 12:52:53 -0400
Subject: [PATCH 073/294] `ib`: never override existing ledger records

If user has loaded from a flex report then we don't want the API
records from the same period to override those; instead just update
with any missing fields from the API schema.

Also, always `str`-ify the contract id (what is set for the
`.bs_mktid`) *before* packing into transaction type to ensure when
serialized to `pps.toml` there are no discrepancies at the codec
level.. smh
---
 piker/brokers/ib/broker.py | 33 +++++++++++++++++----------------
 1 file changed, 17 insertions(+), 16 deletions(-)

diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index 7cd857d7..fa94044c 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -342,12 +342,6 @@ async def update_and_audit_msgs(
     # retreive equivalent ib reported position message
     # for comparison/audit versus the piker equivalent
     # breakeven pp calcs.
-    # if (
-    #     acctid == 'reg'
-    #     and bs_mktid == 36285627
-    # ):
-    #     await tractor.breakpoint()
-
     ibppmsg = cids2pps.get((acctid, bs_mktid))
 
     if ibppmsg:
@@ -777,15 +771,15 @@ async def emit_pp_update(
     proxies: dict,
     cids2pps: dict,
 
-    ledgers,
-    tables,
+    ledgers: dict[str, dict[str, Any]],
+    tables: dict[str, PpTable],
 
 ) -> None:
 
     # compute and relay incrementally updated piker pp
-    acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']]
-    proxy = proxies[acctid]
-
-    acctid = acctid.strip('ib.')
+    accounts_def_inv: bidict[str, str] = accounts_def.inverse
+    fq_acctid = accounts_def_inv[trade_entry['execution']['acctNumber']]
+    proxy = proxies[fq_acctid]
     (
         records_by_acct,
         api_to_ledger_entries,
     ) = await update_ledger_from_api_trades(
         [trade_entry],
         proxy,
+        accounts_def_inv,
     )
-    trans = records_by_acct[acctid]
+    trans = records_by_acct[fq_acctid]
     r = list(trans.values())[0]
 
+    acctid = fq_acctid.strip('ib.')
     table = tables[acctid]
     table.update_from_trans(trans)
     active, closed = table.dump_active()
@@ -804,7 +799,11 @@ async def emit_pp_update(
     # NOTE: update ledger with all new trades
     for acctid, trades_by_id in api_to_ledger_entries.items():
         ledger = ledgers[acctid]
-        ledger.update(trades_by_id)
+
+        for tid, tdict in trades_by_id.items():
+            # NOTE: don't override flex/previous entries with new API
+            # ones, just update with new fields!
+            ledger.setdefault(tid, {}).update(tdict)
 
     # generate pp msgs and cross check with ib's positions data, relay
     # re-formatted pps as msgs to the ems.
@@ -909,8 +908,8 @@ async def deliver_trade_events(
             # https://github.com/erdewit/ib_insync/issues/363
             # acctid = accounts_def.inverse[trade.order.account]

            # double check there is no error when
            # cancelling..
gawwwd # if ib_status_key == 'cancelled': # last_log = trade.log[-1] # if ( @@ -1050,6 +1049,7 @@ async def deliver_trade_events( accounts_def, proxies, cids2pps, + ledgers, tables, ) @@ -1084,6 +1084,7 @@ async def deliver_trade_events( accounts_def, proxies, cids2pps, + ledgers, tables, ) @@ -1145,7 +1146,7 @@ async def deliver_trade_events( def norm_trade_records( ledger: dict[str, Any], -) -> list[Transaction]: +) -> dict[str, Transaction]: ''' Normalize a flex report or API retrieved executions ledger into our standard record format. @@ -1275,7 +1276,7 @@ def norm_trade_records( cost=comms, dt=dt, expiry=expiry, - bs_mktid=conid, + bs_mktid=str(conid), ), key=lambda t: t.dt ) From 8f79c37b9989fda00787b1db1a986a762355b467 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Mar 2023 16:29:31 -0400 Subject: [PATCH 074/294] Generalize `MktPair.from_msg()` handling Accept a msg with any of: - `.src: Asset` and `.dst: Asset` - `.src: str` and `.dst: str` - `.src: Asset` and `.dst: str` but not the final combo tho XD Also, fix `.key` to properly cast any `.src: Asset` to string! --- piker/accounting/_mktinfo.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 19657541..19f1b87e 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -236,16 +236,31 @@ class MktPair(Struct, frozen=True): ''' dst_asset_msg = msg.pop('dst') + src_asset_msg = msg.pop('src') + if isinstance(dst_asset_msg, str): + src: str = str(src_asset_msg) + assert isinstance(src, str) return cls.from_fqme( dst_asset_msg, + src=src, **msg, ) - # NOTE: we call `.copy()` here to ensure - # type casting! - dst = Asset(**dst_asset_msg).copy() - return cls(dst=dst, **msg).copy() + else: + # NOTE: we call `.copy()` here to ensure + # type casting! + dst = Asset(**dst_asset_msg).copy() + if not isinstance(src_asset_msg, str): + src = Asset(**src_asset_msg).copy() + else: + src = str(src_asset_msg) + + return cls( + dst=dst, + src=src, + **msg, + ).copy() @property def resolved(self) -> bool: @@ -292,7 +307,8 @@ class MktPair(Struct, frozen=True): ''' return maybe_cons_tokens( - [str(self.dst), self.src], + [str(self.dst), + str(self.src)], delim_char='', ) From b718b5634e4d2b8efaa9f7c8813c281bc238c015 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 23 Mar 2023 16:32:20 -0400 Subject: [PATCH 075/294] `binance`: use `MktPair` in live feed setup Turns out `binance` is pretty great with their schema since they have more or less the same data schema for their exchange info ep which we wrap in a `Pair` struct: https://binance-docs.github.io/apidocs/spot/en/#exchange-information That makes it super easy to provide the most general case for filling out a `MktPair` with both `.src/dst: Asset` to maintain maximum meta-data B) Deatz: - adjust `Pair` to have `.size/price_tick: Decimal` by parsing out the values from the filters field; TODO: we should probably just rewrite the input `.filter` at init time so we can keep the frozen style. - rename `Client.mkt_info()` (was `.symbol_info` to `.exch_info()` better matching the ep name and have it build, cache, and return a `dict[str, Pair]`; allows dropping `.cache_symbols()` - only pass the `mkt_info: MktPair` field in the init msg! 
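A sketch of the resulting construction (the asset names and the tick
sources noted in comments are illustrative only):

    mkt = MktPair(
        dst=Asset(
            name='btc',
            atype='crypto',
            tx_tick=digits_to_dec(pair.baseAssetPrecision),
        ),
        src=Asset(
            name='usdt',
            atype='crypto',
            tx_tick=digits_to_dec(pair.quoteAssetPrecision),
        ),
        price_tick=pair.price_tick(),  # PRICE_FILTER.tickSize
        size_tick=pair.size_tick(),    # LOT_SIZE.stepSize
        bs_mktid=pair.symbol,
        broker='binance',
    )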
---
 piker/brokers/binance.py | 152 +++++++++++++++++++++++++--------------
 1 file changed, 98 insertions(+), 54 deletions(-)

diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py
index e4818f9b..840b6d0e 100644
--- a/piker/brokers/binance.py
+++ b/piker/brokers/binance.py
@@ -37,14 +37,19 @@ import numpy as np
 import tractor
 import wsproto
 
+from ..accounting._mktinfo import (
+    Asset,
+    MktPair,
+    digits_to_dec,
+)
 from .._cacheables import open_cached_client
 from ._util import (
     resproc,
     SymbolNotFound,
     DataUnavailable,
 )
-from ..log import (
-    get_logger,
+from ._util import (
+    log,
     get_console_log,
 )
 from ..data.types import Struct
@@ -53,8 +58,6 @@ from ..data._web_bs import (
     NoBsWs,
 )
 
-log = get_logger(__name__)
-
 
 _url = 'https://api.binance.com'
 
@@ -89,7 +92,10 @@ _show_wap_in_history = False
 
 
 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(Struct, frozen=True):
+
+
+# TODO: make this frozen again by pre-processing the
+# filters list to a dict at init time?
+class Pair(Struct):  # , frozen=True):
 
     symbol: str
     status: str
@@ -115,9 +121,41 @@ class Pair(Struct, frozen=True):
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
 
-    filters: list[dict[str, Union[str, int, float]]]
+    filters: list[
+        dict[
+            str,
+            Union[str, int, float]
+        ]
+    ]
     permissions: list[str]
 
+    _filtersbykey: dict | None = None
+
+    def get_filter(self) -> dict[str, dict]:
+        filters = self._filtersbykey
+
+        if self._filtersbykey:
+            return filters
+
+        filters = self._filtersbykey = {}
+        for entry in self.filters:
+            ftype = entry['filterType']
+            filters[ftype] = entry
+
+        return filters
+
+    def size_tick(self) -> Decimal:
+        # XXX: lul, after manually inspecting the response format we
+        # just directly pick out the info we need
+        return Decimal(
+            self.get_filter()['LOT_SIZE']['stepSize'].rstrip('0')
+        )
+
+    def price_tick(self) -> Decimal:
+        return Decimal(
+            self.get_filter()['PRICE_FILTER']['tickSize'].rstrip('0')
+        )
+
 
 class OHLC(Struct):
     '''
@@ -160,7 +198,7 @@ class Client:
     def __init__(self) -> None:
         self._sesh = asks.Session(connections=4)
         self._sesh.base_location = _url
-        self._pairs: dict[str, Any] = {}
+        self._pairs: dict[str, Pair] = {}
 
     async def _api(
         self,
@@ -174,50 +212,43 @@ class Client:
         )
         return resproc(resp, log)
 
-    async def mkt_info(
+    async def exch_info(
         self,
         sym: str | None = None,
 
-    ) -> dict[str, Any]:
-        '''Get symbol info for the exchange.
+    ) -> dict[str, Pair] | Pair:
+        '''
+        Fresh exchange-pairs info query for symbol ``sym: str``:
+        https://binance-docs.github.io/apidocs/spot/en/#exchange-information
 
         '''
-        # TODO: we can load from our self._pairs cache
-        # on repeat calls...
+ cached_pair = self._pairs.get(sym) + if cached_pair: + return cached_pair - # will retrieve all symbols by default + # retrieve all symbols by default params = {} - if sym is not None: sym = sym.lower() params = {'symbol': sym} - resp = await self._api( - 'exchangeInfo', - params=params, - ) - + resp = await self._api('exchangeInfo', params=params) entries = resp['symbols'] if not entries: - raise SymbolNotFound(f'{sym} not found') + raise SymbolNotFound(f'{sym} not found:\n{resp}') - syms = {item['symbol']: item for item in entries} + pairs = { + item['symbol']: Pair(**item) for item in entries + } + self._pairs.update(pairs) if sym is not None: - return syms[sym] + return pairs[sym] else: - return syms + return self._pairs - symbol_info = mkt_info - - async def cache_symbols( - self, - ) -> dict: - if not self._pairs: - self._pairs = await self.mkt_info() - - return self._pairs + symbol_info = exch_info async def search_symbols( self, @@ -227,7 +258,7 @@ class Client: if self._pairs is not None: data = self._pairs else: - data = await self.mkt_info() + data = await self.exch_info() matches = fuzzy.extractBests( pattern, @@ -302,7 +333,7 @@ class Client: @acm async def get_client() -> Client: client = Client() - await client.cache_symbols() + await client.exch_info() yield client @@ -465,27 +496,38 @@ async def stream_quotes( ): # keep client cached for real-time section - cache = await client.cache_symbols() + pairs = await client.exch_info() + sym_infos: dict[str, dict] = {} + mkt_infos: dict[str, MktPair] = {} for sym in symbols: - d = cache[sym.upper()] - syminfo = Pair(**d) # validation - si = sym_infos[sym] = syminfo.to_dict() - filters = {} - for entry in syminfo.filters: - ftype = entry['filterType'] - filters[ftype] = entry + pair: Pair = pairs[sym.upper()] + price_tick = pair.price_tick() + size_tick = pair.size_tick() - # XXX: after manually inspecting the response format we - # just directly pick out the info we need - si['price_tick_size'] = Decimal( - filters['PRICE_FILTER']['tickSize'].rstrip('0') + mkt_infos[sym] = MktPair( + dst=Asset( + name=pair.baseAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ), + src=Asset( + name=pair.quoteAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ), + price_tick=price_tick, + size_tick=size_tick, + bs_mktid=pair.symbol, + broker='binance', ) - si['lot_tick_size'] = Decimal( - filters['LOT_SIZE']['stepSize'].rstrip('0') - ) - si['asset_type'] = 'crypto' + + sym_infos[sym] = { + 'price_tick_size': price_tick, + 'lot_tick_size': size_tick, + 'asset_type': 'crypto', + } symbol = symbols[0] @@ -493,9 +535,11 @@ async def stream_quotes( # pass back token, and bool, signalling if we're the writer # and that history has been written symbol: { - 'symbol_info': sym_infos[sym], - 'shm_write_opts': {'sum_tick_vml': False}, 'fqsn': sym, + + # 'symbol_info': sym_infos[sym], + 'mkt_info': mkt_infos[sym], + 'shm_write_opts': {'sum_tick_vml': False}, }, } @@ -582,13 +626,13 @@ async def open_symbol_search( async with open_cached_client('binance') as client: # load all symbols locally for fast search - cache = await client.cache_symbols() + cache = await client.exch_info() await ctx.started() async with ctx.open_stream() as stream: async for pattern in stream: - # results = await client.mkt_info(sym=pattern.upper()) + # results = await client.exch_info(sym=pattern.upper()) matches = fuzzy.extractBests( pattern, From c5b172a7df2644564cef21743abf6236f466cf13 Mon Sep 17 00:00:00 2001 From: Tyler 
Goodlet
Date: Thu, 23 Mar 2023 17:34:55 -0400
Subject: [PATCH 076/294] `binance`: pre-process `Pair` filters at init

Allows us to keep the struct frozen as well as avoid complexity in
the pure data type. Also changes `.price/size_tick` to plain ol'
properties.
---
 piker/brokers/binance.py | 60 +++++++++++++++++---------------------
 1 file changed, 28 insertions(+), 32 deletions(-)

diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py
index 840b6d0e..7f7d7ae0 100644
--- a/piker/brokers/binance.py
+++ b/piker/brokers/binance.py
@@ -95,7 +95,7 @@ _show_wap_in_history = False
 
 # TODO: make this frozen again by pre-processing the
 # filters list to a dict at init time?
-class Pair(Struct):  # , frozen=True):
+class Pair(Struct, frozen=True):
 
     symbol: str
     status: str
@@ -121,40 +121,21 @@ class Pair(Struct):  # , frozen=True):
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
 
-    filters: list[
-        dict[
-            str,
-            Union[str, int, float]
-        ]
+    filters: dict[
+        str,
+        Union[str, int, float]
     ]
     permissions: list[str]
 
-    _filtersbykey: dict | None = None
-
-    def get_filter(self) -> dict[str, dict]:
-        filters = self._filtersbykey
-
-        if self._filtersbykey:
-            return filters
-
-        filters = self._filtersbykey = {}
-        for entry in self.filters:
-            ftype = entry['filterType']
-            filters[ftype] = entry
-
-        return filters
-
+    @property
     def size_tick(self) -> Decimal:
         # XXX: lul, after manually inspecting the response format we
         # just directly pick out the info we need
-        return Decimal(
-            self.get_filter()['LOT_SIZE']['stepSize'].rstrip('0')
-        )
+        return Decimal(self.filters['LOT_SIZE']['stepSize'].rstrip('0'))
 
+    @property
     def price_tick(self) -> Decimal:
-        return Decimal(
-            self.get_filter()['PRICE_FILTER']['tickSize'].rstrip('0')
-        )
+        return Decimal(self.filters['PRICE_FILTER']['tickSize'].rstrip('0'))
 
 
 class OHLC(Struct):
@@ -238,9 +219,24 @@ class Client:
         if not entries:
             raise SymbolNotFound(f'{sym} not found:\n{resp}')
 
-        pairs = {
-            item['symbol']: Pair(**item) for item in entries
-        }
+        # pre-process .filters field into a table
+        pairs = {}
+        for item in entries:
+            symbol = item['symbol']
+            filters = {}
+            filters_ls: list = item.pop('filters')
+            for entry in filters_ls:
+                ftype = entry['filterType']
+                filters[ftype] = entry
+
+            pairs[symbol] = Pair(
+                filters=filters,
+                **item,
+            )
+
+        # pairs = {
+        #     item['symbol']: Pair(**item) for item in entries
+        # }
         self._pairs.update(pairs)
 
         if sym is not None:

From 485a17af262f855f43289a56bfc5c4477a0d6b2f Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 24 Mar 2023 18:38:29 -0400
Subject: [PATCH 077/294] Drop weird extra line from license headers
---
 piker/accounting/__init__.py | 1 -
 piker/accounting/_ledger.py  | 6 +++++-
 piker/accounting/_mktinfo.py | 1 -
 piker/accounting/_pos.py     | 1 -
 piker/accounting/_toml.py    | 1 -
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py
index 7d7fbb85..6455df95 100644
--- a/piker/accounting/__init__.py
+++ b/piker/accounting/__init__.py
@@ -12,7 +12,6 @@
 # GNU Affero General Public License for more details.
 
 # You should have received a copy of the GNU Affero General Public License
-
 # along with this program. If not, see .
''' diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 14fca94c..3649e753 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -12,8 +12,12 @@ # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License - # along with this program. If not, see . + +''' +Trade and transaction ledger processing. + +''' from __future__ import annotations from contextlib import contextmanager as cm import os diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 19f1b87e..0a301986 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -12,7 +12,6 @@ # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License - # along with this program. If not, see . ''' diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 5bab2394..23c419fe 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -12,7 +12,6 @@ # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License - # along with this program. If not, see . ''' diff --git a/piker/accounting/_toml.py b/piker/accounting/_toml.py index 9b02970d..7ac91b06 100644 --- a/piker/accounting/_toml.py +++ b/piker/accounting/_toml.py @@ -12,7 +12,6 @@ # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License - # along with this program. If not, see . ''' From e0067a4e1de638a19eca1adfc1d98b7952fcb825 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 24 Mar 2023 18:39:45 -0400 Subject: [PATCH 078/294] WIP: trying out `typer` for ledger cli --- piker/accounting/cli.py | 42 +++++++++++++++++++++++++++++++++++++++++ setup.py | 1 + 2 files changed, 43 insertions(+) create mode 100644 piker/accounting/cli.py diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py new file mode 100644 index 00000000..a2104653 --- /dev/null +++ b/piker/accounting/cli.py @@ -0,0 +1,42 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +CLI front end for trades ledger and position tracking management. 
+
+'''
+import typer
+
+from ._pos import open_pps
+
+
+ledger = typer.Typer()
+
+
+@ledger.command()
+def sync(
+    brokername: str,
+    account: str,
+):
+    with open_pps(
+        brokername,
+        account,
+    ) as table:
+        breakpoint()
+
+
+if __name__ == "__main__":
+    ledger()
diff --git a/setup.py b/setup.py
index 0cd9d3fb..59690acd 100755
--- a/setup.py
+++ b/setup.py
@@ -40,6 +40,7 @@ setup(
         'console_scripts': [
             'piker = piker.cli:cli',
             'pikerd = piker.cli:pikerd',
+            'ledger = piker.accounting.cli:ledger',
         ]
     },
     install_requires=[

From ccfafeeec2b793b21db7d290b0fd44b7a572c576 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 24 Mar 2023 18:40:04 -0400
Subject: [PATCH 079/294] Drop `cryptofeed`, what a mess XD

---
 requirements.txt | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 5e10a4ff..25951629 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,6 +13,3 @@

 # ``asyncvnc`` for sending interactions to ib-gw inside docker
 -e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
-
-# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
--e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed

From ff285fbbda5d16398841a2b20a2a36a142092165 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sun, 26 Mar 2023 18:15:54 -0400
Subject: [PATCH 080/294] `binance`: adjust search to expect `Pair`s

---
 piker/brokers/binance.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py
index 7f7d7ae0..3ec02328 100644
--- a/piker/brokers/binance.py
+++ b/piker/brokers/binance.py
@@ -636,7 +636,7 @@ async def open_symbol_search(
                     score_cutoff=50,
                 )
                 # repack in dict form
-                await stream.send(
-                    {item[0]['symbol']: item[0]
-                     for item in matches}
-                )
+                await stream.send({
+                    item[0].symbol: item[0]
+                    for item in matches
+                })

From 2c23bc166b640882b5d571c5624a899340b6d270 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 27 Mar 2023 12:18:39 -0400
Subject: [PATCH 081/294] First working `brokerd` -> `trades_dialogue()` ep
 loader

---
 piker/accounting/cli.py | 123 ++++++++++++++++++++++++++++++++++++++--
 1 file changed, 118 insertions(+), 5 deletions(-)

diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py
index a2104653..71ad9757 100644
--- a/piker/accounting/cli.py
+++ b/piker/accounting/cli.py
@@ -18,24 +18,137 @@ CLI front end for trades ledger and position tracking management.

 '''
+from typing import (
+    Any,
+)
+
+import tractor
+import trio
 import typer

-from ._pos import open_pps
+from ..service import (
+    open_piker_runtime,
+)
+# from ._pos import open_pps


 ledger = typer.Typer()


+def broker_init(
+    brokername: str,
+    loglevel: str | None = None,
+
+    **start_actor_kwargs,
+
+) -> dict:
+    '''
+    Given an input broker name, load all named arguments
+    which can be passed to a daemon + context spawn for
+    the relevant `brokerd` service endpoint.
+
+    '''
+    # log.info(f'Spawning {brokername} broker daemon')
+    from ..brokers import get_brokermod
+    brokermod = get_brokermod(brokername)
+    modpath = brokermod.__name__
+
+    start_actor_kwargs['name'] = f'brokerd.{brokername}'
+    start_actor_kwargs.update(
+        getattr(
+            brokermod,
+            '_spawn_kwargs',
+            {},
+        )
+    )
+
+    # lookup actor-enabled modules declared by the backend offering the
+    # `brokerd` endpoint(s).
+    enabled = start_actor_kwargs['enable_modules'] = [modpath]
+    for submodname in getattr(
+        brokermod,
+        '__enable_modules__',
+        [],
+    ):
+        subpath = f'{modpath}.{submodname}'
+        enabled.append(subpath)
+
+    # non-blocking setup of brokerd service nursery
+    from ..data import _setup_persistent_brokerd
+
+    return (
+        start_actor_kwargs,  # to `ActorNursery.start_actor()`
+        _setup_persistent_brokerd,  # service task ep
+        getattr(  # trades endpoint
+            brokermod,
+            'trades_dialogue',
+            None,
+        ),
+    )
+
+
+@ledger.command()
+def sync(
+    brokername: str,
+    account: str,
+
+    loglevel: str = 'cancel',
+):
+
+    start_kwargs, _, trades_ep = broker_init(
+        brokername,
+        loglevel=loglevel,
+    )
+
+    async def main():
+
+        async with (
+            open_piker_runtime(
+                name='ledger_cli',
+                loglevel=loglevel,
+            ) as (actor, sockaddr),
+
+            tractor.open_nursery() as an,
+        ):
+            portal = await an.start_actor(
+                loglevel=loglevel,
+                debug_mode=True,
+                **start_kwargs,
+            )
+
+            if (
+                brokername == 'paper'
+                or trades_ep is None
+            ):
+                # from . import _paper_engine as paper
+                # open_trades_endpoint = paper.open_paperboi(
+                #     fqme='.'.join([symbol, broker]),
+                #     loglevel=loglevel,
+                # )
+                raise RuntimeError('Paper mode not supported for sync!')
+            else:
+                # open live brokerd trades endpoint
+                open_trades_endpoint = portal.open_context(
+                    trades_ep,
+                    loglevel=loglevel,
+                )
+
+            positions: dict[str, Any]
+            accounts: list[str]
+            # brokerd_trades_stream: tractor.MsgStream
+            async with (
+                open_trades_endpoint as (
+                    brokerd_ctx,
+                    (positions, accounts,),
+                ),
+                # brokerd_ctx.open_stream() as brokerd_trades_stream,
+            ):
+                await tractor.breakpoint()
+                await brokerd_ctx.cancel()
+
+            await portal.cancel_actor()
+
+    trio.run(main)


 if __name__ == "__main__":

From 978c59f5f076a0b121238eaa86c76ffeb12ffcb9 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 27 Mar 2023 12:19:09 -0400
Subject: [PATCH 082/294] `ib`: break up data vs. broker enabled modules

---
 piker/brokers/ib/__init__.py | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py
index 48024dc8..80bc228f 100644
--- a/piker/brokers/ib/__init__.py
+++ b/piker/brokers/ib/__init__.py
@@ -35,7 +35,6 @@ from .feed import (
 )
 from .broker import (
     trades_dialogue,
-    norm_trade_records,
 )

 __all__ = [
@@ -46,14 +45,23 @@ __all__ = [
     'stream_quotes',
 ]

-
-# tractor RPC enable arg
-__enable_modules__: list[str] = [
+_brokerd_mods: list[str] = [
     'api',
-    'feed',
     'broker',
 ]

+_datad_mods: list[str] = [
+    'feed',
+]
+
+
+# tractor RPC enable arg
+__enable_modules__: list[str] = (
+    _brokerd_mods
+    +
+    _datad_mods
+)
+
 # passed to ``tractor.ActorNursery.start_actor()``
 _spawn_kwargs = {
     'infect_asyncio': True,

From 29ad20bc63232b482fd70bc983ae7607689a9ca0 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 27 Mar 2023 14:14:39 -0400
Subject: [PATCH 083/294] `ib`: only process ledger-txs once per client

Previously we were re-processing all ledgers for every position msg
received from the API, per client.. Instead do that once in a first
pass and drop all key-miss lookups for `bs_mktid`s; it should never
happen.

Better typing for in-routine vars, convert pos msg/objects to `dict`
prior to logging so it's sane to read on console. Skip processing
option contracts specifically for now.
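In (pseudo) code the per-client flow now reduces to roughly the
following sketch (condensed from the diff below using its module-local
helpers; not a verbatim excerpt):

    from ib_insync.contract import Option

    # normalize the *entire* ledger exactly once per client and
    # update the position table from those transactions.
    trans = norm_trade_records(ledger)
    table.update_from_trans(trans)

    # then audit each ib-reported position against our table,
    # skipping (currently unsupported) options contracts.
    for pos in client.positions():
        if isinstance(pos.contract, Option):
            continue
        bs_mktid, msg = pack_position(pos)  # conId (as str) -> pos msg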
--- piker/brokers/ib/broker.py | 73 ++++++++++++++++++-------------------- 1 file changed, 34 insertions(+), 39 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index fa94044c..03d073fc 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -38,6 +38,7 @@ from trio_typing import TaskStatus import tractor from ib_insync.contract import ( Contract, + Option, ) from ib_insync.order import ( Trade, @@ -88,14 +89,17 @@ from .api import ( def pack_position( pos: IbPosition -) -> dict[str, Any]: +) -> tuple[ + str, + dict[str, Any] +]: con = pos.contract fqsn, calc_price = con2fqsn(con) # TODO: options contracts into a sane format.. return ( - con.conId, + str(con.conId), BrokerdPosition( broker='ib', account=pos.account, @@ -383,20 +387,19 @@ async def update_and_audit_msgs( # raise ValueError( log.error( f'POSITION MISMATCH ib <-> piker ledger:\n' - f'ib: {ibppmsg}\n' - f'piker: {msg}\n' - f'reverse_split_ratio: {reverse_split_ratio}\n' - f'split_ratio: {split_ratio}\n\n' 'FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?\n\n' 'If you are expecting a (reverse) split in this ' - 'instrument you should probably put the following ' + 'instrument you should probably put the following\n\n' f'in the `pps.toml` section:\n{entry}' + f'IB:\nm{ibppmsg.to_dict()}\n\n' + f'PIKER:\n{msg.to_dict()}\n\n' + # f'reverse_split_ratio: {reverse_split_ratio}\n' + # f'split_ratio: {split_ratio}\n\n' ) msg.size = ibsize if ibppmsg.avg_price != msg.avg_price: - - # TODO: make this a "propoganda" log level? + # TODO: make this a "propaganda" log level? log.warning( 'The mega-cucks at IB want you to believe with their ' f'"FIFO" positioning for {msg.symbol}:\n' @@ -425,10 +428,10 @@ async def update_and_audit_msgs( if validate and p.size: # raise ValueError( log.error( - f'UNEXPECTED POSITION says ib:\n' - f'piker: {msg}\n' + f'UNEXPECTED POSITION says IB:\n' 'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?\n' - 'THEY LIQUIDATED YOU OR YOUR MISSING LEDGER RECORDS!?' + 'THEY LIQUIDATED YOU OR YOUR MISSING LEDGER RECORDS!?\n' + f'PIKER:\n{msg.to_dict()}\n' ) msgs.append(msg) @@ -611,6 +614,8 @@ async def trades_dialogue( # api clients which report trades for **this session**. trades = await proxy.trades() if trades: + trans_by_acct: dict[str, Transaction] + api_to_ledger_entries: dict[str, dict] ( trans_by_acct, api_to_ledger_entries, @@ -637,17 +642,30 @@ async def trades_dialogue( if trans: table.update_from_trans(trans) + trans = norm_trade_records(ledger) + table.update_from_trans(trans) + # process pp value reported from ib's system. we only # use these to cross-check sizing since average pricing # on their end uses the so called (bs) "FIFO" style # which more or less results in a price that's not # useful for traders who want to not lose money.. xb + # -> collect all ib-pp reported positions so that we can be + # sure know which positions to update from the ledger if + # any are missing from the ``pps.toml`` + pos: IbPosition # named tuple actually for pos in client.positions(): - # collect all ib-pp reported positions so that we can be - # sure know which positions to update from the ledger if - # any are missing from the ``pps.toml`` - bs_mktid, msg = pack_position(pos) + # NOTE XXX: we skip options for now since we don't + # yet support the symbology nor the live feeds. 
+ if isinstance(pos.contract, Option): + log.warning( + f'Option contracts not supported for now:\n' + f'{pos._asdict()}' + ) + continue + + bs_mktid, msg = pack_position(pos) acctid = msg.account = accounts_def.inverse[msg.account] acctid = acctid.strip('ib.') cids2pps[(acctid, bs_mktid)] = msg @@ -663,29 +681,6 @@ async def trades_dialogue( not pp or pp.size != msg.size ): - trans = norm_trade_records(ledger) - table.update_from_trans(trans) - - # XXX: not sure exactly why it wouldn't be in - # the updated output (maybe this is a bug?) but - # if you create a pos from TWS and then load it - # from the api trades it seems we get a key - # error from ``update[bs_mktid]`` ? - pp = table.pps.get(bs_mktid) - if not pp: - log.error( - f'The contract id for {msg} may have ' - f'changed to {bs_mktid}\nYou may need to ' - 'adjust your ledger for this, skipping ' - 'for now.' - ) - continue - - # XXX: not sure exactly why it wouldn't be in - # the updated output (maybe this is a bug?) but - # if you create a pos from TWS and then load it - # from the api trades it seems we get a key - # error from ``update[bs_mktid]`` ? pp = table.pps[bs_mktid] pairinfo = pp.symbol if msg.size != pp.size: From 60123066e1db5b215cecbcb86e504b29010063ec Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 14:18:40 -0400 Subject: [PATCH 084/294] Use our `@acm` alias in paper eng --- piker/clearing/_paper_engine.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 0fadfeb6..708c6af0 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -19,7 +19,7 @@ Fake trading for forward testing. """ from collections import defaultdict -from contextlib import asynccontextmanager +from contextlib import asynccontextmanager as acm from datetime import datetime from operator import itemgetter import itertools @@ -530,9 +530,7 @@ async def trades_dialogue( [fqme], loglevel=loglevel, ) as feed, - ): - with open_pps(broker, 'paper') as table: # save pps in local state _positions.update(table.pps) @@ -586,7 +584,7 @@ async def trades_dialogue( await simulate_fills(feed.streams[broker], client) -@asynccontextmanager +@acm async def open_paperboi( fqme: str, loglevel: str, From 53c76d36807f3e4dd0802ce9b6530920a5045b45 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 14:21:09 -0400 Subject: [PATCH 085/294] Drop `Optional` use from daemon mod --- piker/service/_daemon.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/piker/service/_daemon.py b/piker/service/_daemon.py index e2581081..3e0d2080 100644 --- a/piker/service/_daemon.py +++ b/piker/service/_daemon.py @@ -20,7 +20,6 @@ Daemon-actor spawning "endpoint-hooks". """ from __future__ import annotations from typing import ( - Optional, Callable, Any, ) @@ -42,6 +41,7 @@ from ._actor_runtime import maybe_open_pikerd from ._registry import find_service # `brokerd` enabled modules +# TODO: move this def to the `.data` subpkg.. # NOTE: keeping this list as small as possible is part of our caps-sec # model and should be treated with utmost care! 
_data_mods = [ @@ -59,7 +59,7 @@ async def maybe_spawn_daemon( service_name: str, service_task_target: Callable, spawn_args: dict[str, Any], - loglevel: Optional[str] = None, + loglevel: str | None = None, singleton: bool = False, **kwargs, @@ -100,7 +100,6 @@ async def maybe_spawn_daemon( # pikerd is not live we now become the root of the # process tree async with maybe_open_pikerd( - loglevel=loglevel, **kwargs, @@ -141,7 +140,8 @@ async def maybe_spawn_daemon( async def spawn_brokerd( brokername: str, - loglevel: Optional[str] = None, + loglevel: str | None = None, + **tractor_kwargs, ) -> bool: @@ -190,7 +190,7 @@ async def spawn_brokerd( async def maybe_spawn_brokerd( brokername: str, - loglevel: Optional[str] = None, + loglevel: str | None = None, **kwargs, ) -> tractor.Portal: @@ -216,7 +216,7 @@ async def maybe_spawn_brokerd( async def spawn_emsd( - loglevel: Optional[str] = None, + loglevel: str | None = None, **extra_tractor_kwargs ) -> bool: @@ -252,7 +252,7 @@ async def spawn_emsd( async def maybe_open_emsd( brokername: str, - loglevel: Optional[str] = None, + loglevel: str | None = None, **kwargs, ) -> tractor._portal.Portal: # noqa From bc249fbeca087263530d2adfb6fa056ebbdd9b3a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 14:24:27 -0400 Subject: [PATCH 086/294] Move `.clearing._allocate` -> `accounting._allocate` --- piker/{clearing => accounting}/_allocate.py | 4 ++-- piker/ui/_position.py | 2 +- piker/ui/order_mode.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) rename piker/{clearing => accounting}/_allocate.py (99%) diff --git a/piker/clearing/_allocate.py b/piker/accounting/_allocate.py similarity index 99% rename from piker/clearing/_allocate.py rename to piker/accounting/_allocate.py index 657ba8e1..4bafc2f6 100644 --- a/piker/clearing/_allocate.py +++ b/piker/accounting/_allocate.py @@ -23,9 +23,9 @@ from typing import Optional from bidict import bidict -from ..accounting._mktinfo import Symbol +from ._pos import Position +from ._mktinfo import Symbol from ..data.types import Struct -from ..accounting import Position _size_units = bidict({ diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 547977dc..56659056 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -45,7 +45,7 @@ from ..calc import ( pnl, puterize, ) -from ..clearing._allocate import Allocator +from ..accounting._allocate import Allocator from ..accounting import ( Position, ) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 6f1477f7..61fdb6d9 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -37,10 +37,10 @@ from PyQt5.QtCore import Qt from .. 
import config
 from ..accounting import Position
-from ..clearing._client import open_ems, OrderBook
-from ..clearing._allocate import (
+from ..accounting._allocate import (
     mk_allocator,
 )
+from ..clearing._client import open_ems, OrderBook
 from ._style import _font
 from ..accounting._mktinfo import Symbol
 from ..data.feed import (

From 7b3d72490886dc8a8a29a36d6a3922b26982b0a4 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 27 Mar 2023 16:07:21 -0400
Subject: [PATCH 087/294] Rework `.config` routines to use `pathlib.Path`

Been meaning to do this port for a while; it makes passing around file
handles (presumably alongside the in-mem obj form) a lot simpler/nicer
and makes the implementations of all the config file handling much
more terse with fewer presumptions about the form of filename/dir
`str` values all over the place B)

moar technically, lets us:
- drop remaining `.config` usage of `os.path`.
- return `Path`s from most routines.
- add a special case to `get_conf_path()` such that if the input name
  contains a `pps.` pattern, we avoid validating the name; this is
  going to be used by new `.accounting.open_pps()` code which will
  instead write a separate TOML file for each account B)

---
 piker/config.py | 116 +++++++++++++++++++++++++-----------------------
 1 file changed, 60 insertions(+), 56 deletions(-)

diff --git a/piker/config.py b/piker/config.py
index 397342e3..136c3819 100644
--- a/piker/config.py
+++ b/piker/config.py
@@ -21,8 +21,6 @@ Platform configuration (files) mgmt.
 import platform
 import sys
 import os
-from os import path
-from os.path import dirname
 import shutil
 from typing import Optional
 from pathlib import Path
@@ -126,30 +124,35 @@ def get_app_dir(
     )


-_config_dir = _click_config_dir = get_app_dir('piker')
-_parent_user = os.environ.get('SUDO_USER')
+_click_config_dir: Path = Path(get_app_dir('piker'))
+_config_dir: Path = _click_config_dir
+_parent_user: str = os.environ.get('SUDO_USER')

 if _parent_user:
-    non_root_user_dir = os.path.expanduser(
-        f'~{_parent_user}'
+    non_root_user_dir = Path(
+        os.path.expanduser(f'~{_parent_user}')
     )
-    root = 'root'
+    root: str = 'root'
+    _ccds: str = str(_click_config_dir)  # click config dir string
+    i_tail: int = int(_ccds.rfind(root) + len(root))
     _config_dir = (
-        non_root_user_dir +
-        _click_config_dir[
-            _click_config_dir.rfind(root) + len(root):
-        ]
+        non_root_user_dir
+        /
+        Path(_ccds[i_tail+1:])  # +1 to capture trailing '/'
     )


 _conf_names: set[str] = {
     'brokers',
-    'pps',
+    # 'pps',
     'trades',
     'watchlists',
     'paper_trades'
 }

-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+# TODO: probably drop all this super legacy, questrade specific,
+# config stuff XD ?
+_watchlists_data_path: Path = _config_dir / Path('watchlists.json')
 _context_defaults = dict(
     default_map={
         # Questrade specific quote poll rates
@@ -180,7 +183,7 @@ def _conf_fn_w_ext(

 def get_conf_path(
     conf_name: str = 'brokers',
-) -> str:
+) -> Path:
     '''
     Return the top-level default config path normally under
     ``~/.config/piker`` on linux for a given ``conf_name``, the config
@@ -196,72 +199,68 @@ def get_conf_path(
     - strats.toml

     '''
-    assert conf_name in _conf_names
+    if 'pps.' not in conf_name:
+        assert str(conf_name) in _conf_names
+
     fn = _conf_fn_w_ext(conf_name)
-    return os.path.join(
-        _config_dir,
-        fn,
-    )
+    return _config_dir / Path(fn)


-def repodir():
+def repodir() -> Path:
     '''
-    Return the abspath to the repo directory.
+    Return the abspath as ``Path`` to the git repo's root dir.
''' - dirpath = path.abspath( - # we're 3 levels down in **this** module file - dirname(dirname(os.path.realpath(__file__))) - ) - return dirpath + return Path(__file__).absolute().parent.parent def load( conf_name: str = 'brokers', - path: str = None, + path: Path | None = None, **tomlkws, -) -> (dict, str): +) -> tuple[dict, str]: ''' Load config file by name. ''' - path = path or get_conf_path(conf_name) + path: Path = path or get_conf_path(conf_name) - if not os.path.isdir(_config_dir): - Path(_config_dir).mkdir(parents=True, exist_ok=True) - - if not os.path.isfile(path): - fn = _conf_fn_w_ext(conf_name) - - template = os.path.join( - repodir(), - 'config', - fn + if not _config_dir.is_dir(): + _config_dir.mkdir( + parents=True, + exist_ok=True, ) - # try to copy in a template config to the user's directory - # if one exists. - if os.path.isfile(template): + + if not path.is_file(): + fn: str = _conf_fn_w_ext(conf_name) + + # try to copy in a template config to the user's directory if + # one exists. + template: Path = repodir() / 'config' / fn + if template.is_file(): shutil.copyfile(template, path) else: - # create an empty file - with open(path, 'x'): + # create empty file + with path.open(mode='x'): pass else: - with open(path, 'r'): + with path.open(mode='r'): pass # touch it - config = toml.load(path, **tomlkws) + config: dict = toml.load(str(path), **tomlkws) log.debug(f"Read config file {path}") return config, path def write( config: dict, # toml config as dict - name: str = 'brokers', - path: str = None, + + name: str | None = None, + path: Path | None = None, fail_empty: bool = True, + **toml_kwargs, ) -> None: @@ -271,21 +270,26 @@ def write( Create a ``brokers.ini`` file if one does not exist. ''' - path = path or get_conf_path(name) - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - log.debug(f"Creating config dir {_config_dir}") - os.makedirs(dirname) + if name: + path: Path = path or get_conf_path(name) + dirname: Path = path.parent + if not dirname.is_dir(): + log.debug(f"Creating config dir {_config_dir}") + dirname.mkdir() - if not config and fail_empty: + if ( + not config + and fail_empty + ): raise ValueError( - "Watch out you're trying to write a blank config!") + "Watch out you're trying to write a blank config!" 
+ ) log.debug( f"Writing config `{name}` file to:\n" f"{path}" ) - with open(path, 'w') as cf: + with path.open(mode='w') as cf: return toml.dump( config, cf, From 4494acbc01d0a291197b9263a761db4ade104c53 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 16:17:50 -0400 Subject: [PATCH 088/294] Write a separate `pps...toml` file per account --- piker/accounting/_pos.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 23c419fe..63b3a4e0 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -26,10 +26,10 @@ from contextlib import contextmanager as cm from decimal import Decimal from math import copysign from pprint import pformat +from pathlib import Path from typing import ( Any, Iterator, - Optional, Union, Generator ) @@ -88,7 +88,7 @@ class Position(Struct): ] = {} first_clear_dt: datetime | None = None - expiry: Optional[datetime] = None + expiry: datetime | None = None def __repr__(self) -> str: return pformat(self.to_dict()) @@ -497,7 +497,8 @@ class PpTable(Struct): brokername: str acctid: str pps: dict[str, Position] - conf: Optional[dict] = {} + conf_path: Path + conf: dict | None = {} def update_from_trans( self, @@ -683,8 +684,8 @@ class PpTable(Struct): ] = enc.dump_inline_table config.write( - self.conf, - 'pps', + config=self.conf, + path=self.conf_path, encoder=enc, fail_empty=False ) @@ -696,7 +697,7 @@ def load_pps_from_ledger( acctname: str, # post normalization filter on ledger entries to be processed - filter_by: Optional[list[dict]] = None, + filter_by: list[dict] | None = None, ) -> tuple[ dict[str, Transaction], @@ -746,7 +747,11 @@ def open_pps( incremental update file: ``pps.toml``. ''' - conf, path = config.load('pps') + conf: dict + conf_path: Path + conf, conf_path = config.load( + f'pps.{brokername}.{acctid}', + ) brokersection = conf.setdefault(brokername, {}) pps = brokersection.setdefault(acctid, {}) @@ -765,6 +770,7 @@ def open_pps( brokername, acctid, pp_objs, + conf_path, conf=conf, ) From 2cb59fe4509357359447f113764cb6680e7d4f55 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 16:18:29 -0400 Subject: [PATCH 089/294] Flatter format for pos/ledger mngr statements --- piker/clearing/_paper_engine.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 708c6af0..a8edeb8b 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -262,8 +262,16 @@ class PaperBoi(Struct): ) with ( - open_trade_ledger(self.broker, 'paper') as ledger, - open_pps(self.broker, 'paper', write_on_exit=True) as table + open_trade_ledger( + self.broker, + 'paper', + ) as ledger, + + open_pps( + brokername=self.broker, + acctid='paper', + write_on_exit=True, + ) as table ): tx = t.to_dict() tx.pop('sym') From a336def65f12f0f460d100e896bdfeefaf38651c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 27 Mar 2023 16:49:17 -0400 Subject: [PATCH 090/294] `ib`: again, only *update* ledger records from API --- piker/brokers/ib/broker.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 03d073fc..d1757856 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -612,36 +612,39 @@ async def trades_dialogue( # update trades ledgers for all accounts from connected # api clients which report 
trades for **this session**. - trades = await proxy.trades() - if trades: + api_trades = await proxy.trades() + if api_trades: + trans_by_acct: dict[str, Transaction] api_to_ledger_entries: dict[str, dict] ( trans_by_acct, api_to_ledger_entries, ) = await update_ledger_from_api_trades( - trades, + api_trades, proxy, accounts_def_inv, ) - # if new trades are detected from the API, prepare + # if new api_trades are detected from the API, prepare # them for the ledger file and update the pptable. if api_to_ledger_entries: trade_entries = api_to_ledger_entries.get(acctid) - + await tractor.breakpoint() if trade_entries: - # write ledger with all new trades - # **AFTER** we've updated the - # `pps.toml` from the original - # ledger state! (i.e. this is - # currently done on exit) - ledger.update(trade_entries) + # write ledger with all new api_trades + # **AFTER** we've updated the `pps.toml` + # from the original ledger state! (i.e. this + # is currently done on exit) + for tid, entry in trade_entries.items(): + ledger.setdefault(tid, {}).update(entry) trans = trans_by_acct.get(acctid) if trans: table.update_from_trans(trans) + # update position table with latest ledger from all + # gathered transactions: ledger file + api records. trans = norm_trade_records(ledger) table.update_from_trans(trans) @@ -653,7 +656,8 @@ async def trades_dialogue( # -> collect all ib-pp reported positions so that we can be # sure know which positions to update from the ledger if # any are missing from the ``pps.toml`` - pos: IbPosition # named tuple actually + + pos: IbPosition # named tuple subtype for pos in client.positions(): # NOTE XXX: we skip options for now since we don't From 29a5910b90a342a16b8eded776f786de9383e88c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 28 Mar 2023 12:03:29 -0400 Subject: [PATCH 091/294] `ib`: move flex utils to new submod --- piker/brokers/ib/_flex_reports.py | 187 ++++++++++++++++++++++++++++++ piker/brokers/ib/broker.py | 163 ++------------------------ 2 files changed, 194 insertions(+), 156 deletions(-) create mode 100644 piker/brokers/ib/_flex_reports.py diff --git a/piker/brokers/ib/_flex_reports.py b/piker/brokers/ib/_flex_reports.py new file mode 100644 index 00000000..d26e0e3f --- /dev/null +++ b/piker/brokers/ib/_flex_reports.py @@ -0,0 +1,187 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +""" +"FLEX" report processing utils. 
+
+"""
+from bidict import bidict
+import pendulum
+from pprint import pformat
+from typing import Any
+
+from .api import (
+    get_config,
+    log,
+)
+from piker.accounting import (
+    open_trade_ledger,
+)
+
+
+def parse_flex_dt(
+    record: str,
+) -> pendulum.datetime:
+    date, ts = record.split(';')
+    dt = pendulum.parse(date)
+    ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
+    tsdt = pendulum.parse(ts)
+    return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
+
+
+def flex_records_to_ledger_entries(
+    accounts: bidict,
+    trade_entries: list[object],
+
+) -> dict:
+    '''
+    Convert flex report entry objects into ``dict`` form, pretty much
+    straight up without modification except add a `pydatetime` field
+    from the parsed timestamp.
+
+    '''
+    trades_by_account = {}
+    for t in trade_entries:
+        entry = t.__dict__
+
+        # XXX: LOL apparently ``toml`` has a bug
+        # where a section key error will show up in the write
+        # if you leave a table key as an `int`? So i guess
+        # cast to strs for all keys..
+
+        # oddly for some so-called "BookTrade" entries
+        # this field seems to be blank, no cuckin clue.
+        # trade['ibExecID']
+        tid = str(entry.get('ibExecID') or entry['tradeID'])
+        # date = str(entry['tradeDate'])
+
+        # XXX: is it going to cause problems if an account name
+        # gets lost? The user should be able to find it based
+        # on the actual exec history right?
+        acctid = accounts[str(entry['accountId'])]
+
+        # probably a flex record with a wonky non-std timestamp..
+        dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime'])
+        entry['datetime'] = str(dt)
+
+        if not tid:
+            # this is likely some kind of internal adjustment
+            # transaction, likely one of the following:
+            # - an expiry event that will show a "book trade" indicating
+            #   some adjustment to cash balances: zeroing or itm settle.
+            # - a manual cash balance position adjustment likely done by
+            #   the user from the accounts window in TWS where they can
+            #   manually set the avg price and size:
+            #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
+            log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
+            continue
+
+        trades_by_account.setdefault(
+            acctid, {}
+        )[tid] = entry
+
+    for acctid in trades_by_account:
+        trades_by_account[acctid] = dict(sorted(
+            trades_by_account[acctid].items(),
+            key=lambda entry: entry[1]['pydatetime'],
+        ))
+
+    return trades_by_account
+
+
+def load_flex_trades(
+    path: str | None = None,
+
+) -> dict[str, Any]:
+
+    from ib_insync import flexreport, util
+
+    conf = get_config()
+
+    if not path:
+        # load ``brokers.toml`` and try to get the flex
+        # token and query id that must be previously defined
+        # by the user.
+        token = conf.get('flex_token')
+        if not token:
+            raise ValueError(
+                'You must specify a ``flex_token`` field in your '
+                '`brokers.toml` in order to load your trade log, see our '
+                'instructions for how to set this up here:\n'
+                'PUT LINK HERE!'
+            )
+
+        qid = conf['flex_trades_query_id']
+
+        # TODO: hack this into our logging
+        # system like we do with the API client..
+        util.logToConsole()
+
+        # TODO: rewrite the query part of this with async..httpx?
+ report = flexreport.FlexReport( + token=token, + queryId=qid, + ) + + else: + # XXX: another project we could potentially look at, + # https://pypi.org/project/ibflex/ + report = flexreport.FlexReport(path=path) + + trade_entries = report.extract('Trade') + ln = len(trade_entries) + log.info(f'Loaded {ln} trades from flex query') + + trades_by_account = flex_records_to_ledger_entries( + conf['accounts'].inverse, # reverse map to user account names + trade_entries, + ) + + ledger_dict: dict | None = None + + for acctid in trades_by_account: + trades_by_id = trades_by_account[acctid] + + with open_trade_ledger('ib', acctid) as ledger_dict: + tid_delta = set(trades_by_id) - set(ledger_dict) + log.info( + 'New trades detected\n' + f'{pformat(tid_delta)}' + ) + if tid_delta: + sorted_delta = dict(sorted( + {tid: trades_by_id[tid] for tid in tid_delta}.items(), + key=lambda entry: entry[1].pop('pydatetime'), + )) + ledger_dict.update(sorted_delta) + + return ledger_dict + + +if __name__ == '__main__': + import sys + import os + + args = sys.argv + if len(args) > 1: + args = args[1:] + for arg in args: + path = os.path.abspath(arg) + load_flex_trades(path=path) + else: + # expect brokers.toml to have an entry and + # pull from the web service. + load_flex_trades() diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index d1757856..9bf84787 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -13,6 +13,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . + """ Order and trades endpoints for use with ``piker``'s EMS. @@ -84,6 +85,7 @@ from .api import ( Client, MethodProxy, ) +from ._flex_reports import parse_flex_dt def pack_position( @@ -630,7 +632,11 @@ async def trades_dialogue( # them for the ledger file and update the pptable. if api_to_ledger_entries: trade_entries = api_to_ledger_entries.get(acctid) - await tractor.breakpoint() + + # TODO: fix this `tractor` BUG! + # https://github.com/goodboy/tractor/issues/354 + # await tractor.breakpoint() + if trade_entries: # write ledger with all new api_trades # **AFTER** we've updated the `pps.toml` @@ -1283,16 +1289,6 @@ def norm_trade_records( return {r.tid: r for r in records} -def parse_flex_dt( - record: str, -) -> pendulum.datetime: - date, ts = record.split(';') - dt = pendulum.parse(date) - ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}' - tsdt = pendulum.parse(ts) - return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second) - - def api_trades_to_ledger_entries( accounts: bidict[str, str], @@ -1364,148 +1360,3 @@ def api_trades_to_ledger_entries( )) return trades_by_account - - -def flex_records_to_ledger_entries( - accounts: bidict, - trade_entries: list[object], - -) -> dict: - ''' - Convert flex report entry objects into ``dict`` form, pretty much - straight up without modification except add a `pydatetime` field - from the parsed timestamp. - - ''' - trades_by_account = {} - for t in trade_entries: - entry = t.__dict__ - - # XXX: LOL apparently ``toml`` has a bug - # where a section key error will show up in the write - # if you leave a table key as an `int`? So i guess - # cast to strs for all keys.. - - # oddly for some so-called "BookTrade" entries - # this field seems to be blank, no cuckin clue. - # trade['ibExecID'] - tid = str(entry.get('ibExecID') or entry['tradeID']) - # date = str(entry['tradeDate']) - - # XXX: is it going to cause problems if a account name - # get's lost? 
The user should be able to find it based - # on the actual exec history right? - acctid = accounts[str(entry['accountId'])] - - # probably a flex record with a wonky non-std timestamp.. - dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime']) - entry['datetime'] = str(dt) - - if not tid: - # this is likely some kind of internal adjustment - # transaction, likely one of the following: - # - an expiry event that will show a "book trade" indicating - # some adjustment to cash balances: zeroing or itm settle. - # - a manual cash balance position adjustment likely done by - # the user from the accounts window in TWS where they can - # manually set the avg price and size: - # https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST - log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}') - continue - - trades_by_account.setdefault( - acctid, {} - )[tid] = entry - - for acctid in trades_by_account: - trades_by_account[acctid] = dict(sorted( - trades_by_account[acctid].items(), - key=lambda entry: entry[1]['pydatetime'], - )) - - return trades_by_account - - -def load_flex_trades( - path: Optional[str] = None, - -) -> dict[str, Any]: - - from ib_insync import flexreport, util - - conf = get_config() - - if not path: - # load ``brokers.toml`` and try to get the flex - # token and query id that must be previously defined - # by the user. - token = conf.get('flex_token') - if not token: - raise ValueError( - 'You must specify a ``flex_token`` field in your' - '`brokers.toml` in order load your trade log, see our' - 'intructions for how to set this up here:\n' - 'PUT LINK HERE!' - ) - - qid = conf['flex_trades_query_id'] - - # TODO: hack this into our logging - # system like we do with the API client.. - util.logToConsole() - - # TODO: rewrite the query part of this with async..httpx? - report = flexreport.FlexReport( - token=token, - queryId=qid, - ) - - else: - # XXX: another project we could potentially look at, - # https://pypi.org/project/ibflex/ - report = flexreport.FlexReport(path=path) - - trade_entries = report.extract('Trade') - ln = len(trade_entries) - log.info(f'Loaded {ln} trades from flex query') - - trades_by_account = flex_records_to_ledger_entries( - conf['accounts'].inverse, # reverse map to user account names - trade_entries, - ) - - ledger_dict: Optional[dict] = None - - for acctid in trades_by_account: - trades_by_id = trades_by_account[acctid] - - with open_trade_ledger('ib', acctid) as ledger_dict: - tid_delta = set(trades_by_id) - set(ledger_dict) - log.info( - 'New trades detected\n' - f'{pformat(tid_delta)}' - ) - if tid_delta: - sorted_delta = dict(sorted( - {tid: trades_by_id[tid] for tid in tid_delta}.items(), - key=lambda entry: entry[1].pop('pydatetime'), - )) - ledger_dict.update(sorted_delta) - - return ledger_dict - - -if __name__ == '__main__': - import sys - import os - - args = sys.argv - if len(args) > 1: - args = args[1:] - for arg in args: - path = os.path.abspath(arg) - load_flex_trades(path=path) - else: - # expect brokers.toml to have an entry and - # pull from the web service. 
- load_flex_trades() From 50be10a9bd8573a886bf3d61de9e4a2513a86ea2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 28 Mar 2023 17:35:06 -0400 Subject: [PATCH 092/294] `ib`: keep broker name in `Transaction.fqsn` --- piker/brokers/ib/broker.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 9bf84787..7c42cf37 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -1233,6 +1233,8 @@ def norm_trade_records( tick_size = Decimal( Decimal(10)**Decimal(str(price)).as_tuple().exponent ) + + # TODO: convert to MktPair!!! pair = Symbol.from_fqsn( fqsn=f'{symbol}.{suffix}.ib', info={ @@ -1258,22 +1260,22 @@ def norm_trade_records( # 'src_type': 'fiat', }, ) - fqsn = pair.fqme.rstrip('.ib') + fqme = pair.fqme - # NOTE: for flex records the normal fields for defining an fqsn + # NOTE: for flex records the normal fields for defining an fqme # sometimes won't be available so we rely on two approaches for - # the "reverse lookup" of piker style fqsn keys: + # the "reverse lookup" of piker style fqme keys: # - when dealing with API trade records received from # `IB.trades()` we do a contract lookup at he time of processing # - when dealing with flex records, it is assumed the record # is at least a day old and thus the TWS position reporting system # should already have entries if the pps are still open, in - # which case, we can pull the fqsn from that table (see + # which case, we can pull the fqme from that table (see # `trades_dialogue()` above). insort( records, Transaction( - fqsn=fqsn, + fqsn=fqme, sym=pair, tid=tid, size=size, From 9f7aa3d1ff529dc872ed84e36cc49c8fbc50b163 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 29 Mar 2023 18:01:36 -0400 Subject: [PATCH 093/294] Always use the "most resolved" `Position.symbol: MktPair` When loading a `Position` from a pps file we might not have the entire `MktPair` field-set loaded (though going forward that shouldn't really ever happen except in the case of a legacy `pps.toml`), in which case we can check if the `.fqme: str` value loaded from the transaction is longer and use that instead - presuming it must have more mkt meta-data filled out. Also includes some more `fqsn` -> `fqme` renames. --- piker/accounting/_pos.py | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 63b3a4e0..0a3e2949 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -121,9 +121,8 @@ class Position(Struct): # it via the trades ledger.. # drop symbol obj in serialized form s = d.pop('symbol') - fqsn = s.fqme - - broker, key, suffix = unpack_fqme(fqsn) + fqme = s.fqme + broker, key, suffix = unpack_fqme(fqme) if isinstance(s, Symbol): sym_info = s.broker_info[broker] @@ -182,7 +181,7 @@ class Position(Struct): d['clears'] = toml_clears_list - return fqsn, d + return fqme, d def ensure_state(self) -> None: ''' @@ -522,7 +521,7 @@ class PpTable(Struct): # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt = t.sys + mkt: MktPair | Symbol = t.sys if not mkt: mkt = MktPair.from_fqme( fqme, @@ -531,18 +530,25 @@ class PpTable(Struct): bs_mktid=bs_mktid, ) - pp = pps.setdefault( - bs_mktid, - + pp = pps.get(bs_mktid) + if not pp: # if no existing pp, allocate fresh one. 
- Position( + pp = pps[bs_mktid] = Position( mkt, size=0.0, ppu=0.0, bs_mktid=bs_mktid, expiry=t.expiry, ) - ) + else: + # NOTE: if for some reason a "less resolved" mkt pair + # info has been set (based on the `.fqme` being + # a shorter string), instead use the one from the + # transaction since it likely has (more) full + # information from the provider. + if len(pp.symbol.fqme) < len(fqme): + pp.symbol = mkt + clears = pp.clears if clears: first_clear_dt = pp.first_clear_dt @@ -641,12 +647,12 @@ class PpTable(Struct): pos.ensure_state() # serialize to pre-toml form - fqsn, asdict = pos.to_pretoml() - log.info(f'Updating active pp: {fqsn}') + fqme, asdict = pos.to_pretoml() + log.info(f'Updating active pp: {fqme}') # XXX: ugh, it's cuz we push the section under # the broker name.. maybe we need to rethink this? - brokerless_key = fqsn.removeprefix(f'{self.brokername}.') + brokerless_key = fqme.removeprefix(f'{self.brokername}.') to_toml_dict[brokerless_key] = asdict return to_toml_dict @@ -662,8 +668,8 @@ class PpTable(Struct): pp_entries = self.to_toml() if pp_entries: log.info( - f'Updating ``pps.toml``:\n' - f'Current positions:\n{pformat(pp_entries)}' + f'Updating positions in ``{self.conf_path}``:\n' + f'n{pformat(pp_entries)}' ) self.conf[self.brokername][self.acctid] = pp_entries From 61fb783c4e7e41eece20255fe895a529ede5290b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 29 Mar 2023 18:28:42 -0400 Subject: [PATCH 094/294] Formalize a ledger type + API: `TransactionLedger` Add a new `class TransactionLedger(collections.UserDict)` for managing ledger (files) from a `dict`-like API. The main motivations being easy conversion between `dict` <-> `Transaction` obj forms as well as dynamic (toml) file updates via a set of methods: - `.write_config()` to render and write state to the local toml file. - `.iter_trans()` to allow iterator style conversion to `Transaction` form for each entry. - `.to_trans()` for the dict output from the above. Some adjustments to `Transaction` namely making `.sym/.sys` optional for now so that paper engine entries can be loaded (offline) without connecting to the emulated broker backend. Move to using `pathlib.Path` throughout for bootyful toml file mgmt B) --- piker/accounting/__init__.py | 8 +- piker/accounting/_ledger.py | 223 +++++++++++++++++++++++++---------- 2 files changed, 165 insertions(+), 66 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index 6455df95..eb420bab 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -21,21 +21,23 @@ for tendiez. ''' from ..log import get_logger -from ._pos import ( +from ._ledger import ( Transaction, + TransactionLedger, open_trade_ledger, - PpTable, ) from ._pos import ( - open_pps, load_pps_from_ledger, + open_pps, Position, + PpTable, ) log = get_logger(__name__) __all__ = [ 'Transaction', + 'TransactionLedger', 'open_trade_ledger', 'PpTable', 'open_pps', diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 3649e753..1ff593bc 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -19,9 +19,9 @@ Trade and transaction ledger processing. 
''' from __future__ import annotations +from collections import UserDict from contextlib import contextmanager as cm -import os -from os import path +from pathlib import Path import time from typing import ( Any, @@ -32,6 +32,7 @@ from typing import ( from pendulum import ( datetime, + parse, ) import tomli import toml @@ -48,6 +49,145 @@ from ._mktinfo import ( log = get_logger(__name__) +class Transaction(Struct, frozen=True): + + # TODO: unify this with the `MktPair`, + # once we have that as a required field, + # we don't really need the fqsn any more.. + fqsn: str + + tid: Union[str, int] # unique transaction id + size: float + price: float + cost: float # commisions or other additional costs + dt: datetime + + # TODO: we can drop this right since we + # can instead expect the backend to provide this + # via the `MktPair`? + expiry: datetime | None = None + + # remap for back-compat + @property + def fqme(self) -> str: + return self.fqsn + + # TODO: drop the Symbol type + + # the underlying "transaction system", normally one of a ``MktPair`` + # (a description of a tradable double auction) or a ledger-recorded + # ("ledger" in any sense as long as you can record transfers) of any + # sort) ``Asset``. + sym: MktPair | Asset | Symbol | None = None + + @property + def sys(self) -> Symbol: + return self.sym + + # (optional) key-id defined by the broker-service backend which + # ensures the instrument-symbol market key for this record is unique + # in the "their backend/system" sense; i.e. this uid for the market + # as defined (internally) in some namespace defined by the broker + # service. + bs_mktid: str | int | None = None + + def to_dict(self) -> dict: + dct = super().to_dict() + # ensure we use a pendulum formatted + # ISO style str here!@ + dct['dt'] = str(self.dt) + return dct + + +class TransactionLedger(UserDict): + ''' + Very simple ``dict`` wrapper + ``pathlib.Path`` handle to + a TOML formatted transaction file for enabling file writes + dynamically whilst still looking exactly like a ``dict`` from the + outside. + + ''' + def __init__( + self, + ledger_dict: dict, + file_path: Path, + + ) -> None: + self.file_path = file_path + super().__init__(ledger_dict) + + def write_config(self) -> None: + ''' + Render the self.data ledger dict to it's TML file form. + + ''' + with self.file_path.open(mode='w') as fp: + toml.dump(self.data, fp) + + def iter_trans( + self, + broker: str = 'paper', + + ) -> Generator[ + tuple[str, Transaction], + None, + None, + ]: + ''' + Deliver trades records in ``(key: str, t: Transaction)`` + form via generator. + + ''' + if broker != 'paper': + raise NotImplementedError('Per broker support not dun yet!') + + # TODO: lookup some standard normalizer + # func in the backend? + # from ..brokers import get_brokermod + # mod = get_brokermod(broker) + # trans_dict = mod.norm_trade_records(self.data) + + # NOTE: instead i propose the normalizer is + # a one shot routine (that can be lru cached) + # and instead call it for each entry incrementally: + # normer = mod.norm_trade_record(txdict) + + for tid, txdict in self.data.items(): + # special field handling for datetimes + # to ensure pendulum is used! 
+ fqme = txdict.get('fqme', txdict['fqsn']) + dt = parse(txdict['dt']) + expiry = txdict.get('expiry') + + yield ( + tid, + Transaction( + fqsn=fqme, + tid=txdict['tid'], + dt=dt, + price=txdict['price'], + size=txdict['size'], + cost=txdict.get('cost', 0), + bs_mktid=txdict['bs_mktid'], + + # optional + sym=None, + expiry=parse(expiry) if expiry else None, + ) + ) + + def to_trans( + self, + broker: str = 'paper', + + ) -> dict[str, Transaction]: + ''' + Return the entire output from ``.iter_trans()`` in a ``dict``. + + ''' + return dict(self.iter_trans()) + + @cm def open_trade_ledger( broker: str, @@ -63,82 +203,39 @@ def open_trade_ledger( name as defined in the user's ``brokers.toml`` config. ''' - ldir = path.join(config._config_dir, 'ledgers') - if not path.isdir(ldir): - os.makedirs(ldir) + ldir: Path = config._config_dir / 'ledgers' + if not ldir.is_dir(): + ldir.mkdir() fname = f'trades_{broker}_{account}.toml' - tradesfile = path.join(ldir, fname) + tradesfile: Path = ldir / fname - if not path.isfile(tradesfile): + if not tradesfile.is_file(): log.info( f'Creating new local trades ledger: {tradesfile}' ) - with open(tradesfile, 'w') as cf: - pass # touch - with open(tradesfile, 'rb') as cf: + tradesfile.touch() + + with tradesfile.open(mode='rb') as cf: start = time.time() - ledger = tomli.load(cf) + ledger_dict = tomli.load(cf) log.info(f'Ledger load took {time.time() - start}s') - cpy = ledger.copy() + cpy = ledger_dict.copy() + + ledger = TransactionLedger( + ledger_dict=cpy, + file_path=tradesfile, + ) try: - yield cpy + yield ledger finally: - if cpy != ledger: + if ledger.data != ledger_dict: # TODO: show diff output? # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries log.info(f'Updating ledger for {tradesfile}:\n') - ledger.update(cpy) - - # we write on close the mutated ledger data - with open(tradesfile, 'w') as cf: - toml.dump(ledger, cf) - - -class Transaction(Struct, frozen=True): - - # TODO: unify this with the `MktPair`, - # once we have that as a required field, - # we don't really need the fqsn any more.. - fqsn: str - - # TODO: drop the Symbol type - - # the underlying "transaction system", normally one of a ``MktPair`` - # (a description of a tradable double auction) or a ledger-recorded - # ("ledger" in any sense as long as you can record transfers) of any - # sort) ``Asset``. - sym: MktPair | Asset | Symbol - - @property - def sys(self) -> Symbol: - return self.sym - - tid: Union[str, int] # unique transaction id - size: float - price: float - cost: float # commisions or other additional costs - dt: datetime - expiry: datetime | None = None - - # remap for back-compat - @property - def fqme(self) -> str: - return self.fqsn - - # (optional) key-id defined by the broker-service backend which - # ensures the instrument-symbol market key for this record is unique - # in the "their backend/system" sense; i.e. this uid for the market - # as defined (internally) in some namespace defined by the broker - # service. - bs_mktid: str | int | None = None - - # XXX NOTE: this will come from the `MktPair` - # instead of defined here right? - # optional fqsn for the source "asset"/money symbol? 
-    # from: Optional[str] = None

From a74caa9f770de925459dbe1cf21b2f315e95fd56 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 29 Mar 2023 18:35:05 -0400
Subject: [PATCH 095/294] Add paper engine "offline loading" support to the
 ledger cli

---
 piker/accounting/cli.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py
index 71ad9757..6018ffe1 100644
--- a/piker/accounting/cli.py
+++ b/piker/accounting/cli.py
@@ -93,6 +93,7 @@ def sync(
     account: str,

     loglevel: str = 'cancel',
+    pdb: bool = False,
 ):

     start_kwargs, _, trades_ep = broker_init(
@@ -106,6 +107,8 @@ def sync(
             open_piker_runtime(
                 name='ledger_cli',
                 loglevel=loglevel,
+                debug_mode=pdb,
+
             ) as (actor, sockaddr),

             tractor.open_nursery() as an,
@@ -120,12 +123,12 @@ def sync(
                 brokername == 'paper'
                 or trades_ep is None
             ):
-                # from . import _paper_engine as paper
-                # open_trades_endpoint = paper.open_paperboi(
-                #     fqme='.'.join([symbol, broker]),
-                #     loglevel=loglevel,
-                # )
-                raise RuntimeError('Paper mode not supported for sync!')
+                from ..clearing import _paper_engine as paper
+                open_trades_endpoint = paper.open_paperboi(
+                    fqme=None,  # tell paper to not start clearing loop
+                    broker=brokername,
+                    loglevel=loglevel,
+                )
             else:
                 # open live brokerd trades endpoint
                 open_trades_endpoint = portal.open_context(

From 1560330acd2955cc370759c7cfb7bf1e62270806 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 29 Mar 2023 18:35:40 -0400
Subject: [PATCH 096/294] Convert `Flume.MktPair.size_tick` to float for dark
 clearing

---
 piker/clearing/_ems.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py
index 429c1935..a4e40587 100644
--- a/piker/clearing/_ems.py
+++ b/piker/clearing/_ems.py
@@ -1247,7 +1247,7 @@ async def process_client_order_cmds(
             pred = mk_check(trigger_price, last, action)

             spread_slap: float = 5
-            min_tick = flume.symbol.size_tick
+            min_tick = float(flume.symbol.size_tick)
             min_tick_digits = float_digits(min_tick)

             if action == 'buy':

From 2cc77c21baf1ff4551b69e43e9bfe354a267192c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 29 Mar 2023 18:36:01 -0400
Subject: [PATCH 097/294] Rework paper engine for "offline" pp loading

This will end up being super handy for testing our accounting
subsystems as well as providing unified and simple cli utils for
managing ledgers and position tracking. Allows loading the paper boi
without starting a data feed, instead just triggering ledger and pps
loading without spinning up the entire clearing engine.

Deatz:
- only init `PaperBoi` and start the clearing loop (tasks) if
  a non-`None` fqme is provided, otherwise just `Context.started()`
  the existing pps msgs as loaded from the ledger (see the sketch
  after this list).
- always update both the ledger and pp table on startup and pass
  a single instance of each obj to the `PaperBoi` for reuse (without
  opening and closing backing config files since we now have
  `.write_config()`).
- drop the global `_positions` dict, it's not needed any more if we
  use a `PaperBoi.ppt: PpTable` which persists with the engine actor's
  lifetime.
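In (pseudo) code the new ep startup is roughly the following sketch
(condensed from the diff below; not a verbatim excerpt):

    # load positions from the backing ledger file and deliver the
    # resulting pp msgs to the ems as the ``Context.started()`` value.
    ppt.update_from_trans(ledger.to_trans())
    await ctx.started((pp_msgs, ['paper']))

    if fqme is None:
        # "offline" ledger-loading mode: no data feed and no
        # simulated clearing loop are ever started.
        await trio.sleep_forever()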
--- piker/clearing/_paper_engine.py | 167 ++++++++++++++++++-------------- 1 file changed, 94 insertions(+), 73 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index a8edeb8b..07593d79 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -25,8 +25,6 @@ from operator import itemgetter import itertools import time from typing import ( - Any, - Optional, Callable, ) import uuid @@ -41,7 +39,9 @@ from ..data.types import Struct from ..accounting._mktinfo import Symbol from ..accounting import ( Position, + PpTable, Transaction, + TransactionLedger, open_trade_ledger, open_pps, ) @@ -73,13 +73,14 @@ class PaperBoi(Struct): ems_trades_stream: tractor.MsgStream + ppt: PpTable + ledger: TransactionLedger + # map of paper "live" orders which be used # to simulate fills based on paper engine settings _buys: defaultdict[str, bidict] _sells: defaultdict[str, bidict] _reqids: bidict - _positions: dict[str, Position] - _trade_ledger: dict[str, Any] _syms: dict[str, Symbol] = {} # init edge case L1 spread @@ -93,7 +94,7 @@ class PaperBoi(Struct): price: float, action: str, size: float, - reqid: Optional[str], + reqid: str | None, ) -> int: ''' @@ -261,38 +262,31 @@ class PaperBoi(Struct): bs_mktid=key, ) - with ( - open_trade_ledger( - self.broker, - 'paper', - ) as ledger, + tx = t.to_dict() + tx.pop('sym') - open_pps( - brokername=self.broker, - acctid='paper', - write_on_exit=True, - ) as table - ): - tx = t.to_dict() - tx.pop('sym') - ledger.update({oid: tx}) - # Write to pps toml right now - table.update_from_trans({oid: t}) + # update in-mem ledger and pos table + self.ledger.update({oid: tx}) + self.ppt.update_from_trans({oid: t}) - pp = table.pps[key] - pp_msg = BrokerdPosition( - broker=self.broker, - account='paper', - symbol=fqme, - # TODO: we need to look up the asset currency from - # broker info. i guess for crypto this can be - # inferred from the pair? - currency=key, - size=pp.size, - avg_price=pp.ppu, - ) + # transmit pp msg to ems + pp = self.ppt.pps[key] + pp_msg = BrokerdPosition( + broker=self.broker, + account='paper', + symbol=fqme, + # TODO: we need to look up the asset currency from + # broker info. i guess for crypto this can be + # inferred from the pair? 
+ currency=key, + size=pp.size, + avg_price=pp.ppu, + ) + await self.ems_trades_stream.send(pp_msg) - await self.ems_trades_stream.send(pp_msg) + # write all updates to filesys + self.ledger.write_config() + self.ppt.write_config() async def simulate_fills( @@ -518,7 +512,6 @@ _sells: defaultdict[ tuple[float, float, str, str], # order info ] ] = defaultdict(bidict) -_positions: dict[str, Position] = {} @tractor.context @@ -526,27 +519,34 @@ async def trades_dialogue( ctx: tractor.Context, broker: str, - fqme: str, - loglevel: str = None, + fqme: str | None = None, # if empty, we only boot broker mode + loglevel: str = 'warning', ) -> None: tractor.log.get_console_log(loglevel) - async with ( - data.open_feed( - [fqme], - loglevel=loglevel, - ) as feed, + ppt: PpTable + ledger: TransactionLedger + with ( + open_pps( + broker, + 'paper', + write_on_exit=True, + ) as ppt, + + open_trade_ledger( + broker, + 'paper', + ) as ledger ): - with open_pps(broker, 'paper') as table: - # save pps in local state - _positions.update(table.pps) + # update pos table from ledger history + ppt.update_from_trans(ledger.to_trans()) pp_msgs: list[BrokerdPosition] = [] pos: Position token: str # f'{symbol}.{self.broker}' - for token, pos in _positions.items(): + for token, pos in ppt.pps.items(): pp_msgs.append(BrokerdPosition( broker=broker, account='paper', @@ -560,42 +560,59 @@ async def trades_dialogue( ['paper'], )) + # exit early since no fqme was passed, + # normally this case is just to load + # positions "offline". + if fqme is None: + log.warning( + 'Paper engine only running in position delivery mode!\n' + 'NO SIMULATED CLEARING LOOP IS ACTIVE!' + ) + await trio.sleep_forever() + return + async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n, + data.open_feed( + [fqme], + loglevel=loglevel, + ) as feed, ): - client = PaperBoi( - broker, - ems_stream, - _buys=_buys, - _sells=_sells, + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n, + ): + client = PaperBoi( + broker=broker, + ems_trades_stream=ems_stream, + ppt=ppt, + ledger=ledger, - _reqids=_reqids, + _buys=_buys, + _sells=_sells, + _reqids=_reqids, - _positions=_positions, + # TODO: load postions from ledger file + _syms={ + fqme: flume.symbol + for fqme, flume in feed.flumes.items() + } + ) - # TODO: load postions from ledger file - _trade_ledger={}, - _syms={ - fqme: flume.symbol - for fqme, flume in feed.flumes.items() - } - ) + n.start_soon( + handle_order_requests, + client, + ems_stream, + ) - n.start_soon( - handle_order_requests, - client, - ems_stream, - ) - - # paper engine simulator clearing task - await simulate_fills(feed.streams[broker], client) + # paper engine simulator clearing task + await simulate_fills(feed.streams[broker], client) @acm async def open_paperboi( - fqme: str, - loglevel: str, + fqme: str | None = None, + broker: str | None = None, + loglevel: str | None = None, ) -> Callable: ''' @@ -603,7 +620,11 @@ async def open_paperboi( its context. ''' - broker, symbol, expiry = unpack_fqme(fqme) + if not fqme: + assert broker, 'One of `broker` or `fqme` is required siss..!' 
+ else: + broker, symbol, expiry = unpack_fqme(fqme) + service_name = f'paperboi.{broker}' async with ( From 48f096995fb04dfdd95b870aba8f783479ce0dcb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 29 Mar 2023 20:02:20 -0400 Subject: [PATCH 098/294] `kraken`: write ledger and pps files on startup --- piker/brokers/kraken/broker.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 1c551343..935459cc 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -43,6 +43,7 @@ from piker.accounting import ( Position, PpTable, Transaction, + TransactionLedger, open_trade_ledger, open_pps, get_likely_pair, @@ -477,28 +478,39 @@ async def trades_dialogue( # update things correctly. simulate_pp_update: bool = False + table: PpTable + ledger: TransactionLedger with ( open_pps( 'kraken', acctid, write_on_exit=True, - ) as table, open_trade_ledger( 'kraken', acctid, - ) as ledger_dict, + ) as ledger, ): # transaction-ify the ledger entries - ledger_trans = norm_trade_records(ledger_dict) + ledger_trans = norm_trade_records(ledger) + + if not table.pps: + # NOTE: we can't use this since it first needs + # broker: str input support! + # table.update_from_trans(ledger.to_trans()) + table.update_from_trans(ledger_trans) + table.write_config() # TODO: eventually probably only load # as far back as it seems is not deliverd in the # most recent 50 trades and assume that by ordering we # already have those records in the ledger. tids2trades = await client.get_trades() - ledger_dict.update(tids2trades) + ledger.update(tids2trades) + if tids2trades: + ledger.write_config() + api_trans = norm_trade_records(tids2trades) # retrieve kraken reported balances @@ -506,7 +518,6 @@ async def trades_dialogue( # what amount of trades-transactions need # to be reloaded. balances = await client.get_balances() - # await tractor.breakpoint() for dst, size in balances.items(): From 72abe9847598c8c77bc0fd86ea898a696d2d66fa Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 30 Mar 2023 16:21:18 -0400 Subject: [PATCH 099/294] Async-ify order client methods and some renaming We previously only offered a sync API (which was recently renamed to `._nowait()` style) since initially all order control was from our `OrderMode` Qt driven UI/UX. This adds the equivalent async methods for both testing as well as eventual auto-strat driven control B) Also includes a bunch of renaming: - `OrderBook` -> `OrderClient`. - better internal renaming of the client's mem chan vars and add a ref `._ems_stream: tractor.MsgStream`. - drop `get_orders()` factory, just always check for the actor-global instance and always set the ems stream on that client (in case old one was closed). --- piker/clearing/_client.py | 226 +++++++++++++++++++------------------- piker/ui/order_mode.py | 13 ++- 2 files changed, 120 insertions(+), 119 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 14c77d54..8f531d6d 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -46,70 +46,86 @@ if TYPE_CHECKING: ) -class OrderBook(Struct): - '''EMS-client-side order book ctl and tracking. +class OrderClient(Struct): + ''' + EMS-client-side order book ctl and tracking. - A style similar to "model-view" is used here where this api is - provided as a supervised control for an EMS actor which does all the - hard/fast work of talking to brokers/exchanges to conduct - executions. 
- - Currently, this is mostly for keeping local state to match the EMS - and use received events to trigger graphics updates. + (A)sync API for submitting orders and alerts to the `emsd` service; + this is the main control for execution management from client code. ''' + # IPC stream to `emsd` actor + _ems_stream: tractor.MsgStream + # mem channels used to relay order requests to the EMS daemon - _to_ems: trio.abc.SendChannel - _from_order_book: trio.abc.ReceiveChannel + _to_relay_task: trio.abc.SendChannel + _from_sync_order_client: trio.abc.ReceiveChannel + + # history table _sent_orders: dict[str, Order] = {} def send_nowait( self, msg: Order | dict, - ) -> dict: + ) -> dict | Order: + ''' + Sync version of ``.send()``. + + ''' self._sent_orders[msg.oid] = msg - self._to_ems.send_nowait(msg) + self._to_relay_task.send_nowait(msg) return msg - # TODO: make this an async version.. - def send( + async def send( self, msg: Order | dict, - ) -> dict: - log.warning('USE `.send_nowait()` instead!') - return self.send_nowait(msg) + ) -> dict | Order: + ''' + Send a new order msg async to the `emsd` service. + + ''' + self._sent_orders[msg.oid] = msg + await self._ems_stream.send(msg) + return msg def update_nowait( self, - uuid: str, **data: dict, ) -> dict: + ''' + Sync version of ``.update()``. + + ''' cmd = self._sent_orders[uuid] msg = cmd.copy(update=data) self._sent_orders[uuid] = msg - self._to_ems.send_nowait(msg) - return cmd + self._to_relay_task.send_nowait(msg) + return msg - # TODO: async meth for this! - # def update( - # self, - # uuid: str, - # **data: dict, - # ) -> dict: - # ... - - def cancel_nowait( + async def update( self, uuid: str, - ) -> bool: + **data: dict, + ) -> dict: ''' - Cancel an order (or alert) in the EMS. + Update an existing order dialog with a msg updated from + ``update`` kwargs. ''' + cmd = self._sent_orders[uuid] + msg = cmd.copy(update=data) + self._sent_orders[uuid] = msg + await self._ems_stream.send(msg) + return msg + + def _mk_cancel_msg( + self, + uuid: str, + ) -> Cancel: cmd = self._sent_orders.get(uuid) if not cmd: log.error( @@ -118,85 +134,76 @@ class OrderBook(Struct): f'You should report this as a bug!' ) fqme = str(cmd.symbol) - msg = Cancel( + return Cancel( oid=uuid, symbol=fqme, ) - self._to_ems.send_nowait(msg) - # TODO: make this an async version.. - def cancel( + def cancel_nowait( self, uuid: str, - ) -> bool: - log.warning('USE `.cancel_nowait()` instead!') - return self.cancel_nowait(uuid) + ) -> None: + ''' + Sync version of ``.cancel()``. -_orders: OrderBook = None - - -def get_orders( - emsd_uid: tuple[str, str] = None -) -> OrderBook: - """" - OrderBook singleton factory per actor. - - """ - if emsd_uid is not None: - # TODO: read in target emsd's active book on startup - pass - - global _orders - - if _orders is None: - size = 100 - tx, rx = trio.open_memory_channel(size) - brx = broadcast_receiver(rx, size) - - # setup local ui event streaming channels for request/resp - # streamging with EMS daemon - _orders = OrderBook( - _to_ems=tx, - _from_order_book=brx, + ''' + self._to_relay_task.send_nowait( + self._mk_cancel_msg(uuid) ) - return _orders + async def cancel( + self, + uuid: str, + + ) -> bool: + ''' + Cancel an already existintg order (or alert) dialog. + + ''' + await self._ems_stream.send( + self._mk_cancel_msg(uuid) + ) -# TODO: we can get rid of this relay loop once we move -# order_mode inputs to async code! 
-async def relay_order_cmds_from_sync_code( +_client: OrderClient = None + +async def relay_orders_from_sync_code( + + client: OrderClient, symbol_key: str, to_ems_stream: tractor.MsgStream, ) -> None: - """ - Order streaming task: deliver orders transmitted from UI - to downstream consumers. + ''' + Order submission relay task: deliver orders sent from synchronous (UI) + code to the EMS via ``OrderClient._from_sync_order_client``. This is run in the UI actor (usually the one running Qt but could be any other client service code). This process simply delivers order - messages to the above ``_to_ems`` send channel (from sync code using + messages to the above ``_to_relay_task`` send channel (from sync code using ``.send_nowait()``), these values are pulled from the channel here and relayed to any consumer(s) that called this function using a ``tractor`` portal. This effectively makes order messages look like they're being "pushed" from the parent to the EMS where local sync code is likely - doing the pushing from some UI. + doing the pushing from some non-async UI handler. - """ - book = get_orders() - async with book._from_order_book.subscribe() as orders_stream: - async for cmd in orders_stream: + ''' + async with ( + client._from_sync_order_client.subscribe() as sync_order_cmds + ): + async for cmd in sync_order_cmds: sym = cmd.symbol msg = pformat(cmd) + if sym == symbol_key: log.info(f'Send order cmd:\n{msg}') # send msg over IPC / wire await to_ems_stream.send(cmd) + else: log.warning( f'Ignoring unmatched order cmd for {sym} != {symbol_key}:' @@ -211,7 +218,7 @@ async def open_ems( loglevel: str = 'error', ) -> tuple[ - OrderBook, + OrderClient, tractor.MsgStream, dict[ # brokername, acctid @@ -222,42 +229,15 @@ async def open_ems( dict[str, Status], ]: ''' - Spawn an EMS daemon and begin sending orders and receiving - alerts. + (Maybe) spawn an EMS-daemon (emsd), deliver an `OrderClient` for + requesting orders/alerts and a `trades_stream` which delivers all + response-msgs. - This EMS tries to reduce most broker's terrible order entry apis to - a very simple protocol built on a few easy to grok and/or - "rantsy" premises: - - - most users will prefer "dark mode" where orders are not submitted - to a broker until and execution condition is triggered - (aka client-side "hidden orders") - - - Brokers over-complicate their apis and generally speaking hire - poor designers to create them. We're better off using creating a super - minimal, schema-simple, request-event-stream protocol to unify all the - existing piles of shit (and shocker, it'll probably just end up - looking like a decent crypto exchange's api) - - - all order types can be implemented with client-side limit orders - - - we aren't reinventing a wheel in this case since none of these - brokers are exposing FIX protocol; it is they doing the re-invention. - - - TODO: make some fancy diagrams using mermaid.io - - the possible set of responses from the stream is currently: - - 'dark_submitted', 'broker_submitted' - - 'dark_cancelled', 'broker_cancelled' - - 'dark_executed', 'broker_executed' - - 'broker_filled' + This is a "client side" entrypoint which may spawn the `emsd` service + if it can't be discovered and generally speaking is the lowest level + broker control client-API. 
''' - # wait for service to connect back to us signalling - # ready for order commands - book = get_orders() - broker, symbol, suffix = unpack_fqme(fqme) async with maybe_open_emsd(broker) as portal: @@ -291,16 +271,34 @@ async def open_ems( # open 2-way trade command stream ctx.open_stream() as trades_stream, ): + # use any pre-existing actor singleton client. + global _client + if _client is None: + size = 100 + tx, rx = trio.open_memory_channel(size) + brx = broadcast_receiver(rx, size) + + # setup local ui event streaming channels for request/resp + # streamging with EMS daemon + _client = OrderClient( + _ems_stream=trades_stream, + _to_relay_task=tx, + _from_sync_order_client=brx, + ) + + _client._ems_stream = trades_stream + # start sync code order msg delivery task async with trio.open_nursery() as n: n.start_soon( - relay_order_cmds_from_sync_code, + relay_orders_from_sync_code, + _client, fqme, trades_stream ) yield ( - book, + _client, trades_stream, positions, accounts, diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 61fdb6d9..0013891c 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -40,7 +40,10 @@ from ..accounting import Position from ..accounting._allocate import ( mk_allocator, ) -from ..clearing._client import open_ems, OrderBook +from ..clearing._client import ( + open_ems, + OrderClient, +) from ._style import _font from ..accounting._mktinfo import Symbol from ..data.feed import ( @@ -120,7 +123,7 @@ class OrderMode: chart: ChartPlotWidget # type: ignore # noqa hist_chart: ChartPlotWidget # type: ignore # noqa nursery: trio.Nursery # used by ``ui._position`` code? - book: OrderBook + book: OrderClient lines: LineEditor arrows: ArrowEditor multistatus: MultiStatus @@ -679,7 +682,7 @@ async def open_order_mode( multistatus = chart.window().status_bar done = multistatus.open_status('starting order mode..') - book: OrderBook + book: OrderClient trades_stream: tractor.MsgStream # The keys in this dict **must** be in set our set of "normalized" @@ -923,7 +926,7 @@ async def process_trades_and_update_ui( trades_stream: tractor.MsgStream, mode: OrderMode, - book: OrderBook, + book: OrderClient, ) -> None: @@ -939,7 +942,7 @@ async def process_trades_and_update_ui( async def process_trade_msg( mode: OrderMode, - book: OrderBook, + book: OrderClient, msg: dict, ) -> tuple[Dialog, Status]: From fb13c7cbf692cb1ad5f7909b7ce9ac6977f739b4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 30 Mar 2023 17:25:44 -0400 Subject: [PATCH 100/294] `ib`: drop pp mismatch err block, we already do it in audit routine --- piker/brokers/ib/broker.py | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 7c42cf37..add03cc1 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -683,23 +683,6 @@ async def trades_dialogue( assert msg.account in accounts, ( f'Position for unknown account: {msg.account}') - ledger: dict = ledgers[acctid] - table: PpTable = tables[acctid] - pp: Position = table.pps.get(bs_mktid) - - if ( - not pp - or pp.size != msg.size - ): - pp = table.pps[bs_mktid] - pairinfo = pp.symbol - if msg.size != pp.size: - log.error( - f'Pos size mismatch {pairinfo.fqsn}:\n' - f'ib: {msg.size}\n' - f'piker: {pp.size}\n' - ) - # iterate all (newly) updated pps tables for every # client-account and build out position msgs to deliver to # EMS. 
@@ -802,13 +785,14 @@ async def emit_pp_update(
         active, closed = table.dump_active()
 
     # NOTE: update ledger with all new trades
-    for acctid, trades_by_id in api_to_ledger_entries.items():
+    for fq_acctid, trades_by_id in api_to_ledger_entries.items():
+        acctid = fq_acctid.strip('ib.')
         ledger = ledgers[acctid]
 
         for tid, tdict in trades_by_id.items():
             # NOTE: don't override flex/previous entries with new API
             # ones, just update with new fields!
-            ledger.setdefaults(tid, {}).update(tdict)
+            ledger.setdefault(tid, {}).update(tdict)
 
     # generate pp msgs and cross check with ib's positions data, relay
     # re-formatted pps as msgs to the ems.

From 879657cc75b55b6c011d5187b96ba5a429288eb5 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 31 Mar 2023 21:56:36 -0400
Subject: [PATCH 101/294] Detail `pikerd` sock bind collision in error

--- piker/service/_actor_runtime.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/piker/service/_actor_runtime.py b/piker/service/_actor_runtime.py
index db727e2e..ea7399fa 100644
--- a/piker/service/_actor_runtime.py
+++ b/piker/service/_actor_runtime.py
@@ -183,7 +183,10 @@ async def open_pikerd(
         trio.open_nursery() as service_nursery,
     ):
         if root_actor.accept_addr != reg_addr:
-            raise RuntimeError(f'Daemon failed to bind on {reg_addr}!?')
+            raise RuntimeError(
+                f'`pikerd` failed to bind on {reg_addr}!\n'
+                'Maybe you have another daemon already running?'
+            )
 
         # assign globally for future daemon/task creation
         Services.actor_n = actor_nursery

From 56cd15fa51df126ebe576478d0fef48ec7fba559 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sun, 2 Apr 2023 17:59:42 -0400
Subject: [PATCH 102/294] ib: maybe incr client id; can't catch api errors..

Turns out we don't hook up our eventkit handler until after
`load_aio_clients()` is complete, which means we can't get
`ib_insync.Client.apiError` events unless we're inside the asyncio side
task. So I guess try to report any such errors during the API scan
(note the duplicate client id case is a special one from ibis itself)
even though we're not going to catch them trio side. The hack to work
around this is to just increment the client id value with the
`connect_retries` led `i` value even though that will break on more
than 3 clients attached to an API endpoint lul ..

Further adjustments made while trying to fix this properly:
- add a `remove_handler_on_err()` cm to disconnect a handler when the
  trio side of the channel closes.
- actually connect to client api errors in our `Client.inline_errors()`
- increase the connect timeout to a sec.
- change the trio-asyncio proxy response-msg loop over to `match:`
  syntax and raise on unhandled msgs from eventkit handlers.
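
The guard pattern in use (a minimal sketch; `event` and `to_trio` here
are stand-ins for any eventkit `Event` and trio-side channel pair,
mirroring the `apiError` hookup in the diff below)::

    def handler(msg: str) -> None:
        # if the trio side has gone away, dereg ourselves instead
        # of letting eventkit swallow + spam the traceback forever..
        with remove_handler_on_err(event, handler):
            to_trio.send_nowait(('error', msg))

    event.connect(handler)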
--- piker/brokers/ib/api.py | 103 ++++++++++++++++++++++++++++------------ 1 file changed, 73 insertions(+), 30 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 2281fa25..355ab362 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -20,18 +20,22 @@ """ from __future__ import annotations -from contextlib import asynccontextmanager as acm +from contextlib import ( + asynccontextmanager as acm, + contextmanager as cm, +) from contextlib import AsyncExitStack from dataclasses import asdict, astuple from datetime import datetime from functools import ( partial, - lru_cache, + # lru_cache, ) import itertools from math import isnan from typing import ( Any, + Callable, Optional, Union, ) @@ -47,6 +51,7 @@ import trio import tractor from tractor import to_asyncio import pendulum +from eventkit import Event import ib_insync as ibis from ib_insync.contract import ( Contract, @@ -131,11 +136,13 @@ class NonShittyWrapper(Wrapper): class NonShittyIB(ibis.IB): - """The beginning of overriding quite a few decisions in this lib. + ''' + The beginning of overriding quite a few decisions in this lib. - Don't use datetimes - Don't use named tuples - """ + + ''' def __init__(self): # override `ib_insync` internal loggers so we can see wtf @@ -312,6 +319,22 @@ _samplings: dict[int, tuple[str, str]] = { } +@cm +def remove_handler_on_err( + event: Event, + handler: Callable, +) -> None: + try: + yield + except trio.BrokenResourceError: + # XXX: eventkit's ``Event.emit()`` for whatever redic + # reason will catch and ignore regular exceptions + # resulting in tracebacks spammed to console.. + # Manually do the dereg ourselves. + log.exception(f'Disconnected from {event} updates') + event.disconnect(handler) + + class Client: ''' IB wrapped for our broker backend API. @@ -1015,6 +1038,21 @@ class Client: self.ib.errorEvent.connect(push_err) + api_err = self.ib.client.apiError + + def report_api_err(msg: str) -> None: + with remove_handler_on_err( + api_err, + report_api_err, + ): + breakpoint() + to_trio.send_nowait(( + 'error', + msg, + )) + + api_err.connect(report_api_err) + def positions( self, account: str = '', @@ -1144,7 +1182,7 @@ async def load_aio_clients( # the API TCP in `ib_insync` connection can be flaky af so instead # retry a few times to get the client going.. connect_retries: int = 3, - connect_timeout: float = 0.5, + connect_timeout: float = 1, disconnect_on_exit: bool = True, ) -> dict[str, Client]: @@ -1216,9 +1254,9 @@ async def load_aio_clients( await ib.connectAsync( host, port, - clientId=client_id, + clientId=client_id + i, - # this timeout is sensative on windows and will + # this timeout is sensitive on windows and will # fail without a good "timeout error" so be # careful. timeout=connect_timeout, @@ -1242,15 +1280,10 @@ async def load_aio_clients( OSError, ) as ce: _err = ce - - if i > 8: - # cache logic to avoid rescanning if we already have all - # clients loaded. - _scan_ignore.add(sockaddr) - raise - log.warning( - f'Failed to connect on {port} for {i} time, retrying...') + f'Failed to connect on {port} for {i} time with,\n' + f'{ib.client.apiError.value()}\n' + 'retrying with a new client id..') # Pre-collect all accounts available for this # connection and map account names to this client @@ -1457,6 +1490,7 @@ async def open_aio_client_method_relay( ) -> None: + # sync with `open_client_proxy()` caller to_trio.send_nowait(client) # TODO: separate channel for error handling? 
@@ -1466,25 +1500,34 @@ async def open_aio_client_method_relay( # back results while not to_trio._closed: msg = await from_trio.get() - if msg is None: - print('asyncio PROXY-RELAY SHUTDOWN') - break - meth_name, kwargs = msg - meth = getattr(client, meth_name) + match msg: + case None: # termination sentinel + print('asyncio PROXY-RELAY SHUTDOWN') + break - try: - resp = await meth(**kwargs) - # echo the msg back - to_trio.send_nowait({'result': resp}) + case (meth_name, kwargs): + meth_name, kwargs = msg + meth = getattr(client, meth_name) - except ( - RequestError, + try: + resp = await meth(**kwargs) + # echo the msg back + to_trio.send_nowait({'result': resp}) - # TODO: relay all errors to trio? - # BaseException, - ) as err: - to_trio.send_nowait({'exception': err}) + except ( + RequestError, + + # TODO: relay all errors to trio? + # BaseException, + ) as err: + to_trio.send_nowait({'exception': err}) + + case {'error': content}: + to_trio.send_nowait({'exception': content}) + + case _: + raise ValueError(f'Unhandled msg {msg}') @acm From 96006b24228d08bdc9086b98aa254efee1a1e560 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 2 Apr 2023 20:33:53 -0400 Subject: [PATCH 103/294] Adjust tests to `.clearing._client.OrderClient` type --- tests/conftest.py | 4 ++-- tests/test_services.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 3a0afba2..c783256f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -206,13 +206,13 @@ async def _open_test_pikerd_and_ems( @pytest.fixture def open_test_pikerd_and_ems( open_test_pikerd, - fqsn: str = 'xbtusdt.kraken', + fqme: str = 'xbtusdt.kraken', mode: str = 'paper', loglevel: str = 'info', ): yield partial( _open_test_pikerd_and_ems, - fqsn, + fqme, mode, loglevel, open_test_pikerd diff --git a/tests/test_services.py b/tests/test_services.py index 29e613e3..082f629f 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -24,7 +24,7 @@ from piker.clearing._messages import ( Status, ) from piker.clearing._client import ( - OrderBook, + OrderClient, ) @@ -121,7 +121,7 @@ def test_ensure_ems_in_paper_actors( async def main(): # type declares - book: OrderBook + book: OrderClient trades_stream: tractor.MsgStream pps: dict[str, list[BrokerdPosition]] accounts: list[str] From 008bfed702050cfba42ad7892d862c337c184ae8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 2 Apr 2023 23:18:00 -0400 Subject: [PATCH 104/294] ib: lul, fix oil (cl) venue to correctly be nymex.. --- piker/brokers/ib/api.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 355ab362..3d77ee6d 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -180,6 +180,8 @@ _adhoc_cmdty_set = { 'xagusd.cmdty', # silver spot } +# NOTE: if you aren't seeing one of these symbol's futues contracts +# show up, it's likely the `.` part is wrong! _adhoc_futes_set = { # equities @@ -205,7 +207,7 @@ _adhoc_futes_set = { 'mgc.comex', # micro # oil & gas - 'cl.comex', + 'cl.nymex', 'ni.comex', # silver futes 'qi.comex', # mini-silver futes From d67031d9ab676c0244123ef03ff8a5359643b318 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 12:59:30 -0400 Subject: [PATCH 105/294] Ensure we set the test config dir in the root actor.. 
Not sure how this worked before but we need to also override the `piker._config_dir: Path` in the root actor when running in `pytest`; my guess is something in the old test suite was masking this problem after the change to passing the dir path down through the runtime vars via `tractor`? Also this drops the ems related fixtures/factories since they're specific enough to define in the clearing engine tests directly. --- tests/conftest.py | 43 ++++++++----------------------------------- 1 file changed, 8 insertions(+), 35 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index c783256f..897c6b7f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ import os from pathlib import Path import pytest +import pytest_trio import tractor from piker import ( config, @@ -13,7 +14,6 @@ from piker.service import ( Services, ) from piker.log import get_console_log -from piker.clearing._client import open_ems def pytest_addoption(parser): @@ -122,6 +122,8 @@ async def _open_test_pikerd( # or just in sequence per test, so we keep root. drop_root_perms_for_ahab=False, + debug_mode=True, + **kwargs, ) as service_manager, @@ -153,6 +155,11 @@ def open_test_pikerd( tmpconfdir.mkdir() tmpconfdir_str: str = str(tmpconfdir) + # override config dir in the root actor (aka + # this top level testing process). + from piker import config + config._config_dir = tmpconfdir + # NOTE: on linux the tmp config dir is generally located at: # /tmp/pytest-of-/pytest-/test_/ # the default `pytest` config ensures that only the last 4 test @@ -183,37 +190,3 @@ def open_test_pikerd( # - no leaked subprocs or shm buffers # - all requested container service are torn down # - certain ``tractor`` runtime state? - - -@acm -async def _open_test_pikerd_and_ems( - fqsn, - mode, - loglevel, - open_test_pikerd -): - async with ( - open_test_pikerd() as (_, _, _, services), - open_ems( - fqsn, - mode=mode, - loglevel=loglevel, - ) as ems_services, - ): - yield (services, ems_services) - - -@pytest.fixture -def open_test_pikerd_and_ems( - open_test_pikerd, - fqme: str = 'xbtusdt.kraken', - mode: str = 'paper', - loglevel: str = 'info', -): - yield partial( - _open_test_pikerd_and_ems, - fqme, - mode, - loglevel, - open_test_pikerd - ) From b619e4a82d20f0882628db5d4efcabdcbc163f72 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:03:52 -0400 Subject: [PATCH 106/294] WIP complete rework of paper engine tests More or less we need to be able to audit not only simple "make trades check pps.toml files" tests (which btw were great to get started!). We also need more sophisticated and granular order mgmt and service config scenarios, - full e2e EMS msg flow verification - multi-client (dis)connection scenarios and/or monitoring - dark order clearing and offline storage - accounting schema and position calcs detailing As such, this is the beginning to "modularlizingz" the components needed in the test harness to this end by breaking up the `OrderClient` control flows vs. position checking logic so as to allow for more flexible test scenario cases and likely `pytest` parametrizations over different transaction sequences. 
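
Eg. the kind of thing this should eventually enable (hypothetical
parametrization; none of these cases exist yet and the body is just
a placeholder)::

    import pytest

    @pytest.mark.parametrize(
        'action, executions, size',
        [
            ('buy', 1, 0.01),
            ('buy', 5, 0.01),
            ('sell', 5, 0.01),
        ],
    )
    def test_clearing_sequences(action, executions, size):
        # placeholder: the real body would drive `open_ems()` +
        # `submit_order()` from the harness below and then audit
        # the resulting pps/ledger state.
        assert executions > 0 and size > 0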
--- tests/test_paper.py | 442 +++++++++++++++++++++++++++----------------- 1 file changed, 271 insertions(+), 171 deletions(-) diff --git a/tests/test_paper.py b/tests/test_paper.py index 2f46c559..01ea57d4 100644 --- a/tests/test_paper.py +++ b/tests/test_paper.py @@ -1,230 +1,330 @@ ''' Paper-mode testing ''' - -import trio -from exceptiongroup import BaseExceptionGroup +from contextlib import ( + contextmanager as cm, +) from typing import ( + Awaitable, + Callable, AsyncContextManager, Literal, ) +import trio +# import pytest_trio +from exceptiongroup import BaseExceptionGroup + import pytest -from tractor._exceptions import ContextCancelled +import tractor from uuid import uuid4 from functools import partial +from piker.service import Services from piker.log import get_logger -from piker.clearing._messages import Order +from piker.clearing._messages import ( + Order, + Status, + # Cancel, + BrokerdPosition, +) +from piker.clearing import ( + open_ems, + OrderClient, +) +from piker.accounting._mktinfo import ( + unpack_fqme, +) from piker.accounting import ( open_pps, + Position, ) log = get_logger(__name__) -def get_fqsn(broker, symbol): - fqsn = f'{symbol}.{broker}' - return (fqsn, symbol, broker) +async def open_pikerd( + open_test_pikerd: AsyncContextManager, + +) -> Services: + async with ( + open_test_pikerd() as (_, _, _, services), + ): + yield services -oid = '' -test_exec_mode = 'live' -(fqsn, symbol, broker) = get_fqsn('kraken', 'xbtusdt') -brokers = [broker] -account = 'paper' - - -async def _async_main( - open_test_pikerd_and_ems: AsyncContextManager, - action: Literal['buy', 'sell'] | None = None, - price: int = 30000, +async def submit_order( + client: OrderClient, + trades_stream: tractor.MsgStream, + fqme: str, + action: Literal['buy', 'sell'], + price: float = 30000., executions: int = 1, size: float = 0.01, + exec_mode: str = 'live', + account: str = 'paper', - # Assert options - assert_entries: bool = False, - assert_pps: bool = False, - assert_zeroed_pps: bool = False, - assert_msg: bool = False, - -) -> None: +) -> list[Status | BrokerdPosition]: ''' Start piker, place a trade and assert data in pps stream, ledger and position table. ''' - oid: str = '' - last_msg = {} + sent: list[Order] = [] + broker, key, suffix = unpack_fqme(fqme) - async with open_test_pikerd_and_ems() as ( - services, - (book, trades_stream, pps, accounts, dialogs), - ): - if action: - for x in range(executions): - oid = str(uuid4()) - order = Order( - exec_mode=test_exec_mode, - action=action, - oid=oid, - account=account, - size=size, - symbol=fqsn, - price=price, - brokers=brokers, - ) - # This is actually a syncronous call to push a message - book.send(order) + for _ in range(executions): - async for msg in trades_stream: - last_msg = msg - match msg: - # Wait for position message before moving on - case {'name': 'position'}: - break - - # Teardown piker like a user would - raise KeyboardInterrupt - - if assert_entries or assert_pps or assert_zeroed_pps or assert_msg: - _assert( - assert_entries, - assert_pps, - assert_zeroed_pps, - pps, - last_msg, - size, - executions, + order = Order( + exec_mode=exec_mode, + action=action, + oid=str(uuid4()), + account=account, + size=size, + symbol=fqme, + price=price, + brokers=[broker], ) + sent.append(order) + await client.send(order) + + # TODO: i guess we should still test the old sync-API? 
+ # client.send_nowait(order) + + msgs: list[Status | BrokerdPosition] = [] + async for msg in trades_stream: + print(f'Rx Order resp: {msg}') + match msg: + + # Wait for position message before moving on + case {'name': 'position'}: + ppmsg = BrokerdPosition(**msg) + msgs.append(ppmsg) + break + + case {'name': 'status'}: + msgs.append(Status(**msg)) + + return sent, msgs -def _assert( - assert_entries, - assert_pps, - assert_zerod_pps, - pps, - last_msg, - size, - executions, +def run_and_catch( + fn: Callable[..., Awaitable], + + expect_errs: tuple[Exception] = ( + KeyboardInterrupt, + tractor.ContextCancelled, + ) + ): - with ( - open_pps(broker, account, write_on_exit=False) as table, - ): - ''' - Assert multiple cases including pps, - ledger and final position message state - - ''' - if assert_entries: - for key, val in [ - ('broker', broker), - ('account', account), - ('symbol', fqsn), - ('size', size * executions), - ('currency', symbol), - ('avg_price', table.pps[symbol].ppu) - ]: - assert last_msg[key] == val - - if assert_pps: - last_ppu = pps[(broker, account)][-1] - assert last_ppu['avg_price'] == table.pps[symbol].ppu - - if assert_zerod_pps: - assert not bool(table.pps) - - -def _run_test_and_check(fn): ''' Close position and assert empty position in pps ''' - with pytest.raises(BaseExceptionGroup) as exc_info: + if expect_errs: + with pytest.raises(BaseExceptionGroup) as exc_info: + trio.run(fn) + + for err in exc_info.value.exceptions: + assert type(err) in expect_errs + else: trio.run(fn) - for exception in exc_info.value.exceptions: - assert isinstance(exception, KeyboardInterrupt) or isinstance( - exception, ContextCancelled - ) + +@cm +def load_and_check_pos( + order: Order, + ppmsg: BrokerdPosition, + +) -> None: + + with open_pps(ppmsg.broker, ppmsg.account) as table: + + # NOTE: a special case is here since the `PpTable.pps` are + # normally indexed by the particular broker's + # `Position.bs_mktid: str` (a unique market / symbol id provided + # by their systems/design) but for the paper engine case, this + # is the same the fqme. + pp: Position = table.pps[ppmsg.symbol] + + assert ppmsg.size == pp.size + assert ppmsg.avg_price == pp.ppu + + yield pp -def test_buy( - open_test_pikerd_and_ems: AsyncContextManager, +@pytest.mark.trio +async def test_ems_err_on_bad_broker( + open_pikerd: Services, + loglevel: str, +): + try: + async with open_ems( + 'doggy.smiles', + mode='paper', + loglevel=loglevel, + ) as _: + pytest.fail('EMS is working on non-broker!?') + except ModuleNotFoundError: + pass + + +async def atest_buy( + loglevel: str, ): ''' Enter a trade and assert entries are made in pps and ledger files. + Shutdown the ems-client and ensure on reconnect we get the expected + matching ``BrokerdPosition`` and pps.toml entries. 
+ ''' - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, + broker: str = 'kraken' + mkt_key: str = 'xbtusdt' + fqme: str = f'{mkt_key}.{broker}' + + startup_pps: dict[ + tuple[str, str], # brokername, acctid + list[BrokerdPosition], + ] + + assert loglevel == 'info' + async with ( + open_ems( + fqme, + mode='paper', + loglevel=loglevel, + ) as ( + client, # OrderClient + trades_stream, + startup_pps, + accounts, + dialogs, + ) + ): + # no positions on startup + assert not startup_pps + assert 'paper' in accounts + + sent, msgs = await submit_order( + client, + trades_stream, + fqme, action='buy', - assert_entries=True, - ), - ) - - # Open ems and assert existence of pps entries - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, - assert_pps=True, - ), + size=1, + ) + + last_order = sent[-1] + + last_resp = msgs[-1] + assert isinstance(last_resp, BrokerdPosition) + + # check that pps.toml for account has been updated + with load_and_check_pos( + last_order, + last_resp, + ) as pos: + return pos + + # disconnect from EMS, then reconnect and ensure we get our same + # position relayed to us again. + + # _run_test_and_check( + # partial( + # _async_main, + # open_test_pikerd_and_ems=open_test_pikerd_and_ems, + # action='buy', + # assert_entries=True, + # ), + # ) + + # await _async_main( + # open_test_pikerd_and_ems=open_test_pikerd_and_ems, + # assert_pps=True, + # ) + # _run_test_and_check( + # partial( + # _async_main, + # open_test_pikerd_and_ems=open_test_pikerd_and_ems, + # assert_pps=True, + # ), + # ) + + +def test_open_long( + open_test_pikerd: AsyncContextManager, + loglevel: str, + +) -> None: + + async def atest(): + async with ( + open_test_pikerd() as (_, _, _, services), + ): + assert await atest_buy(loglevel) + + # Teardown piker like a user would from cli + # raise KeyboardInterrupt + + run_and_catch( + atest, + expect_errs=None, ) + # Open ems another time and assert existence of prior + # pps entries confirming they persisted -def test_sell( - open_test_pikerd_and_ems: AsyncContextManager, -): - ''' - Sell position and ensure pps are zeroed. - ''' - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, - action='sell', - price=1, - ), - ) +# def test_sell( +# open_test_pikerd_and_ems: AsyncContextManager, +# ): +# ''' +# Sell position and ensure pps are zeroed. - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, - assert_zeroed_pps=True, - ), - ) +# ''' +# _run_test_and_check( +# partial( +# _async_main, +# open_test_pikerd_and_ems=open_test_pikerd_and_ems, +# action='sell', +# price=1, +# ), +# ) + +# _run_test_and_check( +# partial( +# _async_main, +# open_test_pikerd_and_ems=open_test_pikerd_and_ems, +# assert_zeroed_pps=True, +# ), +# ) -def test_multi_sell( - open_test_pikerd_and_ems: AsyncContextManager, -): - ''' - Make 5 market limit buy orders and - then sell 5 slots at the same price. - Finally, assert cleared positions. +# def test_multi_sell( +# open_test_pikerd_and_ems: AsyncContextManager, +# ): +# ''' +# Make 5 market limit buy orders and +# then sell 5 slots at the same price. +# Finally, assert cleared positions. 
- ''' - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, - action='buy', - executions=5, - ), - ) +# ''' +# _run_test_and_check( +# partial( +# _async_main, +# open_test_pikerd_and_ems=open_test_pikerd_and_ems, +# action='buy', +# executions=5, +# ), +# ) - _run_test_and_check( - partial( - _async_main, - open_test_pikerd_and_ems=open_test_pikerd_and_ems, - action='sell', - executions=5, - price=1, - assert_zeroed_pps=True, - ), - ) +# _run_test_and_check( +# partial( +# _async_main, +# open_test_pikerd_and_ems=open_test_pikerd_and_ems, +# action='sell', +# executions=5, +# price=1, +# assert_zeroed_pps=True, +# ), +# ) From 1944f75ae88f77d5f908854089280941a060552d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:14:23 -0400 Subject: [PATCH 107/294] Expose `piker.clearing.OrderClient` --- piker/clearing/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/clearing/__init__.py b/piker/clearing/__init__.py index bd95a8ab..b2cc5fa7 100644 --- a/piker/clearing/__init__.py +++ b/piker/clearing/__init__.py @@ -19,11 +19,16 @@ Market machinery for order executions, book, management. """ from ..log import get_logger -from ._client import open_ems +from ._client import ( + open_ems, + OrderClient, +) __all__ = [ 'open_ems', + 'OrderClient', + ] log = get_logger(__name__) From eb7a7462ad64bb81483da823bb34732f72d49c03 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:14:52 -0400 Subject: [PATCH 108/294] Always pass `loglevel: str` to daemon root task eps If you want a sub-actor to write console logs (with the right level) the `get_console_log()` call has to be made somewhere during service task startup. Previously this wasn't well formalized nor used (depending on daemon) so passing `loglevel` to the service's root-task-endpoint (eg. `_setup_persistent_brokerd()`) encourages that the daemon's logging is configured during init according to the spawner's requesting logging config. The previous `get_console_log()` call happening inside `maybe_spawn_daemon()` wasn't actually doing anything in the target daemon XD, so obviously remove that and instead passthrough loglevel to the ctx endpoints and service manager methods. --- piker/service/_daemon.py | 45 +++++++++++++++++++++++++--------------- piker/service/_mngr.py | 4 ++-- 2 files changed, 30 insertions(+), 19 deletions(-) diff --git a/piker/service/_daemon.py b/piker/service/_daemon.py index 3e0d2080..0521f830 100644 --- a/piker/service/_daemon.py +++ b/piker/service/_daemon.py @@ -31,7 +31,6 @@ import tractor from ._util import ( log, # sub-sys logger - get_console_log, ) from ..brokers import get_brokermod from ._mngr import ( @@ -77,9 +76,6 @@ async def maybe_spawn_daemon( clients. ''' - if loglevel: - get_console_log(loglevel) - # serialize access to this section to avoid # 2 or more tasks racing to create a daemon lock = Services.locks[service_name] @@ -91,10 +87,10 @@ async def maybe_spawn_daemon( yield portal return - log.warning(f"Couldn't find any existing {service_name}") - - # TODO: really shouldn't the actor spawning be part of the service - # starting method `Services.start_service()` ? + log.warning( + f"Couldn't find any existing {service_name}\n" + 'Attempting to spawn new daemon-service..' + ) # ask root ``pikerd`` daemon to spawn the daemon we need if # pikerd is not live we now become the root of the @@ -114,23 +110,33 @@ async def maybe_spawn_daemon( # service task for that actor. 
started: bool if pikerd_portal is None: - started = await service_task_target(**spawn_args) + started = await service_task_target( + loglevel=loglevel, + **spawn_args, + ) else: - # tell the remote `pikerd` to start the target, - # the target can't return a non-serializable value - # since it is expected that service startingn is - # non-blocking and the target task will persist running - # on `pikerd` after the client requesting it's start - # disconnects. + # request a remote `pikerd` (service manager) to start the + # target daemon-task, the target can't return + # a non-serializable value since it is expected that service + # starting is non-blocking and the target task will persist + # running "under" or "within" the `pikerd` actor tree after + # the questing client disconnects. in other words this + # spawns a persistent daemon actor that continues to live + # for the lifespan of whatever the service manager inside + # `pikerd` says it should. started = await pikerd_portal.run( service_task_target, + loglevel=loglevel, **spawn_args, ) if started: log.info(f'Service {service_name} started!') + # block until we can discover (by IPC connection) to the newly + # spawned daemon-actor and then deliver the portal to the + # caller. async with tractor.wait_for_actor(service_name) as portal: lock.release() yield portal @@ -180,8 +186,11 @@ async def spawn_brokerd( await Services.start_service_task( dname, portal, + + # signature of target root-task endpoint _setup_persistent_brokerd, brokername=brokername, + loglevel=loglevel, ) return True @@ -243,7 +252,10 @@ async def spawn_emsd( await Services.start_service_task( 'emsd', portal, + + # signature of target root-task endpoint _setup_persistent_emsd, + loglevel=loglevel, ) return True @@ -255,10 +267,9 @@ async def maybe_open_emsd( loglevel: str | None = None, **kwargs, -) -> tractor._portal.Portal: # noqa +) -> tractor.Portal: # noqa async with maybe_spawn_daemon( - 'emsd', service_task_target=spawn_emsd, spawn_args={'loglevel': loglevel}, diff --git a/piker/service/_mngr.py b/piker/service/_mngr.py index 80a84487..69712c07 100644 --- a/piker/service/_mngr.py +++ b/piker/service/_mngr.py @@ -56,7 +56,7 @@ class Services: name: str, portal: tractor.Portal, target: Callable, - **kwargs, + **ctx_kwargs, ) -> (trio.CancelScope, tractor.Context): ''' @@ -81,7 +81,7 @@ class Services: with trio.CancelScope() as cs: async with portal.open_context( target, - **kwargs, + **ctx_kwargs, ) as (ctx, first): From 4c1d174801605f4c41fb4183e8e9d2b98d550922 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:25:36 -0400 Subject: [PATCH 109/294] Expect `loglevel: str` in brokerd root task ep Set the level right after spawn and once for the lifetime of the daemon. --- piker/data/__init__.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/piker/data/__init__.py b/piker/data/__init__.py index 5c83150e..74eefb83 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -56,6 +56,7 @@ __all__ = [ async def _setup_persistent_brokerd( ctx: tractor.Context, brokername: str, + loglevel: str | None = None, ) -> None: ''' @@ -64,7 +65,9 @@ async def _setup_persistent_brokerd( the broker backend as needed. 
''' - get_console_log(tractor.current_actor().loglevel) + get_console_log( + loglevel or tractor.current_actor().loglevel, + ) from .feed import ( _bus, @@ -84,5 +87,3 @@ async def _setup_persistent_brokerd( # we pin this task to keep the feeds manager active until the # parent actor decides to tear it down await trio.sleep_forever() - - From 97e3c06af8d9117c4896bfd2dba91709723d19da Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:27:43 -0400 Subject: [PATCH 110/294] Set `emsd` log level and clearly report startup pps Change the root-service-task entrypoint to accept the level and setup a console log as is now expected for all sub-services. Cast all backend delivered startup `BrokerdPosition` msgs and log them to console. --- piker/clearing/_ems.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index a4e40587..20ef8de0 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -43,6 +43,7 @@ import tractor from ._util import ( log, # sub-sys logger + get_console_log, ) from ..data._normalize import iterticks from ..accounting._mktinfo import ( @@ -411,6 +412,9 @@ class Router(Struct): trades_endpoint is None or exec_mode == 'paper' ): + # for logging purposes + brokermod = paper + # for paper mode we need to mock this trades response feed # so we load bidir stream to a new sub-actor running # a paper-simulator clearing engine. @@ -468,13 +472,15 @@ class Router(Struct): # msgs. pps = {} for msg in positions: - log.info(f'loading pp: {msg}') - account = msg['account'] + msg = BrokerdPosition(**msg) + log.info( + f'loading pp for {brokermod.__name__}:\n' + f'{pformat(msg.to_dict())}', + ) - # TODO: better value error for this which - # dumps the account and message and states the - # mismatch.. + # TODO: state any mismatch here? + account = msg.account assert account in accounts pps.setdefault( @@ -635,11 +641,14 @@ _router: Router = None @tractor.context async def _setup_persistent_emsd( - ctx: tractor.Context, + loglevel: str | None = None, ) -> None: + if loglevel: + get_console_log(loglevel) + global _router # open a root "service nursery" for the ``emsd`` actor @@ -1371,7 +1380,7 @@ async def _emsd_main( ctx: tractor.Context, fqme: str, exec_mode: str, # ('paper', 'live') - loglevel: str = 'info', + loglevel: str | None = None, ) -> tuple[ dict[ From 9770a39d7bf91ed2ac686897b205366505dc49d7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:31:39 -0400 Subject: [PATCH 111/294] Cancel the `OrderClient` sync-method relay task on exit --- piker/clearing/_client.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 8f531d6d..c9ad0d67 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -240,7 +240,10 @@ async def open_ems( ''' broker, symbol, suffix = unpack_fqme(fqme) - async with maybe_open_emsd(broker) as portal: + async with maybe_open_emsd( + broker, + loglevel=loglevel, + ) as portal: mod = get_brokermod(broker) if ( @@ -304,3 +307,6 @@ async def open_ems( accounts, dialogs, ) + + # stop the sync-msg-relay task on exit. 
+ n.cancel_scope.cancel() From f51361435ff77b104da6649f1f72d58c6f8be3ac Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:31:55 -0400 Subject: [PATCH 112/294] paper engine: use the `fqme` for the `bs_mktid` Instead of stripping the broker part just use the full fqme for all `Transaction.bs_mktid: str` values since it makes indexing the `PpTable` much easier with less key mangling.. --- piker/clearing/_paper_engine.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 07593d79..85956551 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -249,8 +249,10 @@ class PaperBoi(Struct): ) await self.ems_trades_stream.send(msg) - # lookup any existing position - key = fqme.rstrip(f'.{self.broker}') + # NOTE: for paper we set the "bs_mktid" as just the fqme since + # we don't actually have any unique backend symbol ourselves + # other then this thing, our fqme address. + bs_mktid: str = fqme t = Transaction( fqsn=fqme, sym=self._syms[fqme], @@ -259,7 +261,7 @@ class PaperBoi(Struct): price=price, cost=0, # TODO: cost model dt=pendulum.from_timestamp(fill_time_s), - bs_mktid=key, + bs_mktid=bs_mktid, ) tx = t.to_dict() @@ -270,17 +272,19 @@ class PaperBoi(Struct): self.ppt.update_from_trans({oid: t}) # transmit pp msg to ems - pp = self.ppt.pps[key] + pp = self.ppt.pps[bs_mktid] pp_msg = BrokerdPosition( broker=self.broker, account='paper', symbol=fqme, + + size=pp.size, + avg_price=pp.ppu, + # TODO: we need to look up the asset currency from # broker info. i guess for crypto this can be # inferred from the pair? - currency=key, - size=pp.size, - avg_price=pp.ppu, + # currency=bs_mktid, ) await self.ems_trades_stream.send(pp_msg) From a63599828b48ba3f5606645495f676c4ccefcfed Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:34:34 -0400 Subject: [PATCH 113/294] Drop masked `MktPair.size_tick_digits()` cruft --- piker/accounting/_mktinfo.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 0a301986..02ed7a9d 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -408,10 +408,6 @@ class MktPair(Struct, frozen=True): rounding=ROUND_HALF_EVEN ) - # @property - # def size_tick_digits(self) -> int: - # return float_digits(self.size_tick) - # TODO: BACKWARD COMPAT, TO REMOVE? 
@property def type_key(self) -> str: From 70efce16317cb4dc74165f186b26267976eaa785 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:35:29 -0400 Subject: [PATCH 114/294] `kraken`: handle ws connection startup status msgs --- piker/brokers/kraken/feed.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 5ea96e28..a54329db 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -189,6 +189,17 @@ async def process_data_feed_msgs( # chan_id, *payload_array, chan_name, pair = msg # print(msg) + case { + 'connectionID': conid, + 'event': 'systemStatus', + 'status': 'online', + 'version': ver, + }: + log.info( + f'Established {ver} ws connection with id: {conid}' + ) + continue + case _: print(f'UNHANDLED MSG: {msg}') # yield msg From b2a5f8698d0271f41ab3e58d3f68e313c6b242fb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:36:45 -0400 Subject: [PATCH 115/294] Use `--pdb` flag to config `brokerd` debug mode --- piker/accounting/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 6018ffe1..bb4e28df 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -115,7 +115,7 @@ def sync( ): portal = await an.start_actor( loglevel=loglevel, - debug_mode=True, + debug_mode=pdb, **start_kwargs, ) From 2d609dceac13d77c7359a5906675622ea7534b28 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 4 Apr 2023 13:45:52 -0400 Subject: [PATCH 116/294] Drop `loglevel` from `spawn_args` inputs to `maybe_spawn_daemon()` --- piker/data/_sampling.py | 6 +++--- piker/service/_daemon.py | 24 ++++++++++++++---------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 84dce08e..3ebdd140 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -429,7 +429,7 @@ async def spawn_samplerd( async def maybe_open_samplerd( loglevel: str | None = None, - **kwargs, + **pikerd_kwargs, ) -> tractor.Portal: # noqa ''' @@ -442,9 +442,9 @@ async def maybe_open_samplerd( async with maybe_spawn_daemon( dname, service_task_target=spawn_samplerd, - spawn_args={'loglevel': loglevel}, + spawn_args={}, loglevel=loglevel, - **kwargs, + **pikerd_kwargs, ) as portal: yield portal diff --git a/piker/service/_daemon.py b/piker/service/_daemon.py index 0521f830..ba1a467a 100644 --- a/piker/service/_daemon.py +++ b/piker/service/_daemon.py @@ -57,11 +57,13 @@ async def maybe_spawn_daemon( service_name: str, service_task_target: Callable, - spawn_args: dict[str, Any], - loglevel: str | None = None, + spawn_args: dict[str, Any], + + loglevel: str | None = None, singleton: bool = False, - **kwargs, + + **pikerd_kwargs, ) -> tractor.Portal: ''' @@ -97,7 +99,7 @@ async def maybe_spawn_daemon( # process tree async with maybe_open_pikerd( loglevel=loglevel, - **kwargs, + **pikerd_kwargs, ) as pikerd_portal: @@ -200,7 +202,8 @@ async def maybe_spawn_brokerd( brokername: str, loglevel: str | None = None, - **kwargs, + + **pikerd_kwargs, ) -> tractor.Portal: ''' @@ -214,10 +217,10 @@ async def maybe_spawn_brokerd( service_task_target=spawn_brokerd, spawn_args={ 'brokername': brokername, - 'loglevel': loglevel, }, loglevel=loglevel, - **kwargs, + + **pikerd_kwargs, ) as portal: yield portal @@ -265,16 +268,17 @@ async def maybe_open_emsd( brokername: str, loglevel: str | None = None, - **kwargs, + + **pikerd_kwargs, ) -> tractor.Portal: # noqa async 
with maybe_spawn_daemon(

         'emsd',
         service_task_target=spawn_emsd,
-        spawn_args={'loglevel': loglevel},
+        spawn_args={},
         loglevel=loglevel,
-        **kwargs,
+        **pikerd_kwargs,

     ) as portal:
         yield portal

From 2806a4c0e5e43b336767a9a9df7cc587f3bc28a0 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 4 Apr 2023 14:09:39 -0400
Subject: [PATCH 117/294] Tweak ems msg-received log msg

---
 piker/clearing/_ems.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py
index 20ef8de0..6d18b686 100644
--- a/piker/clearing/_ems.py
+++ b/piker/clearing/_ems.py
@@ -698,7 +698,7 @@ async def translate_and_relay_brokerd_events(
     async for brokerd_msg in brokerd_trades_stream:
         fmsg = pformat(brokerd_msg)
         log.info(
-            f'Received broker trade event:\n'
+            f'Rx brokerd trade msg:\n'
             f'{fmsg}'
         )
         status_msg: Optional[Status] = None

From 33a78366ff37fd66efa553817de5c5319902a267 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 4 Apr 2023 14:10:12 -0400
Subject: [PATCH 118/294] paper: always sync pps.toml state on startup

---
 piker/clearing/_paper_engine.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index 85956551..3c6fdc4c 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -473,6 +473,7 @@ async def handle_order_requests(
                 BrokerdOrderAck(
                     oid=order.oid,
                     reqid=reqid,
+                    account='paper'
                 )
             )

@@ -564,6 +565,10 @@ async def trades_dialogue(
             ['paper'],
         ))

+        # write new positions state in case ledger was
+        # newer than that tracked in pps.toml
+        ppt.write_config()
+
         # exit early since no fqme was passed,
         # normally this case is just to load
         # positions "offline".

From b8a975a3fd5e683bc54c9b81905629fabdd7fedc Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 4 Apr 2023 14:29:25 -0400
Subject: [PATCH 119/294] Drop `"..."` from pps.toml entries

Add special blocks to handle removing the broker account levels from
both writing and reading routines.
---
 piker/accounting/_pos.py | 35 ++++++++++++++++++++++++++++++-----
 1 file changed, 30 insertions(+), 5 deletions(-)

diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 0a3e2949..12daa6bf 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -504,6 +504,8 @@ class PpTable(Struct):
         trans: dict[str, Transaction],
         cost_scalar: float = 2,

+        mkt: MktPair | None = None,
+
     ) -> dict[str, Position]:

         pps = self.pps
@@ -521,7 +523,7 @@ class PpTable(Struct):

             # template the mkt-info presuming a legacy market ticks
             # if no info exists in the transactions..
-            mkt: MktPair | Symbol = t.sys
+            mkt: MktPair | Symbol | None = mkt or t.sys
             if not mkt:
                 mkt = MktPair.from_fqme(
                     fqme,
@@ -671,7 +673,20 @@ class PpTable(Struct):
                 f'Updating positions in ``{self.conf_path}``:\n'
                 f'n{pformat(pp_entries)}'
             )
-            self.conf[self.brokername][self.acctid] = pp_entries
+
+            if self.brokername in self.conf:
+                log.warning(
+                    f'Rewriting {self.conf_path} keys to drop <broker.acct>!'
+                )
+                # legacy key schema including <brokername>.<acctid>, so
+                # rewrite all entries to drop those tables since we now
+                # put that in the filename!
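# XXX aside (standalone sketch, not part of this patch; broker and
# account names are invented): the key-schema flattening performed
# here, and again in `open_pps()` below, boils down to:
_conf = {'kraken': {'algotrading': {'xbtusdt.kraken': {'size': 1.0}}}}
_accounts = _conf.pop('kraken')              # drop the broker level
_conf.update(_accounts.pop('algotrading'))   # drop the account level
assert list(_conf) == ['xbtusdt.kraken']     # flat, fqme-keyed entries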
+                accounts = self.conf.pop(self.brokername)
+                assert len(accounts) == 1
+                entries = accounts.pop(self.acctid)
+                self.conf.update(entries)
+
+            self.conf.update(pp_entries)

         elif (
             self.brokername in self.conf and
             self.acctid in self.conf[self.brokername]
         ):
             del self.conf[self.brokername][self.acctid]
             if len(self.conf[self.brokername]) == 0:
                 del self.conf[self.brokername]

@@ -758,8 +773,18 @@ def open_pps(
     conf, conf_path = config.load(
         f'pps.{brokername}.{acctid}',
     )
-    brokersection = conf.setdefault(brokername, {})
-    pps = brokersection.setdefault(acctid, {})
+
+    if brokername in conf:
+        log.warning(
+            f'Rewriting {conf_path} keys to drop <broker.acct>!'
+        )
+        # legacy key schema including <brokername>.<acctid>, so
+        # rewrite all entries to drop those tables since we now
+        # put that in the filename!
+        accounts = conf.pop(brokername)
+        for acctid in accounts.copy():
+            entries = accounts.pop(acctid)
+            conf.update(entries)

     # TODO: ideally we can pass in an existing
     # pps state to this right? such that we
@@ -782,7 +807,7 @@ def open_pps(

     # unmarshal/load ``pps.toml`` config entries into object form
     # and update `PpTable` obj entries.
-    for fqme, entry in pps.items():
+    for fqme, entry in conf.items():

         # atype = entry.get('asset_type', '')

From 05a33ae634ff6c154eec586750736980a1a95874 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 4 Apr 2023 21:28:52 -0400
Subject: [PATCH 120/294] Make the default order size a decimal

---
 tests/test_paper.py | 11 ++++------7
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/tests/test_paper.py b/tests/test_paper.py
index 01ea57d4..75be3dc2 100644
--- a/tests/test_paper.py
+++ b/tests/test_paper.py
@@ -91,12 +91,11 @@ async def submit_order(
     # TODO: i guess we should still test the old sync-API?
     # client.send_nowait(order)

+    # Wait for position message before moving on to verify flow(s)
+    # for the multi-order position entry/exit.
     msgs: list[Status | BrokerdPosition] = []
     async for msg in trades_stream:
-        print(f'Rx Order resp: {msg}')
         match msg:
-
-            # Wait for position message before moving on
             case {'name': 'position'}:
                 ppmsg = BrokerdPosition(**msg)
                 msgs.append(ppmsg)
@@ -187,8 +186,6 @@ async def atest_buy(
         tuple[str, str],  # brokername, acctid
         list[BrokerdPosition],
     ]
-
-    assert loglevel == 'info'
     async with (
         open_ems(
             fqme,
@@ -196,7 +193,7 @@ async def atest_buy(
             loglevel=loglevel,
         ) as (
             client,  # OrderClient
-            trades_stream,
+            trades_stream,  # tractor.MsgStream
             startup_pps,
             accounts,
             dialogs,
@@ -211,7 +208,7 @@ async def atest_buy(
             trades_stream,
             fqme,
             action='buy',
-            size=1,
+            size=0.01,
         )

         last_order = sent[-1]

From 5ee044e418a74186885eea36daa4c233a5e7c6ea Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 11:56:15 -0400
Subject: [PATCH 121/294] Another `@acm` in `._cacheables` XD

---
 piker/_cacheables.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/piker/_cacheables.py b/piker/_cacheables.py
index ba7361c3..6746fc2f 100644
--- a/piker/_cacheables.py
+++ b/piker/_cacheables.py
@@ -21,7 +21,7 @@ Cacheing apis and toolz.
from collections import OrderedDict from contextlib import ( - asynccontextmanager, + asynccontextmanager as acm, ) from tractor.trionics import maybe_open_context @@ -62,7 +62,7 @@ def async_lifo_cache(maxsize=128): return decorator -@asynccontextmanager +@acm async def open_cached_client( brokername: str, ) -> 'Client': # noqa From 1d2d4b40a85b7151343f72e184a1c90512702fe6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 5 Apr 2023 11:58:52 -0400 Subject: [PATCH 122/294] Only log about pps once in order mode code --- piker/ui/order_mode.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 0013891c..cb3ee7d1 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -871,7 +871,6 @@ async def open_order_mode( # the expected symbol key in its positions msg. for (broker, acctid), msgs in position_msgs.items(): for msg in msgs: - log.info(f'Loading pp for {acctid}@{broker}:\n{pformat(msg)}') await process_trade_msg( mode, book, @@ -956,13 +955,16 @@ async def process_trade_msg( ): sym = mode.chart.linked.symbol pp_msg_symbol = msg['symbol'].lower() - fqsn = sym.fqme + fqme = sym.fqme broker = sym.broker if ( - pp_msg_symbol == fqsn - or pp_msg_symbol == fqsn.removesuffix(f'.{broker}') + pp_msg_symbol == fqme + or pp_msg_symbol == fqme.removesuffix(f'.{broker}') ): - log.info(f'{fqsn} matched pp msg: {fmsg}') + log.info( + f'Loading position for `{fqme}`:\n' + f'{fmsg}' + ) tracker = mode.trackers[msg['account']] tracker.live_pp.update_from_msg(msg) tracker.update_from_pp(set_as_startup=True) # status/pane UI From 3f2f5edb28918ec35c3b9be46e0ef1ddc1768aa1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 5 Apr 2023 13:07:22 -0400 Subject: [PATCH 123/294] kraken: rename `Client._atable` -> `_altnames` --- piker/brokers/kraken/api.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 4ce05322..da377822 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -170,9 +170,12 @@ class Pair(Struct): class Client: - # global symbol normalization table + # symbol mapping from all names to the altname _ntable: dict[str, str] = {} - _atable: bidict[str, str] = bidict() + + # 2-way map of symbol names to their "alt names" ffs XD + _altnames: bidict[str, str] = bidict() + _pairs: dict[str, Pair] = {} def __init__( @@ -267,7 +270,7 @@ class Client: # data and return a `decimal.Decimal` instead here! # using the underlying Asset return { - self._atable[sym].lower(): float(bal) + self._altnames[sym].lower(): float(bal) for sym, bal in by_bsmktid.items() } @@ -300,7 +303,7 @@ class Client: assets = await self.get_assets() for bs_mktid, info in assets.items(): - aname = self._atable[bs_mktid] = info['altname'] + aname = self._altnames[bs_mktid] = info['altname'] aclass = info['aclass'] self.assets[bs_mktid] = Asset( @@ -395,7 +398,7 @@ class Client: # look up the normalized name and asset info asset_key = entry['asset'] asset = self.assets[asset_key] - asset_key = self._atable[asset_key].lower() + asset_key = self._altnames[asset_key].lower() # XXX: this is in the asset units (likely) so it isn't # quite the same as a commisions cost necessarily..) 
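# XXX aside (illustrative sketch, not part of the `_altnames` patch;
# mapping values invented): a `bidict` suits this table since it gives
# O(1) lookups in *both* directions without a second dict:
from bidict import bidict

altnames = bidict({'XXBT': 'XBT'})
assert altnames['XXBT'] == 'XBT'          # kraken name -> altname
assert altnames.inverse['XBT'] == 'XXBT'  # altname -> kraken name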
@@ -504,9 +507,11 @@ class Client:

     ) -> MktPair:

-        pair_info: Pair  # = await self.pair_info(pair)
-        bs_mktid: str
-        bs_mktid, pair_info = Client.normalize_symbol(pair_str)
+        (
+            bs_mktid,  # str
+            pair_info,  # Pair
+        ) = Client.normalize_symbol(pair_str)
+
         dst_asset = self.assets[pair_info.base]

         # NOTE XXX parse out the src asset name until we figure out

From 6decd4112a94fb3a72d7466ae83223a918b44836 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 13:08:31 -0400
Subject: [PATCH 124/294] kraken: drop console setup, now done during brokerd
 init

---
 piker/brokers/kraken/broker.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py
index 935459cc..58cc1464 100644
--- a/piker/brokers/kraken/broker.py
+++ b/piker/brokers/kraken/broker.py
@@ -69,7 +69,6 @@ from .api import (
     get_client,
 )
 from .feed import (
-    get_console_log,
     open_autorecon_ws,
     NoBsWs,
     stream_messages,
@@ -425,9 +424,6 @@ async def trades_dialogue(

 ) -> AsyncIterator[dict[str, Any]]:

-    # XXX: required to propagate ``tractor`` loglevel to ``piker`` logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
     async with get_client() as client:

         if not client._api_key:

From 21401853c45377e735169325d23f6452df0464e8 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 13:09:06 -0400
Subject: [PATCH 125/294] `kraken`: add module level `get_mkt_info()`

This will (likely) act as a new backend query endpoint for other
`piker` (client) code to look up `MktPair` info from each backend. To
start it also returns the backend-broker's local `Pair` (or wtv other
type) as well.

The main motivation for this is for our paper engine which can require
the mkt info when processing paper-trades ledgers which do not contain
appropriate info to compute position metrics.
---
 piker/brokers/kraken/__init__.py |  1 +
 piker/brokers/kraken/feed.py     | 45 ++++++++++++++++++++------------
 2 files changed, 29 insertions(+), 17 deletions(-)

diff --git a/piker/brokers/kraken/__init__.py b/piker/brokers/kraken/__init__.py
index cd36f4e5..cd04c950 100644
--- a/piker/brokers/kraken/__init__.py
+++ b/piker/brokers/kraken/__init__.py
@@ -34,6 +34,7 @@ from .api import (
     get_client,
 )
 from .feed import (
+    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,
diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py
index 5ea96e28..a54329db 100644
--- a/piker/brokers/kraken/feed.py
+++ b/piker/brokers/kraken/feed.py
@@ -44,7 +44,6 @@ from piker.brokers._util import (
     DataThrottle,
     DataUnavailable,
 )
-from piker.log import get_console_log
 from piker.data.types import Struct
 from piker.data._web_bs import open_autorecon_ws, NoBsWs
 from . import log
@@ -279,6 +278,27 @@ async def open_history_client(
         yield get_ohlc, {'erlangs': 1, 'rate': 1}


+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair]:
+    '''
+    Query for and return a `MktPair` and backend-native `Pair` (or
+    wtv else) info.
+
+    If more than one fqme is provided return a ``dict`` of native
+    key-strs to `MktPair`s.
+
+    '''
+    async with open_cached_client('kraken') as client:
+
+        # uppercase since kraken bs_mktid is always upper
+        sym_str = fqme.upper()
+        pair: Pair = await client.pair_info(sym_str)
+        mkt: MktPair = await client.mkt_info(sym_str)
+        return mkt, pair
+
+
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
@@ -299,26 +319,17 @@ async def stream_quotes(

     ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
''' - # XXX: required to propagate ``tractor`` loglevel to piker logging - get_console_log(loglevel or tractor.current_actor().loglevel) - ws_pairs = {} + ws_pairs: list[str] = [] mkt_infos: dict[str, MktPair] = {} async with ( - open_cached_client('kraken') as client, send_chan as send_chan, ): - # keep client cached for real-time section - for sym in symbols: - - # uppercase since piker style is always lowercase. - sym_str = sym.upper() - pair: Pair = await client.pair_info(sym_str) - mkt: MktPair = await client.mkt_info(sym_str) + for sym_str in symbols: + mkt, pair = await get_mkt_info(sym_str) mkt_infos[sym_str] = mkt - - ws_pairs[sym_str] = pair.wsname + ws_pairs.append(pair.wsname) symbol = symbols[0].lower() @@ -343,7 +354,7 @@ async def stream_quotes( # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188 ohlc_sub = { 'event': 'subscribe', - 'pair': list(ws_pairs.values()), + 'pair': ws_pairs, 'subscription': { 'name': 'ohlc', 'interval': 1, @@ -359,7 +370,7 @@ async def stream_quotes( # trade data (aka L1) l1_sub = { 'event': 'subscribe', - 'pair': list(ws_pairs.values()), + 'pair': ws_pairs, 'subscription': { 'name': 'spread', # 'depth': 10} @@ -374,7 +385,7 @@ async def stream_quotes( # unsub from all pairs on teardown if ws.connected(): await ws.send_msg({ - 'pair': list(ws_pairs.values()), + 'pair': ws_pairs, 'event': 'unsubscribe', 'subscription': ['ohlc', 'spread'], }) From 83514b0e90f08f5f0f6a47425da275cf5a6cfb8e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 5 Apr 2023 13:17:17 -0400 Subject: [PATCH 126/294] `binance`: add `get_mkt_info()` ep --- piker/brokers/binance.py | 72 +++++++++++++++++++--------------------- 1 file changed, 35 insertions(+), 37 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 3ec02328..a12cd4c9 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -1,5 +1,8 @@ # piker: trading gear for hackers -# Copyright (C) Guillermo Rodriguez (in stewardship for piker0) +# Copyright (C) +# Guillermo Rodriguez +# Tyler Goodlet +# (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -469,6 +472,34 @@ async def open_history_client( yield get_ohlc, {'erlangs': 3, 'rate': 3} +async def get_mkt_info( + fqme: str, + +) -> tuple[MktPair, Pair]: + + async with open_cached_client('binance') as client: + + pair: Pair = await client.exch_info(fqme.upper()) + + mkt = MktPair( + dst=Asset( + name=pair.baseAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ), + src=Asset( + name=pair.quoteAsset, + atype='crypto', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ), + price_tick=pair.price_tick, + size_tick=pair.size_tick, + bs_mktid=pair.symbol, + broker='binance', + ) + return mkt, pair + + async def stream_quotes( send_chan: trio.abc.SendChannel, @@ -483,47 +514,15 @@ async def stream_quotes( # XXX: required to propagate ``tractor`` loglevel to piker logging get_console_log(loglevel or tractor.current_actor().loglevel) - sym_infos = {} uid = 0 async with ( - open_cached_client('binance') as client, send_chan as send_chan, ): - - # keep client cached for real-time section - pairs = await client.exch_info() - sym_infos: dict[str, dict] = {} mkt_infos: dict[str, MktPair] = {} - for sym in symbols: - - pair: Pair = pairs[sym.upper()] - price_tick = pair.price_tick - size_tick = pair.size_tick - - mkt_infos[sym] 
= MktPair( - dst=Asset( - name=pair.baseAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.baseAssetPrecision), - ), - src=Asset( - name=pair.quoteAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.quoteAssetPrecision), - ), - price_tick=price_tick, - size_tick=size_tick, - bs_mktid=pair.symbol, - broker='binance', - ) - - sym_infos[sym] = { - 'price_tick_size': price_tick, - 'lot_tick_size': size_tick, - 'asset_type': 'crypto', - } + mkt, pair = await get_mkt_info(sym) + mkt_infos[sym] = mkt symbol = symbols[0] @@ -533,7 +532,6 @@ async def stream_quotes( symbol: { 'fqsn': sym, - # 'symbol_info': sym_infos[sym], 'mkt_info': mkt_infos[sym], 'shm_write_opts': {'sum_tick_vml': False}, }, @@ -638,5 +636,5 @@ async def open_symbol_search( # repack in dict form await stream.send({ item[0].symbol: item[0] - for item in matches + for item in matches }) From 55b4866d5e51d96295feb4962333312f60a59c66 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 5 Apr 2023 14:15:02 -0400 Subject: [PATCH 127/294] Use `force_mkt` override in paper pps updates When processing paper trades ledgers we normally won't have specific `MktPair` info for the backend market we're simulating, as such we need to look up this info when updating pps.toml files such that we get precision info correct (particularly in the case of cryptos!) and can also run paper ledger processing without running the simulated clearing loop. In order to make it happen we lookup any `get_mkt_info()` ep on the backend and pass the output to the `force_mkt` input of the `PpTable.update_from_trans()` method. --- piker/accounting/_pos.py | 4 ++-- piker/clearing/_paper_engine.py | 31 +++++++++++++++++++++++++++++-- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 12daa6bf..2b3af58f 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -504,7 +504,7 @@ class PpTable(Struct): trans: dict[str, Transaction], cost_scalar: float = 2, - mkt: MktPair | None = None, + force_mkt: MktPair | None = None, ) -> dict[str, Position]: @@ -523,7 +523,7 @@ class PpTable(Struct): # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt: MktPair | Symbol | None = mkt or t.sys + mkt: MktPair | Symbol | None = force_mkt or t.sys if not mkt: mkt = MktPair.from_fqme( fqme, diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 3c6fdc4c..086263a2 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -34,9 +34,13 @@ import pendulum import trio import tractor +from ..brokers import get_brokermod from .. import data from ..data.types import Struct -from ..accounting._mktinfo import Symbol +from ..accounting._mktinfo import ( + Symbol, + MktPair, +) from ..accounting import ( Position, PpTable, @@ -545,8 +549,31 @@ async def trades_dialogue( 'paper', ) as ledger ): + # attempt to get market info from the backend instead of presuming + # the ledger entries have everything correct. + # TODO: how to process ledger info from backends? + # - should we be rolling our own actor-cached version of these + # client API refs or using portal IPC to send requests to the + # existing brokerd daemon? + # - alternatively we can possibly expect and use + # a `.broker.norm_trade_records()` ep? 
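# XXX aside (hedged standalone sketch, not part of this patch): the
# "expect an ep" idea in the comment block above reduces to optional
# module-attribute detection, which degrades gracefully for backends
# that don't (yet) implement the endpoint:
import math
import types

def maybe_get_ep(mod: types.ModuleType, name: str = 'get_mkt_info'):
    # returns the endpoint fn if the backend module defines it,
    # otherwise `None` so callers can fall back to ledger-only info
    return getattr(mod, name, None)

assert maybe_get_ep(math, 'sqrt') is math.sqrt
assert maybe_get_ep(math) is None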
+        mkt: MktPair | None = None
+        brokermod = get_brokermod(broker)
+        gmi = getattr(brokermod, 'get_mkt_info', None)
+        if gmi:
+            mkt, pair = await brokermod.get_mkt_info(
+                fqme.rstrip(f'.{broker}'),
+            )
+
         # update pos table from ledger history
-        ppt.update_from_trans(ledger.to_trans())
+        ppt.update_from_trans(
+            ledger.to_trans(),
+
+            # NOTE: here we pass in any `MktPair` provided by the
+            # backend broker instead of assuming the pps.toml contains
+            # the correct contents!
+            force_mkt=mkt
+        )

         pp_msgs: list[BrokerdPosition] = []
         pos: Position

From f42bc2dbce02d5d94d78b6dc3235fe0e999a839e Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 19:02:33 -0400
Subject: [PATCH 128/294] `pprint.pformat()` IB position mismatch log msgs

---
 piker/brokers/ib/broker.py | 27 ++++++++++++++-------------
 1 file changed, 14 insertions(+), 13 deletions(-)

diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index add03cc1..fdabb645 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -369,6 +369,9 @@ async def update_and_audit_msgs(
             )
             msgs.append(msg)

+            ibfmtmsg = pformat(ibppmsg.to_dict())
+            pikerfmtmsg = pformat(msg.to_dict())
+
             if validate:
                 ibsize = ibppmsg.size
                 pikersize = msg.size
@@ -388,13 +391,13 @@ async def update_and_audit_msgs(

                     # raise ValueError(
                     log.error(
-                        f'POSITION MISMATCH ib <-> piker ledger:\n'
-                        'FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?\n\n'
+                        f'Pos mismatch in ib vs. the piker ledger!\n'
+                        f'IB:\n{ibfmtmsg}\n\n'
+                        f'PIKER:\n{pikerfmtmsg}\n\n'
                         'If you are expecting a (reverse) split in this '
-                        'instrument you should probably put the following\n\n'
-                        f'in the `pps.toml` section:\n{entry}'
-                        f'IB:\nm{ibppmsg.to_dict()}\n\n'
-                        f'PIKER:\n{msg.to_dict()}\n\n'
+                        'instrument you should probably put the following '
+                        'in the `pps.toml` section:\n'
+                        f'{entry}\n'
                         # f'reverse_split_ratio: {reverse_split_ratio}\n'
                         # f'split_ratio: {split_ratio}\n\n'
                     )
@@ -403,10 +406,9 @@ async def update_and_audit_msgs(
                 if ibppmsg.avg_price != msg.avg_price:
                     # TODO: make this a "propaganda" log level?
                     log.warning(
-                        'The mega-cucks at IB want you to believe with their '
-                        f'"FIFO" positioning for {msg.symbol}:\n'
-                        f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n'
-                        f'piker, LIFO breakeven PnL price: {msg.avg_price}'
+                        f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n'
+                        f'ib: {ibppmsg.avg_price}\n'
+                        f'piker: {msg.avg_price}'
                     )

             else:
@@ -431,9 +433,8 @@ async def update_and_audit_msgs(

                 # raise ValueError(
                 log.error(
                     f'UNEXPECTED POSITION says IB:\n'
-                    'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?\n'
-                    'THEY LIQUIDATED YOU OR YOUR MISSING LEDGER RECORDS!?\n'
-                    f'PIKER:\n{msg.to_dict()}\n'
+                    "Maybe they LIQUIDATED YOU or you're missing ledger records?\n"
+                    f'PIKER:\n{pikerfmtmsg}\n\n'
                 )
                 msgs.append(msg)

From 62259880fd0d9ac40c455347666924b0714ea7b0 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 19:34:29 -0400
Subject: [PATCH 129/294] paper: on no input fqme, load all mktinfos from pos table

---
 piker/clearing/_paper_engine.py | 37 +++++++++++++++++++--------------
 1 file changed, 21 insertions(+), 16 deletions(-)

diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index 086263a2..f2fcbbd3 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -557,24 +557,29 @@ async def trades_dialogue(
         # existing brokerd daemon?
         # - alternatively we can possibly expect and use
         # a `.broker.norm_trade_records()` ep?
-        mkt: MktPair | None = None
-        brokermod = get_brokermod(broker)
-        gmi = getattr(brokermod, 'get_mkt_info', None)
-        if gmi:
-            mkt, pair = await brokermod.get_mkt_info(
-                fqme.rstrip(f'.{broker}'),
+        fqmes: list[str] = [fqme]
+        if fqme is None:
+            fqmes = list(ppt.pps)
+
+        for fqme in fqmes:
+            mkt: MktPair | None = None
+            brokermod = get_brokermod(broker)
+            gmi = getattr(brokermod, 'get_mkt_info', None)
+            if gmi:
+                mkt, pair = await brokermod.get_mkt_info(
+                    fqme.rstrip(f'.{broker}'),
+                )
+
+            # update pos table from ledger history
+            ppt.update_from_trans(
+                ledger.to_trans(),
+
+                # NOTE: here we pass in any `MktPair` provided by the
+                # backend broker instead of assuming the pps.toml contains
+                # the correct contents!
+                force_mkt=mkt
             )

-        # update pos table from ledger history
-        ppt.update_from_trans(
-            ledger.to_trans(),
-
-            # NOTE: here we pass in any `MktPair` provided by the
-            # backend broker instead of assuming the pps.toml contains
-            # the correct contents!
-            force_mkt=mkt
-        )
-
         pp_msgs: list[BrokerdPosition] = []
         pos: Position
         token: str  # f'{symbol}.{self.broker}'

From 250e1c4c51aee0e67898d1a044c9e9c67b4c4aeb Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 5 Apr 2023 23:57:55 -0400
Subject: [PATCH 130/294] `ledger` cli: dump colored summary lines to console

Tried a couple libs and ended up sticking with `rich` (since it's the
sibling lib to `typer`) but also (initially) implemented a version with
`blessings` that I ended up commenting out (and will likely remove).

Adjusted the CLI I/O a slight bit as well:
- require a fully qualified account name of the form:
  `<brokername>.<account>` and error on non-matching input.
- dump positions summary lines as humanized size, ppu and cost
  basis values per line.
---
 piker/accounting/cli.py | 118 +++++++++++++++++++++++++++++++++++++---
 1 file changed, 109 insertions(+), 9 deletions(-)

diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py
index bb4e28df..a922b59c 100644
--- a/piker/accounting/cli.py
+++ b/piker/accounting/cli.py
@@ -22,6 +22,10 @@ from typing import (
     Any,
 )

+from rich import print
+from rich.console import Console
+from rich.markdown import Markdown
+# from blessings import Terminal
 import tractor
 import trio
 import typer
@@ -29,7 +33,8 @@ import typer
 from ..service import (
     open_piker_runtime,
 )
-# from ._pos import open_pps
+from ..clearing._messages import BrokerdPosition
+from ..calc import humanize


 ledger = typer.Typer()
@@ -89,13 +94,32 @@ def broker_init(

 @ledger.command()
 def sync(
-    brokername: str,
-    account: str,
-
-    loglevel: str = 'cancel',
+    fully_qualified_account_name: str,
+    # brokername: str,
+    # account: str,

     pdb: bool = False,
+    loglevel: str = typer.Option(
+        'error',
+        "-l",
+    ),
+):
+    console = Console()
+
+    try:
+        brokername, account = fully_qualified_account_name.split('.')
+    except ValueError:
+        md = Markdown(
+            f'=> `{fully_qualified_account_name}` <=\n\n'
+            'is not a valid '
+            '__fully qualified account name?__\n\n'
+            'Your account name needs to be of the form '
+            '`<brokername>.<account>`\n'
+        )
+        console.print(md)
+        return
+
+    # term = Terminal()
     start_kwargs, _, trades_ep = broker_init(
         brokername,
         loglevel=loglevel,
@@ -138,15 +162,91 @@ def sync(

     positions: dict[str, Any]
     accounts: list[str]
-    # brokerd_trades_stream: tractor.MsgStream

     async with (
         open_trades_endpoint as (
             brokerd_ctx,
             (positions, accounts,),
         ),
-        # brokerd_ctx.open_stream() as brokerd_trades_stream,
     ):
-        await tractor.breakpoint()
+        # XXX: ``blessings`` lib syntax..
+        # summary: str = (
+        #     term.dim('Position Summary ')
+        #     + term.dim_blue_underline(f'{brokername}')
+        #     + term.dim('.')
+        #     + term.blue_underline(f'{account}')
+        #     + term.dim(':\n')
+        #     + term.dim('|-> total pps: ')
+        #     + term.green(f'{len(positions)}\n')
+        # )
+
+        summary: str = (
+            '[dim]PP Summary[/] '
+            f'[dim blue underline]{brokername}[/]'
+            '[dim].[/]'
+            f'[blue underline]{account}[/]'
+            f'[dim]:\n|-> total pps: [/]'
+            f'[green]{len(positions)}[/]\n'
+        )
+        for ppdict in positions:
+            ppmsg = BrokerdPosition(**ppdict)
+            size = ppmsg.size
+            if size:
+                ppu: float = round(
+                    ppmsg.avg_price,
+                    ndigits=2,
+                )
+                cb: str = humanize(size * ppu)
+                h_size: str = humanize(size)
+
+                if size < 0:
+                    # pcolor = term.red
+                    pcolor = 'red'
+                else:
+                    # pcolor = term.green
+                    pcolor = 'green'
+
+                # sematic-highligh of fqme
+                fqme = ppmsg.symbol
+                tokens = fqme.split('.')
+                # styled_fqme = term.blue_underline(f'{tokens[0]}')
+                styled_fqme = f'[blue underline]{tokens[0]}[/]'
+                for tok in tokens[1:]:
+                    # styled_fqme += term.dim('.')
+                    styled_fqme += '[dim].[/]'
+                    # styled_fqme += term.dim_blue_underline(tok)
+                    styled_fqme += f'[dim blue underline]{tok}[/]'
+
+                # blessing.Terminal code.
+                # summary += (
+                #     # term.dim('- ')
+                #     # + term.dim_blue(f'{ppmsg.symbol}')
+                #     styled_fqme
+                #     + term.dim(': ')
+                #     + pcolor(f'{h_size}')
+                #     # + term.dim_blue('u \n')
+                #     # + term.dim_blue('@ ')
+                #     + term.dim_blue('u @')
+                #     # + term.dim(f' |-> ppu: ')
+                #     # + pcolor(f'{ppu}\n')
+                #     + pcolor(f'{ppu}')

+                #     # + term.dim(f' |-> book value: ')
+                #     + term.dim_blue(' = ')
+                #     + pcolor(f'$ {cb}\n')
+                # )

+                summary += (
+                    styled_fqme
+                    + '[dim]: [/]'
+                    f'[{pcolor}]{h_size}[/]'
+                    '[dim blue]u @[/]'
+                    f'[{pcolor}]{ppu}[/]'
+                    '[dim blue] = [/]'
+                    f'[{pcolor}]$ {cb}\n[/]'
+                )

+        # console.print(summar)
+        print(summary)

         await brokerd_ctx.cancel()
         await portal.cancel_actor()

From b7ddf9cb05d2ad56b9b13a339c2a50beefc8e8ee Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 6 Apr 2023 00:26:53 -0400
Subject: [PATCH 131/294] paper-eng: close context and terminate actor on exit

---
 piker/clearing/_paper_engine.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index f2fcbbd3..02031898 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -666,21 +666,23 @@ async def open_paperboi(
     else:
         broker, symbol, expiry = unpack_fqme(fqme)

+    we_spawned: bool = False
     service_name = f'paperboi.{broker}'

     async with (
         tractor.find_actor(service_name) as portal,
         tractor.open_nursery() as tn,
     ):
-        # only spawn if no paperboi already is up
-        # (we likely don't need more then one proc for basic
-        # simulated order clearing)
+        # NOTE: only spawn if no paperboi already is up since we likely
+        # don't need more than one actor for simulated order clearing
+        # per broker-backend.
if portal is None: log.info('Starting new paper-engine actor') portal = await tn.start_actor( service_name, enable_modules=[__name__] ) + we_spawned = True async with portal.open_context( trades_dialogue, @@ -690,3 +692,8 @@ async def open_paperboi( ) as (ctx, first): yield ctx, first + + # tear down connection and any spawned actor on exit + await ctx.cancel() + if we_spawned: + await portal.cancel_actor() From f92c289842d76942ddfdfdc3f12774a402c062c7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 6 Apr 2023 00:40:18 -0400 Subject: [PATCH 132/294] Drop old blessings code, general cleanups --- piker/accounting/cli.py | 55 ++++++----------------------------------- 1 file changed, 7 insertions(+), 48 deletions(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index a922b59c..9f894e1a 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -22,10 +22,8 @@ from typing import ( Any, ) -from rich import print from rich.console import Console from rich.markdown import Markdown -# from blessings import Terminal import tractor import trio import typer @@ -53,7 +51,6 @@ def broker_init( the relevant `brokerd` service endpoint. ''' - # log.info(f'Spawning {brokername} broker daemon') from ..brokers import get_brokermod brokermod = get_brokermod(brokername) modpath = brokermod.__name__ @@ -95,8 +92,6 @@ def broker_init( @ledger.command() def sync( fully_qualified_account_name: str, - # brokername: str, - # account: str, pdb: bool = False, loglevel: str = typer.Option( @@ -119,7 +114,6 @@ def sync( console.print(md) return - # term = Terminal() start_kwargs, _, trades_ep = broker_init( brokername, loglevel=loglevel, @@ -168,23 +162,12 @@ def sync( (positions, accounts,), ), ): - # XXX: ``blessings`` lib syntax.. - # summary: str = ( - # term.dim('Position Summary ') - # + term.dim_blue_underline(f'{brokername}') - # + term.dim('.') - # + term.blue_underline(f'{account}') - # + term.dim(':\n') - # + term.dim('|-> total pps: ') - # + term.green(f'{len(positions)}\n') - # ) - summary: str = ( - '[dim]PP Summary[/] ' + '[dim underline]Piker Position Summary[/] ' f'[dim blue underline]{brokername}[/]' '[dim].[/]' f'[blue underline]{account}[/]' - f'[dim]:\n|-> total pps: [/]' + f'[dim underline] -> total pps: [/]' f'[green]{len(positions)}[/]\n' ) for ppdict in positions: @@ -195,46 +178,23 @@ def sync( ppmsg.avg_price, ndigits=2, ) - cb: str = humanize(size * ppu) + cost_basis: str = humanize(size * ppu) h_size: str = humanize(size) if size < 0: - # pcolor = term.red pcolor = 'red' else: - # pcolor = term.green pcolor = 'green' - # sematic-highligh of fqme + # sematic-highlight of fqme fqme = ppmsg.symbol tokens = fqme.split('.') - # styled_fqme = term.blue_underline(f'{tokens[0]}') styled_fqme = f'[blue underline]{tokens[0]}[/]' for tok in tokens[1:]: - # styled_fqme += term.dim('.') styled_fqme += '[dim].[/]' - # styled_fqme += term.dim_blue_underline(tok) styled_fqme += f'[dim blue underline]{tok}[/]' - # blessing.Terminal code. - # summary += ( - # # term.dim('- ') - # # + term.dim_blue(f'{ppmsg.symbol}') - # styled_fqme - # + term.dim(': ') - # + pcolor(f'{h_size}') - # # + term.dim_blue('u \n') - # # + term.dim_blue('@ ') - # + term.dim_blue('u @') - # # + term.dim(f' |-> ppu: ') - # # + pcolor(f'{ppu}\n') - # + pcolor(f'{ppu}') - - # # + term.dim(f' |-> book value: ') - # + term.dim_blue(' = ') - # + pcolor(f'$ {cb}\n') - # ) - + # TODO: instead display in a ``rich.Table``? 
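# XXX aside: a hedged sketch of the `rich.Table` TODO above (column
# names and row values are invented; `rich.table.Table` and
# `Console.print()` are the actual lib APIs):
from rich.console import Console
from rich.table import Table

postable = Table(title='Piker Position Summary')
for col in ('fqme', 'size', 'ppu', 'cost basis'):
    postable.add_column(col)

postable.add_row('xbtusdt.kraken', '0.001', '28.1k', '$ 28.1')
Console().print(postable)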
summary += ( styled_fqme + '[dim]: [/]' @@ -242,11 +202,10 @@ def sync( '[dim blue]u @[/]' f'[{pcolor}]{ppu}[/]' '[dim blue] = [/]' - f'[{pcolor}]$ {cb}\n[/]' + f'[{pcolor}]$ {cost_basis}\n[/]' ) - # console.print(summar) - print(summary) + console.print(summary) await brokerd_ctx.cancel() await portal.cancel_actor() From 484565988d4c3b86a6291a545b64b5a2235c231d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 8 Apr 2023 19:22:53 -0400 Subject: [PATCH 133/294] `order_mode`: broad rename book -> client --- piker/ui/order_mode.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index cb3ee7d1..3656c1ae 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -123,7 +123,7 @@ class OrderMode: chart: ChartPlotWidget # type: ignore # noqa hist_chart: ChartPlotWidget # type: ignore # noqa nursery: trio.Nursery # used by ``ui._position`` code? - book: OrderClient + client: OrderClient lines: LineEditor arrows: ArrowEditor multistatus: MultiStatus @@ -409,13 +409,13 @@ class OrderMode: # send order cmd to ems if send_msg: - self.book.send_nowait(order) + self.client.send_nowait(order) else: # just register for control over this order # TODO: some kind of mini-perms system here based on # an out-of-band tagging/auth sub-sys for multiplayer # order control? - self.book._sent_orders[order.oid] = order + self.client._sent_orders[order.oid] = order return dialog @@ -443,7 +443,7 @@ class OrderMode: size = dialog.order.size # NOTE: sends modified order msg to EMS - self.book.update_nowait( + self.client.update_nowait( uuid=line.dialog.uuid, price=level, size=size, @@ -559,7 +559,7 @@ class OrderMode: ) -> None: - msg = self.book._sent_orders.pop(uuid, None) + msg = self.client._sent_orders.pop(uuid, None) if msg is not None: self.lines.remove_line(uuid=uuid) @@ -615,7 +615,7 @@ class OrderMode: dialog.last_status_close = cancel_status_close ids.append(oid) - self.book.cancel_nowait(uuid=oid) + self.client.cancel_nowait(uuid=oid) return ids @@ -682,7 +682,7 @@ async def open_order_mode( multistatus = chart.window().status_bar done = multistatus.open_status('starting order mode..') - book: OrderClient + client: OrderClient trades_stream: tractor.MsgStream # The keys in this dict **must** be in set our set of "normalized" @@ -693,8 +693,11 @@ async def open_order_mode( # spawn EMS actor-service async with ( - open_ems(fqsn, loglevel=loglevel) as ( - book, + open_ems( + fqsn, + loglevel=loglevel, + ) as ( + client, trades_stream, position_msgs, brokerd_accounts, @@ -821,7 +824,7 @@ async def open_order_mode( chart, hist_chart, tn, - book, + client, lines, arrows, multistatus, @@ -873,7 +876,7 @@ async def open_order_mode( for msg in msgs: await process_trade_msg( mode, - book, + client, msg, ) @@ -907,7 +910,7 @@ async def open_order_mode( await process_trade_msg( mode, - book, + client, msg, ) @@ -915,7 +918,7 @@ async def open_order_mode( process_trades_and_update_ui, trades_stream, mode, - book, + client, ) yield mode @@ -925,7 +928,7 @@ async def process_trades_and_update_ui( trades_stream: tractor.MsgStream, mode: OrderMode, - book: OrderClient, + client: OrderClient, 
) -> None: @@ -934,14 +937,14 @@ async def process_trades_and_update_ui( async for msg in trades_stream: await process_trade_msg( mode, - book, + client, msg, ) async def process_trade_msg( mode: OrderMode, - book: OrderClient, + client: OrderClient, msg: dict, ) -> tuple[Dialog, Status]: @@ -1079,7 +1082,7 @@ async def process_trade_msg( case Status(resp='fill'): # handle out-of-piker fills reporting? - order: Order = book._sent_orders.get(oid) + order: Order = client._sent_orders.get(oid) if not order: log.warning(f'order {oid} is unknown') order = msg.req From abbba1fa6e6d659770dace37d1815ff616b0aa57 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 8 Apr 2023 22:48:30 -0400 Subject: [PATCH 134/294] Pack startup pps into a table keyed by fqmes --- piker/clearing/_ems.py | 47 +++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 6d18b686..ad13e8f0 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -315,9 +315,6 @@ class TradesRelay(Struct): # allowed account names accounts: tuple[str] - # count of connected ems clients for this ``brokerd`` - consumers: int = 0 - class Router(Struct): ''' @@ -468,9 +465,15 @@ class Router(Struct): # client set. # locally cache and track positions per account with - # a table of (brokername, acctid) -> `BrokerdPosition` - # msgs. - pps = {} + # a nested table of msgs: + # tuple(brokername, acctid) -> + # (fqme: str -> + # `BrokerdPosition`) + relay = TradesRelay( + brokerd_stream=brokerd_trades_stream, + positions={}, + accounts=accounts, + ) for msg in positions: msg = BrokerdPosition(**msg) @@ -483,17 +486,10 @@ class Router(Struct): account = msg.account assert account in accounts - pps.setdefault( + relay.positions.setdefault( (broker, account), - [], - ).append(msg) - - relay = TradesRelay( - brokerd_stream=brokerd_trades_stream, - positions=pps, - accounts=accounts, - consumers=1, - ) + {}, + )[msg.symbol] = msg self.relays[broker] = relay @@ -521,8 +517,9 @@ class Router(Struct): ) -> tuple[TradesRelay, Feed]: ''' - Open and yield ``brokerd`` trades dialogue context-stream if - none already exists. + Maybe open a live feed to the target fqme, start `brokerd` order + msg relay and dark clearing tasks to run in the background + indefinitely. ''' broker, symbol, suffix = unpack_fqme(fqme) @@ -701,13 +698,12 @@ async def translate_and_relay_brokerd_events( f'Rx brokerd trade msg:\n' f'{fmsg}' ) - status_msg: Optional[Status] = None + status_msg: Status | None = None match brokerd_msg: # BrokerdPosition case { 'name': 'position', - 'symbol': sym, 'broker': broker, }: pos_msg = BrokerdPosition(**brokerd_msg) @@ -718,9 +714,9 @@ async def translate_and_relay_brokerd_events( relay.positions.setdefault( # NOTE: translate to a FQSN! 
- (broker, sym), - [] - ).append(pos_msg) + (broker, pos_msg.account), + {} + )[pos_msg.symbol] = pos_msg # fan-out-relay position msgs immediately by # broadcasting updates on all client streams @@ -787,12 +783,11 @@ async def translate_and_relay_brokerd_events( # no msg to client necessary continue - # BrokerdOrderError + # BrokerdError case { 'name': 'error', 'oid': oid, # ems order-dialog id 'reqid': reqid, # brokerd generated order-request id - 'symbol': sym, }: status_msg = book._active.get(oid) msg = BrokerdError(**brokerd_msg) From e524c6fe4fb42dc39c78b37606c3117e62cf037b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Apr 2023 17:22:26 -0400 Subject: [PATCH 135/294] `binance`: add startup caching info log msg --- piker/brokers/binance.py | 1 + 1 file changed, 1 insertion(+) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index a12cd4c9..e1ffabd7 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -332,6 +332,7 @@ class Client: @acm async def get_client() -> Client: client = Client() + log.info(f'Caching exchange infos..') await client.exch_info() yield client From 30af91a82c6a5e5435aa6e9197ea18ec62f06217 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Apr 2023 19:05:36 -0400 Subject: [PATCH 136/294] Rewrite order ctl tests as a parametrization More or less a complete rework which allows passing a detailed clearing/fills input and allows for *not* rebooting the runtime / ems between each position check. Some further enhancements: - use (unit) fractional sizes to simulate both the more realistic and more "complex position calculation" case; since this is crypto. - add a no-fqme-found test. - factor cross-session/offline pos storage (pps.toml) checks into a `load_and_check_pos()` helper which does all entry loading directly from a provided `BrokerdPosition` msg. - use the new `OrderClient.send()` async api. --- tests/test_paper.py | 339 +++++++++++++++++++++++++------------------- 1 file changed, 190 insertions(+), 149 deletions(-) diff --git a/tests/test_paper.py b/tests/test_paper.py index 75be3dc2..0e791197 100644 --- a/tests/test_paper.py +++ b/tests/test_paper.py @@ -18,7 +18,6 @@ from exceptiongroup import BaseExceptionGroup import pytest import tractor from uuid import uuid4 -from functools import partial from piker.service import Services from piker.log import get_logger @@ -53,14 +52,15 @@ async def open_pikerd( yield services -async def submit_order( +async def order_and_and_wait_for_ppmsg( client: OrderClient, trades_stream: tractor.MsgStream, fqme: str, + action: Literal['buy', 'sell'], - price: float = 30000., - executions: int = 1, + price: float = 100e3, # just a super high price. size: float = 0.01, + exec_mode: str = 'live', account: str = 'paper', @@ -73,51 +73,49 @@ async def submit_order( sent: list[Order] = [] broker, key, suffix = unpack_fqme(fqme) - for _ in range(executions): + order = Order( + exec_mode=exec_mode, + action=action, # TODO: remove this from our schema? + oid=str(uuid4()), + account=account, + size=size, + symbol=fqme, + price=price, + brokers=[broker], + ) + sent.append(order) + await client.send(order) - order = Order( - exec_mode=exec_mode, - action=action, - oid=str(uuid4()), - account=account, - size=size, - symbol=fqme, - price=price, - brokers=[broker], - ) - sent.append(order) - await client.send(order) + # TODO: i guess we should still test the old sync-API? + # client.send_nowait(order) - # TODO: i guess we should still test the old sync-API? 
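# XXX aside (standalone demo, not part of this patch): the sync vs.
# async send split mentioned in the TODO above mirrors trio's own
# channel API semantics:
import trio

async def _demo() -> None:
    tx, rx = trio.open_memory_channel(1)
    tx.send_nowait('order')       # sync; raises `trio.WouldBlock` if full
    assert await rx.receive() == 'order'
    await tx.send('order2')       # async; applies backpressure instead
    assert await rx.receive() == 'order2'

trio.run(_demo)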
-    # client.send_nowait(order)

+    # Wait for position message before moving on to verify flow(s)
+    # for the multi-order position entry/exit.
+    msgs: list[Status | BrokerdPosition] = []
+    async for msg in trades_stream:
+        match msg:
+            case {'name': 'position'}:
+                ppmsg = BrokerdPosition(**msg)
+                msgs.append(ppmsg)
+                break

+            case {'name': 'status'}:
+                msgs.append(Status(**msg))

-        # Wait for position message before moving on to verify flow(s)
-        # for the multi-order position entry/exit.
-        msgs: list[Status | BrokerdPosition] = []
-        async for msg in trades_stream:
-            match msg:
-                case {'name': 'position'}:
-                    ppmsg = BrokerdPosition(**msg)
-                    msgs.append(ppmsg)
-                    break

-                case {'name': 'status'}:
-                    msgs.append(Status(**msg))

     return sent, msgs


-def run_and_catch(
+def run_and_tollerate_cancels(
     fn: Callable[..., Awaitable],
-    expect_errs: tuple[Exception] = (
-        KeyboardInterrupt,
-        tractor.ContextCancelled,
-    )
+    expect_errs: tuple[Exception] | None = None,
+    tollerate_errs: tuple[Exception] = (tractor.ContextCancelled,),
 ):
     '''
-    Close position and assert empty position in pps
+    Run ``trio``-``piker`` runtime with potential tolerance for
+    inter-actor cancellation during teardown (normally just
+    `tractor.ContextCancelled`s).

     '''
     if expect_errs:
@@ -127,7 +125,10 @@
             for err in exc_info.value.exceptions:
                 assert type(err) in expect_errs
     else:
-        trio.run(fn)
+        try:
+            trio.run(fn)
+        except tollerate_errs:
+            pass


 @cm
@@ -139,22 +140,28 @@ def load_and_check_pos(

     with open_pps(ppmsg.broker, ppmsg.account) as table:

-        # NOTE: a special case is here since the `PpTable.pps` are
-        # normally indexed by the particular broker's
-        # `Position.bs_mktid: str` (a unique market / symbol id provided
-        # by their systems/design) but for the paper engine case, this
-        # is the same the fqme.
-        pp: Position = table.pps[ppmsg.symbol]
+        if ppmsg.size == 0:
+            assert ppmsg.symbol not in table.pps
+            yield None
+            return

-        assert ppmsg.size == pp.size
-        assert ppmsg.avg_price == pp.ppu
+        else:
+            # NOTE: a special case is here since the `PpTable.pps` are
+            # normally indexed by the particular broker's
+            # `Position.bs_mktid: str` (a unique market / symbol id provided
+            # by their systems/design) but for the paper engine case, this
+            # is the same as the fqme.
+            pp: Position = table.pps[ppmsg.symbol]

+            assert ppmsg.size == pp.size
+            assert ppmsg.avg_price == pp.ppu

+            yield pp


 @pytest.mark.trio
 async def test_ems_err_on_bad_broker(
-    open_pikerd: Services,
+    open_test_pikerd: Services,
     loglevel: str,
 ):
     try:
@@ -168,9 +175,60 @@
         pass


+async def match_ppmsgs_on_ems_boot(
+    ppmsgs: list[BrokerdPosition],
+
+) -> None:
+    '''
+    Given a list of input position msgs, verify they match
+    what is loaded from the EMS on connect.
+
+    '''
+    by_acct: dict[tuple, list[BrokerdPosition]] = {}
+    for msg in ppmsgs:
+        by_acct.setdefault(
+            (msg.broker, msg.account),
+            [],
+        ).append(msg)
+
+    # TODO: actually support multi-mkts to `open_ems()`
+    # but for now just pass the first fqme.
+    fqme = msg.symbol
+
+    # disconnect from EMS, reconnect and ensure we get our same
+    # position relayed to us again in the startup msg.
+ async with ( + open_ems( + fqme, + mode='paper', + loglevel='info', + ) as ( + _, # OrderClient + _, # tractor.MsgStream + startup_pps, + accounts, + _, # dialogs, + ) + ): + for (broker, account), ppmsgs in by_acct.items(): + assert account in accounts + + # lookup all msgs rx-ed for this account + rx_msgs = startup_pps[(broker, account)] + + for expect_ppmsg in ppmsgs: + rx_msg = BrokerdPosition(**rx_msgs[expect_ppmsg.symbol]) + assert rx_msg == expect_ppmsg + + +async def submit_and_check( + fills: tuple[dict], loglevel: str, -): + +) -> tuple[ + BrokerdPosition, + Position, +]: ''' Enter a trade and assert entries are made in pps and ledger files. @@ -203,125 +261,108 @@ async def atest_buy( assert not startup_pps assert 'paper' in accounts - sent, msgs = await submit_order( - client, - trades_stream, - fqme, - action='buy', - size=0.01, - ) - - last_order = sent[-1] + od: dict + for od in fills: + print(f'Sending order {od} for fill') + sent, msgs = await order_and_and_wait_for_ppmsg( + client, + trades_stream, + fqme, + action='buy', + size=od['size'], + ) + last_order: Order = sent[-1] last_resp = msgs[-1] assert isinstance(last_resp, BrokerdPosition) + ppmsg = last_resp # check that pps.toml for account has been updated + # and all ems position msgs match that state. with load_and_check_pos( last_order, - last_resp, + ppmsg, ) as pos: - return pos + pass - # disconnect from EMS, then reconnect and ensure we get our same - # position relayed to us again. - - # _run_test_and_check( - # partial( - # _async_main, - # open_test_pikerd_and_ems=open_test_pikerd_and_ems, - # action='buy', - # assert_entries=True, - # ), - # ) - - # await _async_main( - # open_test_pikerd_and_ems=open_test_pikerd_and_ems, - # assert_pps=True, - # ) - # _run_test_and_check( - # partial( - # _async_main, - # open_test_pikerd_and_ems=open_test_pikerd_and_ems, - # assert_pps=True, - # ), - # ) + return ppmsg, pos -def test_open_long( +@pytest.mark.parametrize( + 'fills', + [ + # buy and leave + ({'size': 0.001},), + + # sell short, then buy back to net-zero in dst + ( + {'size': -0.001}, + {'size': 0.001}, + ), + + # multi-partial entry and exits. + ( + # enters + {'size': 0.001}, + {'size': 0.002}, + + # partial exit + {'size': -0.001}, + + # partial enter + {'size': 0.0015}, + {'size': 0.001}, + {'size': 0.002}, + + # exits to get back to zero. + {'size': -0.001}, + {'size': -0.025}, + {'size': -0.0195}, + ), + ], + ids='fills={}'.format, +) +def test_multi_fill_positions( open_test_pikerd: AsyncContextManager, loglevel: str, + fills: tuple[dict], + + check_cross_session: bool = True, + ) -> None: + ppmsg: BrokerdPosition + pos: Position + + accum_size: float = 0 + for fill in fills: + accum_size += fill['size'] + async def atest(): + + # export to outer scope for audit on second runtime-boot. + nonlocal ppmsg, pos + async with ( open_test_pikerd() as (_, _, _, services), ): - assert await atest_buy(loglevel) + ppmsg, pos = await submit_and_check( + fills=fills, + loglevel=loglevel, + ) + assert ppmsg.size == accum_size - # Teardown piker like a user would from cli - # raise KeyboardInterrupt + run_and_tollerate_cancels(atest) - run_and_catch( - atest, - expect_errs=None, - ) - # Open ems another time and assert existence of prior - # pps entries confirming they persisted + if check_cross_session or accum_size != 0: + # rerun just to check that position info is persistent for the paper + # account (i.e. a user can expect to see paper pps persist across + # runtime sessions. 
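# XXX aside (standalone analogue, not part of this patch; the file
# shape is invented and stdlib ``tomllib`` needs python>=3.11): the
# cross-session "persistence" being checked here reduces to the
# pps.toml reload-and-verify roundtrip surviving a runtime teardown:
import tomllib

doc = '["xbtusdt.kraken"]\nsize = 0.001\nppu = 100000.0\n'
pps = tomllib.loads(doc)
assert pps['xbtusdt.kraken']['size'] == 0.001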
+ async def just_check_pp(): + async with ( + open_test_pikerd() as (_, _, _, services), + ): + await match_ppmsgs_on_ems_boot([ppmsg]) - - -# def test_sell( -# open_test_pikerd_and_ems: AsyncContextManager, -# ): -# ''' -# Sell position and ensure pps are zeroed. - -# ''' -# _run_test_and_check( -# partial( -# _async_main, -# open_test_pikerd_and_ems=open_test_pikerd_and_ems, -# action='sell', -# price=1, -# ), -# ) - -# _run_test_and_check( -# partial( -# _async_main, -# open_test_pikerd_and_ems=open_test_pikerd_and_ems, -# assert_zeroed_pps=True, -# ), -# ) - - -# def test_multi_sell( -# open_test_pikerd_and_ems: AsyncContextManager, -# ): -# ''' -# Make 5 market limit buy orders and -# then sell 5 slots at the same price. -# Finally, assert cleared positions. - -# ''' -# _run_test_and_check( -# partial( -# _async_main, -# open_test_pikerd_and_ems=open_test_pikerd_and_ems, -# action='buy', -# executions=5, -# ), -# ) - -# _run_test_and_check( -# partial( -# _async_main, -# open_test_pikerd_and_ems=open_test_pikerd_and_ems, -# action='sell', -# executions=5, -# price=1, -# assert_zeroed_pps=True, -# ), -# ) + run_and_tollerate_cancels(just_check_pp) From 2ed9e40d5ef7726bd2fb9ad656a786bcb2325845 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Apr 2023 19:12:52 -0400 Subject: [PATCH 137/294] Better EMS client-side msg formatting --- piker/clearing/_client.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index c9ad0d67..a2835183 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -36,12 +36,12 @@ from ..service import maybe_open_emsd from ._messages import ( Order, Cancel, + BrokerdPosition, ) from ..brokers import get_brokermod if TYPE_CHECKING: from ._messages import ( - BrokerdPosition, Status, ) @@ -197,7 +197,7 @@ async def relay_orders_from_sync_code( ): async for cmd in sync_order_cmds: sym = cmd.symbol - msg = pformat(cmd) + msg = pformat(cmd.to_dict()) if sym == symbol_key: log.info(f'Send order cmd:\n{msg}') @@ -223,7 +223,7 @@ async def open_ems( dict[ # brokername, acctid tuple[str, str], - list[BrokerdPosition], + dict[str, BrokerdPosition], ], list[str], dict[str, Status], @@ -256,7 +256,6 @@ async def open_ems( async with ( # connect to emsd portal.open_context( - _emsd_main, fqme=fqme, exec_mode=mode, From 928765074fca7e5404f11f6e52a8ae0befd00ee7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Apr 2023 21:53:48 -0400 Subject: [PATCH 138/294] Fix zero-pp entry to toml case for new file-per-account format --- piker/accounting/_pos.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 2b3af58f..9fcd80d0 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -637,8 +637,8 @@ class PpTable(Struct): active, closed = self.dump_active() - # ONLY dict-serialize all active positions; those that are closed - # we don't store in the ``pps.toml``. + # ONLY dict-serialize all active positions; those that are + # closed we don't store in the ``pps.toml``. 
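# XXX aside (standalone demo, not part of this patch): the
# config-clearing branch added in the hunk below iterates a `list()`
# snapshot since deleting keys while iterating a live dict raises
# `RuntimeError`:
conf = {'a': 1, 'b': 2}
for key in list(conf):
    del conf[key]
assert not conf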
to_toml_dict = {}
         for bs_mktid, pos in active.items():

@@ -688,13 +688,12 @@ class PpTable(Struct):

             self.conf.update(pp_entries)

-        elif (
-            self.brokername in self.conf and
-            self.acctid in self.conf[self.brokername]
-        ):
-            del self.conf[self.brokername][self.acctid]
-            if len(self.conf[self.brokername]) == 0:
-                del self.conf[self.brokername]
+        # if there are no active position entries according
+        # to the toml dump output above, then clear the config
+        # file of all entries.
+        elif self.conf:
+            for entry in list(self.conf):
+                del self.conf[entry]

         # TODO: why tf haven't they already done this for inline
         # tables smh..

From 589232d12dfef06aee8a552eca0c3a472c312419 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Apr 2023 22:17:27 -0400
Subject: [PATCH 139/294] Only flip size sign for sells if not already -ve

---
 piker/clearing/_paper_engine.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index 02031898..23e1d347 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -124,7 +124,10 @@ class PaperBoi(Struct):
         # in the broker trades event processing loop
         await trio.sleep(0.05)

-        if action == 'sell':
+        if (
+            action == 'sell'
+            and size > 0
+        ):
             size = -size

         msg = BrokerdStatus(

From 7de914d54c0bd44f7f083cf758587726c6691910 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Apr 2023 22:21:22 -0400
Subject: [PATCH 140/294] Fix bad-fqme test, adjust prices based on buy/sell

---
 tests/test_paper.py | 49 ++++++++++++++++++++++++++++++---------------
 1 file changed, 33 insertions(+), 16 deletions(-)

diff --git a/tests/test_paper.py b/tests/test_paper.py
index 0e791197..8a290f6a 100644
--- a/tests/test_paper.py
+++ b/tests/test_paper.py
@@ -159,20 +159,26 @@ def load_and_check_pos(
         yield pp


-@pytest.mark.trio
-async def test_ems_err_on_bad_broker(
+def test_ems_err_on_bad_broker(
     open_test_pikerd: Services,
     loglevel: str,
 ):
-    try:
-        async with open_ems(
-            'doggy.smiles',
-            mode='paper',
-            loglevel=loglevel,
-        ) as _:
-            pytest.fail('EMS is working on non-broker!?')
-    except ModuleNotFoundError:
-        pass
+    async def load_bad_fqme():
+        try:
+            async with (
+                open_test_pikerd() as (_, _, _, services),

+                open_ems(
+                    'doggycoin.doggy',
+                    mode='paper',
+                    loglevel=loglevel,
+                ) as _
+            ):
+                pytest.fail('EMS is working on non-broker!?')
+        except ModuleNotFoundError:
+            pass

+    run_and_tollerate_cancels(load_bad_fqme)


 async def match_ppmsgs_on_ems_boot(
@@ -264,12 +270,14 @@ async def submit_and_check(
     od: dict
     for od in fills:
         print(f'Sending order {od} for fill')
+        size = od['size']
         sent, msgs = await order_and_and_wait_for_ppmsg(
             client,
             trades_stream,
             fqme,
-            action='buy',
-            size=od['size'],
+            action='buy' if size > 0 else 'sell',
+            price=100e3 if size > 0 else 0,
+            size=size,
         )

         last_order: Order = sent[-1]
@@ -300,7 +308,8 @@ async def submit_and_check(
             {'size': 0.001},
         ),

-        # multi-partial entry and exits.
+        # multi-partial entry and exits from net-zero, to short and back
+        # to net-zero.
         (
             # enters
             {'size': 0.001},
             {'size': 0.002},

             # partial exit
             {'size': -0.001},

             # partial enter
             {'size': 0.0015},
             {'size': 0.001},
             {'size': 0.002},

-            # exits to get back to zero.
+            # nearly back to zero.
             {'size': -0.001},
+
+            # switch to net-short
             {'size': -0.025},
             {'size': -0.0195},
+
+            # another entry
+            {'size': 0.001},
+
+            # final cover to net-zero again.
+            {'size': 0.038},
         ),
     ],
     ids='fills={}'.format,
 )
 def test_multi_fill_positions(
     open_test_pikerd: AsyncContextManager,
     loglevel: str,

     fills: tuple[dict],

-    check_cross_session: bool = True,
+    check_cross_session: bool = False,

 ) -> None:

From 3b7579990bbf23dee976bc987c52ec7f147bc738 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Apr 2023 22:21:49 -0400
Subject: [PATCH 141/294] Link `tractor` debug mode to `pytest` --pdb flag

---
 tests/conftest.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 897c6b7f..87611c55 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -87,8 +87,11 @@ def log(
 @acm
 async def _open_test_pikerd(
     tmpconfdir: str,
+
     reg_addr: tuple[str, int] | None = None,
     loglevel: str = 'warning',
+    debug_mode: bool = False,
+
     **kwargs,

 ) -> tuple[
@@ -122,7 +125,7 @@ async def _open_test_pikerd(
             # or just in sequence per test, so we keep root.
             drop_root_perms_for_ahab=False,

-            debug_mode=True,
+            debug_mode=debug_mode,

             **kwargs,

@@ -178,6 +181,8 @@ def open_test_pikerd(
         # bind in level from fixture, which is itself set by
         # `--ll <level>` cli flag.
         loglevel=loglevel,
+
+        debug_mode=request.config.option.usepdb
     )

From dedc51a939a0415a30a8d091e2b420184346665d Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Apr 2023 22:22:17 -0400
Subject: [PATCH 142/294] Quantize order prices prior to `OrderClient.send()`

Order mode previously was just willy-nilly sending `float` prices
(particularly on order edits) which are generated from the associated
level line. This actually uses the `MktPair.price_tick: Decimal` to
ensure the value is rounded correctly before submission to the ems..

Also adjusts the order mode init to expect a table of tables of startup
position messages, with the inner table being keyed by fqme per msg.
---
 piker/ui/order_mode.py | 36 +++++++++++++++++++++++++++++-----
 1 file changed, 31 insertions(+), 5 deletions(-)

diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 3656c1ae..ec789d66 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -289,6 +289,20 @@ class OrderMode:

         symbol = self.chart.linked.symbol

+        # NOTE : we could also use instead,
+        # symbol.quantize(price, quantity_type='price')
+        # but it returns a Decimal and it's probably gonna
+        # be slower?
+        # TODO: should we be enforcing this precision
+        # at a different layer in the stack? right now
+        # any precision error will literally be relayed
+        # all the way back from the backend.
+
+        price = round(
+            price,
+            ndigits=symbol.tick_size_digits,
+        )
+
         order = self._staged_order = Order(
             action=action,
             price=price,
@@ -359,7 +373,7 @@ class OrderMode:
         # NOTE: we have to str-ify `MktPair` first since we can't
         # cast to it without being mega explicit with
         # `msgspec.Struct`, which we're not yet..
-        order = staged.copy({
+        order: Order = staged.copy({
             'symbol': str(staged.symbol),
             'oid': oid,
         })
@@ -436,8 +450,17 @@ class OrderMode:
         line: LevelLine,

     ) -> None:
+        '''
+        Retrieve the level line's end state, compute the size
+        and price for the new price-level, send an update msg to
+        the EMS, adjust mirrored level line on secondary chart.

-        level = line.value()
+        '''
+        mktinfo = self.chart.linked.symbol
+        level = round(
+            line.value(),
+            ndigits=mktinfo.tick_size_digits,
+        )
         # updated by level change callback set in ``.new_line_from_order()``
         dialog = line.dialog
         size = dialog.order.size
@@ -689,7 +712,7 @@ async def open_order_mode(
     # symbol names (i.e.
the same names you'd get back in search # results) in order for position msgs to correctly trigger the # display of a position indicator on screen. - position_msgs: dict[str, list[BrokerdPosition]] + position_msgs: dict[str, dict[str, BrokerdPosition]] # spawn EMS actor-service async with ( @@ -872,8 +895,11 @@ async def open_order_mode( # Pack position messages by account, should only be one-to-one. # NOTE: requires the backend exactly specifies # the expected symbol key in its positions msg. - for (broker, acctid), msgs in position_msgs.items(): - for msg in msgs: + for ( + (broker, acctid), + pps_by_fqme + ) in position_msgs.items(): + for msg in pps_by_fqme.values(): await process_trade_msg( mode, client, From 2cf7daca3036112c292ae1caec47cc307e17bd4c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Apr 2023 22:27:29 -0400 Subject: [PATCH 143/294] Another fqsn -> fqme rename --- piker/data/feed.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 405a8f57..927eecd5 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -941,7 +941,7 @@ class BackendInitMsg(Struct, frozen=True): from each backend broker/data provider. ''' - fqsn: str + fqme: str symbol_info: dict | None = None mkt_info: MktPair | None = None shm_write_opts: dict[str, Any] | None = None @@ -1284,7 +1284,9 @@ async def open_feed_bus( # sync feed subscribers with flume handles await ctx.started( - {fqsn: flume.to_msg() for fqsn, flume in flumes.items()} + {fqsn: flume.to_msg() + for fqsn, flume in flumes.items() + } ) if not start_stream: From 146e0993a9f264e53546ba9e23abf4d729f2363d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 11 Apr 2023 01:26:55 -0400 Subject: [PATCH 144/294] More explicit test mod docstring --- tests/test_paper.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_paper.py b/tests/test_paper.py index 8a290f6a..378c2690 100644 --- a/tests/test_paper.py +++ b/tests/test_paper.py @@ -1,5 +1,13 @@ ''' -Paper-mode testing +Execution mgmt system (EMS) e2e testing. + +Most tests leverage our paper clearing engine found (currently) in +``piker.clearing._paper_engine`. + +Ideally in the longer run we are able to support forms of (non-clearing) +live order tests against certain backends that make it possible to do +so.. + ''' from contextlib import ( contextmanager as cm, From 02eb966a87afc9f70b7b9090e683b9a7ac1038a1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 11 Apr 2023 01:31:52 -0400 Subject: [PATCH 145/294] Rename ems test mod --- tests/{test_paper.py => test_ems.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_paper.py => test_ems.py} (100%) diff --git a/tests/test_paper.py b/tests/test_ems.py similarity index 100% rename from tests/test_paper.py rename to tests/test_ems.py From 48cae3c1786ff3292f6bd996ac2f52ccd11ce83c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 11 Apr 2023 14:03:47 -0400 Subject: [PATCH 146/294] `ib`: rejects their own fractional size tick.. Frickin ib, they give you the `0.001` (or wtv) in the `ContractDetails.minSize: float` but won't accept fractional sizes through the API.. Either way, it's probably not sane to be supporting fractional order sizes for legacy instruments by default especially since it in theory affects a lot of the clearing outcomes by having ib do wtv magical junk behind the scenes to make it work.. 
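
As a quick illustrative aside (hypothetical helper, not part of this
patch): the reason the clamp matters downstream is that size
quantization against ib's reported fractional `minSize` would happily
round a stock order to a fractional share which the API then rejects:

    from decimal import Decimal, ROUND_HALF_EVEN

    def quantize_size(
        size: float,
        size_tick: Decimal,
    ) -> Decimal:
        # round a requested order size to the instrument's size-tick,
        # mirroring what `MktPair.quantize()` does for sizes.
        return Decimal(str(size)).quantize(
            size_tick,
            rounding=ROUND_HALF_EVEN,
        )

    # ib's reported tick lets a fractional share size through..
    assert quantize_size(1.5, Decimal('0.001')) == Decimal('1.500')

    # ..but clamping to whole shares yields an API-acceptable size.
    assert quantize_size(1.5, Decimal('1')) == Decimal('2')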
--- piker/brokers/ib/feed.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index ead4b8e4..e06ae29c 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -619,7 +619,7 @@ async def _setup_quote_stream( async def open_aio_quote_stream( symbol: str, - contract: Optional[Contract] = None, + contract: Contract | None = None, ) -> trio.abc.ReceiveStream: @@ -741,9 +741,9 @@ async def stream_quotes( try: ( - con, - first_ticker, - details, + con, # Contract + first_ticker, # Ticker + details, # ContractDetails ) = await proxy.get_sym_details(symbol=sym) except ConnectionError: log.exception(f'Proxy is ded {proxy._aio_ns}') @@ -759,6 +759,7 @@ async def stream_quotes( ''' # pass back some symbol info like min_tick, trading_hours, etc. + con: Contract = details.contract syminfo = asdict(details) syminfo.update(syminfo['contract']) @@ -785,6 +786,11 @@ async def stream_quotes( price_tick: Decimal = Decimal(str(syminfo['minTick'])) size_tick: Decimal = Decimal(str(syminfo['minSize']).rstrip('0')) + # XXX: GRRRR they don't support fractional share sizes for + # stocks from the API?! + if con.secType == 'STK': + size_tick = Decimal('1') + syminfo['price_tick_size'] = price_tick # NOTE: as you'd expect for "legacy" assets, the "volume # precision" is normally discreet. From b810de30892b7ed385c555547f20d9fc3560b3ae Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 11 Apr 2023 23:59:50 -0400 Subject: [PATCH 147/294] Rename fqsn -> fqme in feeds tests --- tests/test_feeds.py | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/tests/test_feeds.py b/tests/test_feeds.py index 0435ed61..0f88ce5f 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -13,13 +13,14 @@ from piker.data import ( ShmArray, open_feed, ) +from piker.data.flows import Flume from piker.accounting._mktinfo import ( unpack_fqsn, ) @pytest.mark.parametrize( - 'fqsns', + 'fqmes', [ # binance (100, {'btcusdt.binance', 'ethusdt.binance'}, False), @@ -30,20 +31,20 @@ from piker.accounting._mktinfo import ( # binance + kraken (100, {'btcusdt.binance', 'xbtusd.kraken'}, False), ], - ids=lambda param: f'quotes={param[0]}@fqsns={param[1]}', + ids=lambda param: f'quotes={param[0]}@fqmes={param[1]}', ) def test_multi_fqsn_feed( open_test_pikerd: AsyncContextManager, - fqsns: set[str], + fqmes: set[str], loglevel: str, ci_env: bool ): ''' - Start a real-time data feed for provided fqsn and pull + Start a real-time data feed for provided fqme and pull a few quotes then simply shut down. 
''' - max_quotes, fqsns, run_in_ci = fqsns + max_quotes, fqmes, run_in_ci = fqmes if ( ci_env @@ -52,15 +53,15 @@ def test_multi_fqsn_feed( pytest.skip('Skipping CI disabled test due to feed restrictions') brokers = set() - for fqsn in fqsns: - brokername, key, suffix = unpack_fqsn(fqsn) + for fqme in fqmes: + brokername, key, suffix = unpack_fqsn(fqme) brokers.add(brokername) async def main(): async with ( open_test_pikerd(), open_feed( - fqsns, + fqmes, loglevel=loglevel, # TODO: ensure throttle rate is applied @@ -71,20 +72,20 @@ def test_multi_fqsn_feed( ) as feed ): # verify shm buffers exist - for fqin in fqsns: + for fqin in fqmes: flume = feed.flumes[fqin] ohlcv: ShmArray = flume.rt_shm hist_ohlcv: ShmArray = flume.hist_shm async with feed.open_multi_stream(brokers) as stream: - # pull the first startup quotes, one for each fqsn, and + # pull the first startup quotes, one for each fqme, and # ensure they match each flume's startup quote value. - fqsns_copy = fqsns.copy() + fqsns_copy = fqmes.copy() with trio.fail_after(0.5): for _ in range(1): first_quotes = await stream.receive() - for fqsn, quote in first_quotes.items(): + for fqme, quote in first_quotes.items(): # XXX: TODO: WTF apparently this error will get # supressed and only show up in the teardown @@ -92,18 +93,18 @@ def test_multi_fqsn_feed( # # assert 0 - fqsns_copy.remove(fqsn) - flume = feed.flumes[fqsn] + fqsns_copy.remove(fqme) + flume: Flume = feed.flumes[fqme] assert quote['last'] == flume.first_quote['last'] cntr = Counter() with trio.fail_after(6): async for quotes in stream: - for fqsn, quote in quotes.items(): - cntr[fqsn] += 1 + for fqme, quote in quotes.items(): + cntr[fqme] += 1 # await tractor.breakpoint() - flume = feed.flumes[fqsn] + flume = feed.flumes[fqme] ohlcv: ShmArray = flume.rt_shm hist_ohlcv: ShmArray = flume.hist_shm @@ -116,7 +117,7 @@ def test_multi_fqsn_feed( # assert last == rt_row['close'] # assert last == hist_row['close'] pprint( - f'{fqsn}: {quote}\n' + f'{fqme}: {quote}\n' f'rt_ohlc: {rt_row}\n' f'hist_ohlc: {hist_row}\n' ) @@ -124,6 +125,6 @@ def test_multi_fqsn_feed( if cntr.total() >= max_quotes: break - assert set(cntr.keys()) == fqsns + assert set(cntr.keys()) == fqmes trio.run(main) From b1e162ebb463a1e327a285699c5a0a09ab3e61f2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Apr 2023 01:22:22 -0400 Subject: [PATCH 148/294] Fix ._util import in questrade backend --- piker/brokers/questrade.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/brokers/questrade.py b/piker/brokers/questrade.py index b7042bdf..1d447b23 100644 --- a/piker/brokers/questrade.py +++ b/piker/brokers/questrade.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0) +# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -46,7 +46,7 @@ from ._util import resproc, BrokerError, SymbolNotFound from ..log import ( colorize_json, ) -from .util import ( +from ._util import ( log, get_console_log, ) From 611d86d98865e398c31a93fc9ba2804c84a84479 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Apr 2023 01:45:42 -0400 Subject: [PATCH 149/294] Change `Flume.symbol` -> `.mkt: MktPair` Might as well try and flip it over to the new type; make appropriate dict serialization changes in `.to_msg()`. Alias back to `.symbol: Symbol` with a property. 
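
For reference, the back-compat alias pattern boiled down to a tiny
standalone sketch (hypothetical `Flow` type, *not* the real `Flume`):
reads of the legacy attribute keep working but emit a warning steering
callers to the new name:

    import logging

    log = logging.getLogger(__name__)

    class Flow:
        def __init__(self, mkt: str) -> None:
            # the new canonical field
            self.mkt = mkt

        @property
        def symbol(self) -> str:
            # legacy name kept alive for not-yet-ported callers
            log.warning(
                '`Flow.symbol` is deprecated, use `.mkt` instead!'
            )
            return self.mkt

    flow = Flow(mkt='btcusdt.binance')
    assert flow.symbol == flow.mkt  # warns, but still works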
--- piker/data/flows.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/piker/data/flows.py b/piker/data/flows.py index 34adf876..1ddd35c2 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -35,6 +35,9 @@ from ..accounting._mktinfo import ( MktPair, Symbol, ) +from ..log import ( + get_logger, +) from .types import Struct from ._sharedmem import ( attach_shm_array, @@ -50,6 +53,8 @@ if TYPE_CHECKING: # from pyqtgraph import PlotItem from .feed import Feed +log = get_logger(__name__) + # TODO: ideas for further abstractions as per # https://github.com/pikers/piker/issues/216 and @@ -91,10 +96,18 @@ class Flume(Struct): queuing properties. ''' - symbol: Symbol | MktPair + mkt: MktPair | Symbol first_quote: dict _rt_shm_token: _Token + @property + def symbol(self) -> MktPair | Symbol: + log.warning( + '`Flume.symbol` is deprecated!\n' + 'Use `.mkt: MktPair` instead!' + ) + return self.mkt + # optional since some data flows won't have a "downsampled" history # buffer/stream (eg. FSPs). _hist_shm_token: _Token | None = None @@ -176,14 +189,7 @@ class Flume(Struct): def to_msg(self) -> dict: msg = self.to_dict() - - # TODO: do we even need to convert to dict - # first now? - # TODO: drop the former. - msg['symbol'] = msg['symbol'].to_dict() - mktpair = msg.get('mktpair') - if mktpair: - msg['mktpair'] = mktpair.to_dict() + msg['mkt'] = self.mkt.to_dict() # can't serialize the stream or feed objects, it's expected # you'll have a ref to it since this msg should be rxed on @@ -203,10 +209,10 @@ class Flume(Struct): `msgspec.Struct` form. ''' - sym_msg = msg.pop('symbol') + mkt_msg = msg.pop('mkt') - if 'dst' in sym_msg: - mkt = MktPair.from_msg(sym_msg) + if 'dst' in mkt_msg: + mkt = MktPair.from_msg(mkt_msg) else: # XXX NOTE: ``msgspec`` can encode `Decimal` @@ -214,9 +220,9 @@ class Flume(Struct): # we aren't spec-cing these msgs as structs, SO # we have to ensure we do a struct type case (which `.copy()` # does) to ensure we get the right type! - mkt = Symbol(**sym_msg).copy() + mkt = Symbol(**mkt_msg).copy() - return cls(symbol=mkt, **msg) + return cls(mkt=mkt, **msg) def get_index( self, From a301fabd6ce97d4f57fba3b3cdeff370a6ce2481 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Apr 2023 01:50:05 -0400 Subject: [PATCH 150/294] Change`.ui._fsp` to use `Flume.mkt` --- piker/ui/_fsp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index 960b287a..5c61b07e 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -503,7 +503,7 @@ class FspAdmin: }, ) dst_fsp_flume = Flume( - symbol=symbol, + mkt=symbol, _rt_shm_token=dst_shm.token, first_quote={}, From 0917b580c9c774480f4871b1a9fdaf22cffbf573 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Apr 2023 01:50:36 -0400 Subject: [PATCH 151/294] Flip `.feed` and `._sampling` over to new stuff In `.feed` and `._sampling` move to using the new `tractor.Context.open_stream(allow_overruns: bool)` (cough, A BREAKING CHANGE). Also set `Flume.mkt` during construction in `.feed.open_feed()`. 
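
To make the overrun case concrete, a minimal `trio`-only sketch (no
`tractor` machinery) of the fast-producer / slow-consumer condition
that `allow_overruns=True` is meant to tolerate during feed startup:

    import trio

    async def main() -> None:
        # tiny buffer to force an overrun quickly
        send, recv = trio.open_memory_channel(max_buffer_size=2)

        # quote-feed-ish producer pushing faster than the consumer..
        for i in range(2):
            send.send_nowait({'tick': i})

        try:
            # the 3rd msg overruns the buffer
            send.send_nowait({'tick': 2})
        except trio.WouldBlock:
            print('overrun! -> either drop, block, or error..')

        # close so the consumer loop terminates after draining
        send.close()
        async with recv:
            async for msg in recv:
                print('consumed', msg)

    trio.run(main)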
--- piker/data/_sampling.py | 3 --- piker/data/feed.py | 36 ++++++++++++------------------------ 2 files changed, 12 insertions(+), 27 deletions(-) diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 3ebdd140..208a686b 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -782,9 +782,6 @@ async def uniform_rate_send( https://gist.github.com/njsmith/7ea44ec07e901cb78ebe1dd8dd846cb9 ''' - # try not to error-out on overruns of the subscribed client - stream._ctx._backpressure = True - # TODO: compute the approx overhead latency per cycle left_to_sleep = throttle_period = 1/rate - 0.000616 diff --git a/piker/data/feed.py b/piker/data/feed.py index 927eecd5..9d4e09d9 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -1116,7 +1116,7 @@ async def allocate_persistent_feed( # TODO: we have to use this for now since currently the # MktPair above doesn't render the correct output key it seems # when we provide the `MktInfo` here?..? - symbol=symbol, + mkt=symbol, first_quote=first_quote, _rt_shm_token=rt_shm.token, @@ -1206,10 +1206,6 @@ async def open_feed_bus( symbol. ''' - # ensure that a quote feed stream which is pushing too fast doesn't - # cause and overrun in the client. - ctx._backpressure = True - if loglevel is None: loglevel = tractor.current_actor().loglevel @@ -1285,8 +1281,7 @@ async def open_feed_bus( # sync feed subscribers with flume handles await ctx.started( {fqsn: flume.to_msg() - for fqsn, flume in flumes.items() - } + for fqsn, flume in flumes.items()} ) if not start_stream: @@ -1295,7 +1290,12 @@ async def open_feed_bus( # real-time stream loop async with ( - ctx.open_stream() as stream, + ctx.open_stream( + # NOTE we allow this since it's common to have the live + # quote feed actor's sampling task push faster then the + # the local UI-graphics code during startup. + allow_overruns=True, + ) as stream, ): local_subs: dict[str, set[tuple]] = {} @@ -1323,7 +1323,6 @@ async def open_feed_bus( # a max ``tick_throttle`` instantaneous rate. send, recv = trio.open_memory_channel(2**10) - ctx._backpressure = False cs = await bus.start_task( uniform_rate_send, tick_throttle, @@ -1455,14 +1454,6 @@ class Feed(Struct): _max_sample_rate: int = 1 - # @property - # def portal(self) -> tractor.Portal: - # return self._portal - - # @property - # def name(self) -> str: - # return self.mod.name - async def pause(self) -> None: for stream in set(self.streams.values()): await stream.send('pause') @@ -1537,7 +1528,7 @@ async def maybe_open_feed( 'tick_throttle': kwargs.get('tick_throttle'), # XXX: super critical to have bool defaults here XD - 'backpressure': kwargs.get('backpressure', True), + 'allow_overruns': kwargs.get('allow_overruns', True), 'start_stream': kwargs.get('start_stream', True), }, key=fqsn, @@ -1569,7 +1560,7 @@ async def open_feed( fqsns: list[str], loglevel: str | None = None, - backpressure: bool = True, + allow_overruns: bool = True, start_stream: bool = True, tick_throttle: float | None = None, # Hz @@ -1659,9 +1650,6 @@ async def open_feed( (brokermod, bfqsns), ) in zip(ctxs, providers.items()): - # NOTE: do it asap to avoid overruns during multi-feed setup? 
- ctx._backpressure = backpressure - for fqsn, flume_msg in flumes_msg_dict.items(): flume = Flume.from_msg(flume_msg) assert flume.symbol.fqsn == fqsn @@ -1683,11 +1671,11 @@ async def open_feed( stream_ctxs.append( ctx.open_stream( - # XXX: be explicit about stream backpressure + # XXX: be explicit about stream overruns # since we should **never** overrun on feeds # being too fast, which will pretty much # always happen with HFT XD - backpressure=backpressure, + allow_overruns=allow_overruns, ) ) From 10a39ca42ce1392d3890156b324bae29dd17d50b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Apr 2023 01:55:48 -0400 Subject: [PATCH 152/294] More detailed dark-slap comments --- piker/clearing/_ems.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index ad13e8f0..0d666ef0 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -1250,6 +1250,11 @@ async def process_client_order_cmds( pred = mk_check(trigger_price, last, action) + # NOTE: for dark orders currently we submit + # the triggered live order at a price 5 ticks + # above/below the L1 prices. + # TODO: make this configurable from our top level + # config, prolly in a .clearing` section? spread_slap: float = 5 min_tick = float(flume.symbol.size_tick) min_tick_digits = float_digits(min_tick) From 0d2e713e9adbda8a050b2dd4bc016b8bad2a9285 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 16 Apr 2023 17:55:44 -0400 Subject: [PATCH 153/294] `binance`: facepalm, swap price/size_tick methods.. Wow not sure how that happened, but we should probably use the correct market precision info for the correct parameter.. Also, use `@lru_cache` on new `get_mkt_info()` ep, seems to work? --- piker/brokers/binance.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index e1ffabd7..aa1a1f5d 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -23,6 +23,7 @@ Binance backend """ from contextlib import asynccontextmanager as acm from datetime import datetime +from functools import lru_cache from decimal import Decimal from typing import ( Any, Union, Optional, @@ -131,14 +132,16 @@ class Pair(Struct, frozen=True): permissions: list[str] @property - def size_tick(self) -> Decimal: + def price_tick(self) -> Decimal: # XXX: lul, after manually inspecting the response format we # just directly pick out the info we need - return Decimal(self.filters['PRICE_FILTER']['tickSize'].rstrip('0')) + step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0') + return Decimal(step_size) @property - def price_tick(self) -> Decimal: - return Decimal(self.filters['LOT_SIZE']['stepSize'].rstrip('0')) + def size_tick(self) -> Decimal: + step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0') + return Decimal(step_size) class OHLC(Struct): @@ -473,6 +476,7 @@ async def open_history_client( yield get_ohlc, {'erlangs': 3, 'rate': 3} +@lru_cache async def get_mkt_info( fqme: str, @@ -481,7 +485,6 @@ async def get_mkt_info( async with open_cached_client('binance') as client: pair: Pair = await client.exch_info(fqme.upper()) - mkt = MktPair( dst=Asset( name=pair.baseAsset, From bba1ee43ff6d1bf530ce6bf0a1e6071482d2a1f7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 14:50:07 -0400 Subject: [PATCH 154/294] Allow mkt info table input to `.iter_trans()` Since ledger records are often provided (and thus stored) from most backends *without* containing the info we 
normally need for accounting defined by `MktPair`, this extends the ledger method to take in a table that allows assigning the `Transaction.sys` from an fqme lookup. This way client code (like the paper engine and new ledger mgmt tools) can do the mkt info lookup before hand and then load both ledger `Transactions` and positions via the `PpTable` and get correct accounting calculations, always :fingers_crossed: Also adds `TransactionLedger.update_from_t(t: Transaction)` to allow updating directly from an existing tran instead of making the user cast to a `dict` first. Includes fix to `.to_dict()` to always pop the `.sym` again to avoid client code having to do so. --- piker/accounting/_ledger.py | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 1ff593bc..64f77bab 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -72,7 +72,8 @@ class Transaction(Struct, frozen=True): def fqme(self) -> str: return self.fqsn - # TODO: drop the Symbol type + # TODO: drop the Symbol type, construct using + # t.sys (the transaction system) # the underlying "transaction system", normally one of a ``MktPair`` # (a description of a tradable double auction) or a ledger-recorded @@ -93,6 +94,10 @@ class Transaction(Struct, frozen=True): def to_dict(self) -> dict: dct = super().to_dict() + + # TODO: switch to sys! + dct.pop('sym') + # ensure we use a pendulum formatted # ISO style str here!@ dct['dt'] = str(self.dt) @@ -122,11 +127,24 @@ class TransactionLedger(UserDict): ''' with self.file_path.open(mode='w') as fp: + + # rewrite the key name to fqme if needed + fqsn: str = self.data.get('fqsn') + if fqsn: + self.data['fqme'] = fqsn + toml.dump(self.data, fp) + def update_from_t( + self, + t: Transaction, + ) -> None: + self.data[t.tid] = t.to_dict() + def iter_trans( self, broker: str = 'paper', + mkt_by_fqme: dict[str, MktPair] | None = None, ) -> Generator[ tuple[str, Transaction], @@ -158,6 +176,7 @@ class TransactionLedger(UserDict): fqme = txdict.get('fqme', txdict['fqsn']) dt = parse(txdict['dt']) expiry = txdict.get('expiry') + mkt_by_fqme = mkt_by_fqme or {} yield ( tid, @@ -171,7 +190,7 @@ class TransactionLedger(UserDict): bs_mktid=txdict['bs_mktid'], # optional - sym=None, + sym=mkt_by_fqme[fqme] if mkt_by_fqme else None, expiry=parse(expiry) if expiry else None, ) ) @@ -180,12 +199,19 @@ class TransactionLedger(UserDict): self, broker: str = 'paper', + **kwargs, + ) -> dict[str, Transaction]: ''' Return the entire output from ``.iter_trans()`` in a ``dict``. ''' - return dict(self.iter_trans()) + return dict( + self.iter_trans( + broker, + **kwargs, + ) + ) @cm From f106472bcb9ce4aae992bb9b39f707a785bff993 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 15:18:43 -0400 Subject: [PATCH 155/294] Fix size quantization and closed position popping.. Turns out we actually had further pp entry bugs due to *not quantizing* the size inside `.minimize_clears()` method calcs; fix that using `Position.sys.mkt.quantize()` as is done in `Position.calc_size()`. Fix `PpTable.write_config()` to drop from the TOML config any `closed: dict[str, Position]` entries delivered by `.dump_active()`. 
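
To see why the quantization matters, a tiny aside (sizes are
arbitrary, not from a real ledger): naive float accumulation of clears
drifts off an exact net-zero, so the "last net-zero position" scan in
`.minimize_clears()` can miss the flattening point entirely:

    from decimal import Decimal

    clears: list[float] = [0.1, 0.2, -0.3]

    fsize: float = 0.0
    for size in clears:
        fsize += size
    print(fsize)  # ~5.55e-17 -> *not* an exact 0.0!

    dsize = Decimal(0)
    for size in clears:
        # quantizing each accumulation step (here just by using
        # `Decimal`) keeps the running size exact, as
        # `Position.sys.mkt.quantize()` now ensures.
        dsize += Decimal(str(size))

    assert dsize == Decimal('0.0')  # exact net-zero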
Add a more detailed doc string for our position type and a little
todo for the `.bep` B)
---
 piker/accounting/_pos.py | 82 +++++++++++++++++++++++++++++-----------
 1 file changed, 60 insertions(+), 22 deletions(-)

diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 9fcd80d0..bbf7ce8c 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -63,19 +63,42 @@ log = get_logger(__name__)
 
 class Position(Struct):
     '''
-    Basic pp (personal/piker position) model with attached clearing
-    transaction history.
+    An asset "position" model with attached clearing transaction history.
+
+    A financial "position" in `piker` terms is a summary of accounting
+    metrics computed from a transaction ledger; generally it describes
+    some accumulative "size" and "average price" from the summarized
+    underlying transaction set.
+
+    In piker we focus on the `.ppu` (price per unit) and the `.bep`
+    (break even price) including all transaction entries and exits since
+    the last "net-zero" size of the destination asset's holding.
+
+    This interface serves as an object API for computing and tracking
+    positions as well as supporting serialization for storage in the
+    local file system (in TOML) and to interchange as a msg over IPC.
 
     '''
     symbol: Symbol | MktPair
 
+    @property
+    def mkt(self) -> MktPair:
+        return self.symbol
+
     # can be +ve or -ve for long/short
     size: float
 
-    # "breakeven price" above or below which pnl moves above and below
-    # zero for the entirety of the current "trade state".
+    # the "price-per-unit" (ppu) above or below which pnl moves above
+    # and below zero for the entirety of the current "trade state". The
+    # ppu is only modified on "increases of" the absolute size of a
+    # position in one of a long/short "direction" (i.e. abs(.size_i) > 0
+    # after the next transaction given .size was > 0 before that tx, and
+    # vice versa for -ve sized positions).
     ppu: float
 
+    # TODO: break-even-price support!
+    # bep: float
+
     # unique "backend system market id"
     bs_mktid: str
 
@@ -164,7 +187,8 @@ class Position(Struct):
         inline_table = toml.TomlDecoder().get_empty_inline_table()
 
         # serialize datetime to parsable `str`
-        inline_table['dt'] = str(data['dt'])
+        dtstr = inline_table['dt'] = str(data['dt'])
+        assert 'Datetime' not in dtstr
 
         # insert optional clear fields in column order
         for k in ['ppu', 'accum_size']:
@@ -191,7 +215,9 @@ class Position(Struct):
 
         '''
         clears = list(self.clears.values())
-        self.first_clear_dt = min(list(entry['dt'] for entry in clears))
+        self.first_clear_dt = min(
+            list(entry['dt'] for entry in clears)
+        )
         last_clear = clears[-1]
         csize = self.calc_size()
 
@@ -413,15 +439,21 @@ class Position(Struct):
         asset using the clears/trade event table; zero if expired.
 
         '''
-        size: float = 0
+        size: float = 0.
 
         # time-expired pps (normally derivatives) are "closed"
        # and have a zero size.
         if self.expired():
-            return 0
+            return 0.
 
         for tid, entry in self.clears.items():
             size += entry['size']
+            # XXX: do we need it every step?
+            # no right since rounding is an LT?
+            # size = self.mkt.quantize(
+            #     size + entry['size'],
+            #     quantity_type='size',
+            # )
 
         if self.split_ratio is not None:
             size = round(size * self.split_ratio)
@@ -450,7 +482,9 @@ class Position(Struct):
         # scan for the last "net zero" position by iterating
         # transactions until the next net-zero size, rinse, repeat.
for tid, clear in self.clears.items(): - size += clear['size'] + size = float( + self.mkt.quantize(size + clear['size']) + ) clears_since_zero.append((tid, clear)) if size == 0: @@ -504,8 +538,6 @@ class PpTable(Struct): trans: dict[str, Transaction], cost_scalar: float = 2, - force_mkt: MktPair | None = None, - ) -> dict[str, Position]: pps = self.pps @@ -523,15 +555,7 @@ class PpTable(Struct): # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt: MktPair | Symbol | None = force_mkt or t.sys - if not mkt: - mkt = MktPair.from_fqme( - fqme, - price_tick='0.01', - size_tick='0.0', - bs_mktid=bs_mktid, - ) - + mkt: MktPair = t.sys pp = pps.get(bs_mktid) if not pp: # if no existing pp, allocate fresh one. @@ -633,14 +657,18 @@ class PpTable(Struct): def to_toml( self, + active: dict[str, Position] | None = None, + ) -> dict[str, Any]: - active, closed = self.dump_active() + if active is None: + active, _ = self.dump_active() # ONLY dict-serialize all active positions; those that are # closed we don't store in the ``pps.toml``. to_toml_dict = {} + pos: Position for bs_mktid, pos in active.items(): # keep the minimal amount of clears that make up this @@ -650,6 +678,8 @@ class PpTable(Struct): # serialize to pre-toml form fqme, asdict = pos.to_pretoml() + + # assert 'Datetime' not in asdict['dt'] log.info(f'Updating active pp: {fqme}') # XXX: ugh, it's cuz we push the section under @@ -667,7 +697,9 @@ class PpTable(Struct): # TODO: show diff output? # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries # active, closed_pp_objs = table.dump_active() - pp_entries = self.to_toml() + + active, closed = self.dump_active() + pp_entries = self.to_toml(active=active) if pp_entries: log.info( f'Updating positions in ``{self.conf_path}``:\n' @@ -688,6 +720,12 @@ class PpTable(Struct): self.conf.update(pp_entries) + # drop any entries that are computed as net-zero + # we don't care about storing in the pps file. + if closed: + for fqme in closed: + self.conf.pop(fqme, None) + # if there are no active position entries according # to the toml dump output above, then clear the config # file of all entries. From 7ee6f36e6232b0ecc43df9334b7fa2d77cec5e22 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 16:30:58 -0400 Subject: [PATCH 156/294] Actually, require `mkt_by_fqme` in `.iter_trans()` --- piker/accounting/_ledger.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 64f77bab..8025ec3d 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -143,8 +143,8 @@ class TransactionLedger(UserDict): def iter_trans( self, + mkt_by_fqme: dict[str, MktPair], broker: str = 'paper', - mkt_by_fqme: dict[str, MktPair] | None = None, ) -> Generator[ tuple[str, Transaction], @@ -176,7 +176,12 @@ class TransactionLedger(UserDict): fqme = txdict.get('fqme', txdict['fqsn']) dt = parse(txdict['dt']) expiry = txdict.get('expiry') - mkt_by_fqme = mkt_by_fqme or {} + + mkt = mkt_by_fqme.get(fqme) + if not mkt: + # we can't build a trans if we don't have + # the ``.sys: MktPair`` info, so skip. + continue yield ( tid, @@ -189,29 +194,22 @@ class TransactionLedger(UserDict): cost=txdict.get('cost', 0), bs_mktid=txdict['bs_mktid'], - # optional - sym=mkt_by_fqme[fqme] if mkt_by_fqme else None, + # TODO: change to .sys! 
+ sym=mkt, expiry=parse(expiry) if expiry else None, ) ) def to_trans( self, - broker: str = 'paper', - **kwargs, ) -> dict[str, Transaction]: ''' - Return the entire output from ``.iter_trans()`` in a ``dict``. + Return entire output from ``.iter_trans()`` in a ``dict``. ''' - return dict( - self.iter_trans( - broker, - **kwargs, - ) - ) + return dict(self.iter_trans(**kwargs)) @cm From 4b7ac1d895c263fe729894db423e1a3d7600bf6c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 16:31:21 -0400 Subject: [PATCH 157/294] Port paper engine to latest `.accounting` sys fixes - only preload necessary (one for clearing, all for ledger sync) `MktPair` info from the backend using `.get_mkt_info()`, build the `mkt_by_fqme: dict[str, MktPair]` and pass it to `TransactionLedger.iter_trans()`. - use new `TransactionLedger.update_from_t()` method on clears. - sanity check all `mkt_by_fqme` entries against `Flume.mkt` values when we open a data feed. - rename `PaperBoi._syms` -> `._mkts`. --- piker/clearing/_paper_engine.py | 81 +++++++++++++++++++-------------- 1 file changed, 47 insertions(+), 34 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 23e1d347..511d625c 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -38,7 +38,6 @@ from ..brokers import get_brokermod from .. import data from ..data.types import Struct from ..accounting._mktinfo import ( - Symbol, MktPair, ) from ..accounting import ( @@ -85,7 +84,7 @@ class PaperBoi(Struct): _buys: defaultdict[str, bidict] _sells: defaultdict[str, bidict] _reqids: bidict - _syms: dict[str, Symbol] = {} + _mkts: dict[str, MktPair] = {} # init edge case L1 spread last_ask: tuple[float, float] = (float('inf'), 0) # price, size @@ -262,7 +261,7 @@ class PaperBoi(Struct): bs_mktid: str = fqme t = Transaction( fqsn=fqme, - sym=self._syms[fqme], + sym=self._mkts[fqme], tid=oid, size=size, price=price, @@ -271,11 +270,8 @@ class PaperBoi(Struct): bs_mktid=bs_mktid, ) - tx = t.to_dict() - tx.pop('sym') - # update in-mem ledger and pos table - self.ledger.update({oid: tx}) + self.ledger.update_from_t(t) self.ppt.update_from_trans({oid: t}) # transmit pp msg to ems @@ -293,12 +289,13 @@ class PaperBoi(Struct): # inferred from the pair? # currency=bs_mktid, ) - await self.ems_trades_stream.send(pp_msg) - - # write all updates to filesys + # write all updates to filesys immediately + # (adds latency but that works for simulation anyway) self.ledger.write_config() self.ppt.write_config() + await self.ems_trades_stream.send(pp_msg) + async def simulate_fills( quote_stream: tractor.MsgStream, # noqa @@ -552,36 +549,51 @@ async def trades_dialogue( 'paper', ) as ledger ): - # attempt to get market info from the backend instead of presuming - # the ledger entries have everything correct. + # NOTE: retreive market(pair) info from the backend broker + # since ledger entries (in their backend native format) often + # don't contain necessary market info per trade record entry.. + # - if no fqme was passed in, we presume we're running in + # "ledger-sync-only mode" and thus we load mkt info for + # each symbol found in the ledger to a ppt table manually. + # TODO: how to process ledger info from backends? # - should we be rolling our own actor-cached version of these # client API refs or using portal IPC to send requests to the # existing brokerd daemon? # - alternatively we can possibly expect and use # a `.broker.norm_trade_records()` ep? 
- fqmes: list[str] = [fqme] - if fqme is None: - fqmes = list(ppt.pps) + brokermod = get_brokermod(broker) + gmi = getattr(brokermod, 'get_mkt_info', None) - for fqme in fqmes: - mkt: MktPair | None = None - brokermod = get_brokermod(broker) - gmi = getattr(brokermod, 'get_mkt_info', None) - if gmi: + # update all transactions with mkt info before + # loading any pps + mkt_by_fqme: dict[str, MktPair | None] = {} + for tid, tdict in ledger.data.items(): + + # TODO: switch this to fqme + l_fqme = tdict['fqsn'] + if ( + gmi + and l_fqme not in mkt_by_fqme + ): mkt, pair = await brokermod.get_mkt_info( - fqme.rstrip(f'.{broker}'), + l_fqme.rstrip(f'.{broker}'), ) + mkt_by_fqme[l_fqme] = mkt - # update pos table from ledger history - ppt.update_from_trans( - ledger.to_trans(), + # if an ``fqme: str`` input was provided we only + # need a ``MktPair`` for that one market, since we're + # running in real simulated-clearing mode, not just ledger + # syncing. + if ( + fqme is not None + and fqme in mkt_by_fqme + ): + break - # NOTE: here we pass in any `MktPair` provided by the - # backend broker instead of assuming the pps.toml contains - # the correct contents! - force_mkt=mkt - ) + # update pos table from ledger history and provide a ``MktPair`` + # lookup for internal position accounting calcs. + ppt.update_from_trans(ledger.to_trans(mkt_by_fqme=mkt_by_fqme)) pp_msgs: list[BrokerdPosition] = [] pos: Position @@ -621,6 +633,10 @@ async def trades_dialogue( loglevel=loglevel, ) as feed, ): + # sanity check all the mkt infos + for fqme, flume in feed.flumes.items(): + assert mkt_by_fqme[fqme] == flume.mkt + async with ( ctx.open_stream() as ems_stream, trio.open_nursery() as n, @@ -635,11 +651,8 @@ async def trades_dialogue( _sells=_sells, _reqids=_reqids, - # TODO: load postions from ledger file - _syms={ - fqme: flume.symbol - for fqme, flume in feed.flumes.items() - } + _mkts=mkt_by_fqme, + ) n.start_soon( From 83f1922f6e468986e902bc21d2cff4c63c7b4fab Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 16:36:52 -0400 Subject: [PATCH 158/294] `binance.get_mkt_info()`: bleh, right `@lru_cache` dun work for async.. --- piker/brokers/binance.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index aa1a1f5d..02f234d9 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -23,7 +23,7 @@ Binance backend """ from contextlib import asynccontextmanager as acm from datetime import datetime -from functools import lru_cache +# from functools import lru_cache from decimal import Decimal from typing import ( Any, Union, Optional, @@ -335,7 +335,7 @@ class Client: @acm async def get_client() -> Client: client = Client() - log.info(f'Caching exchange infos..') + log.info('Caching exchange infos..') await client.exch_info() yield client @@ -371,7 +371,13 @@ async def stream_messages( timeouts += 1 if timeouts > 2: log.error("binance feed seems down and slow af? rebooting...") - await ws._connect() + try: + await ws._connect() + except BaseException as err: + assert err + # Wut in the f#@$% is going on here. + with trio.CancelScope(shield=True): + await tractor.breakpoint() continue @@ -476,12 +482,20 @@ async def open_history_client( yield get_ohlc, {'erlangs': 3, 'rate': 3} -@lru_cache +# TODO: bleh, didn't we have an async version of +# this at some point? 
+# @lru_cache async def get_mkt_info( fqme: str, + _cache: dict[str, MktPair] = {} + ) -> tuple[MktPair, Pair]: + both = _cache.get(fqme) + if both: + return both + async with open_cached_client('binance') as client: pair: Pair = await client.exch_info(fqme.upper()) @@ -501,7 +515,9 @@ async def get_mkt_info( bs_mktid=pair.symbol, broker='binance', ) - return mkt, pair + both = mkt, pair + _cache[fqme] = both + return both async def stream_quotes( From fa88924f84c4764ce5669a8d318cb64d932e2c52 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 16:37:15 -0400 Subject: [PATCH 159/294] Do we need feed mod enabled? no right? --- piker/accounting/cli.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 9f894e1a..16712c8c 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -75,6 +75,9 @@ def broker_init( subpath = f'{modpath}.{submodname}' enabled.append(subpath) + # TODO XXX: DO WE NEED THIS? + # enabled.append('piker.data.feed') + # non-blocking setup of brokerd service nursery from ..data import _setup_persistent_brokerd From 06b80ff9edff66488aa245415c63d489da6a68d1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Apr 2023 17:28:43 -0400 Subject: [PATCH 160/294] ARRG, disable `dunst` notifications for now in order mode --- piker/ui/_notify.py | 6 ++++-- piker/ui/order_mode.py | 12 ++++++++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/piker/ui/_notify.py b/piker/ui/_notify.py index 4a33dabb..8cc45e89 100644 --- a/piker/ui/_notify.py +++ b/piker/ui/_notify.py @@ -93,7 +93,7 @@ async def notify_from_ems_status_msg( # TODO: add in standard fill/exec info that maybe we # pack in a broker independent way? f"'{msg.pformat()}'", - ], + ], capture_stdout=True, capture_stderr=True, check=False, @@ -104,4 +104,6 @@ async def notify_from_ems_status_msg( log.runtime(result) except FileNotFoundError: - log.warn('Tried to send a notification but \'notify-send\' not present') + log.warn( + 'Tried to send a notification but \'notify-send\' not present' + ) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index ec789d66..578babc2 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -973,6 +973,12 @@ async def process_trade_msg( client: OrderClient, msg: dict, + # emit linux DE notification? + # XXX: currently my experience with `dunst` is that this + # is horrible slow and clunky and invasive and noisy so i'm + # disabling it for now until we find a better UX solution.. 
+ do_notify: bool = False, + ) -> tuple[Dialog, Status]: fmsg = pformat(msg) @@ -1092,7 +1098,8 @@ async def process_trade_msg( ) mode.lines.remove_line(uuid=oid) msg.req = req - await notify_from_ems_status_msg(msg) + if do_notify: + await notify_from_ems_status_msg(msg) # response to completed 'dialog' for order request case Status( @@ -1101,7 +1108,8 @@ async def process_trade_msg( req=req, ): msg.req = Order(**req) - await notify_from_ems_status_msg(msg) + if do_notify: + await notify_from_ems_status_msg(msg) mode.lines.remove_line(uuid=oid) # each clearing tick is responded individually From 53a41ba93d4ce2bbb561d66fe1fab28357838c48 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 18:17:45 -0400 Subject: [PATCH 161/294] Add subsys log to new `.data._util` --- piker/data/__init__.py | 2 +- piker/data/_m4.py | 5 +---- piker/data/_sampling.py | 6 ++---- piker/data/_sharedmem.py | 5 +---- piker/data/_source.py | 14 +------------- piker/data/_util.py | 34 ++++++++++++++++++++++++++++++++++ piker/data/_web_bs.py | 5 +---- piker/data/cli.py | 7 ++----- piker/data/flows.py | 8 +++----- piker/data/ingest.py | 2 +- 10 files changed, 47 insertions(+), 41 deletions(-) create mode 100644 piker/data/_util.py diff --git a/piker/data/__init__.py b/piker/data/__init__.py index 74eefb83..37da54b0 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -25,7 +25,7 @@ sharing live streams over a network. import tractor import trio -from ..log import ( +from ._util import ( get_console_log, ) from ._normalize import iterticks diff --git a/piker/data/_m4.py b/piker/data/_m4.py index 8452e022..3c23d966 100644 --- a/piker/data/_m4.py +++ b/piker/data/_m4.py @@ -42,10 +42,7 @@ from numba import ( # float64, optional, int64, ) -from ..log import get_logger - - -log = get_logger(__name__) +from ._util import log def ds_m4( diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 208a686b..3c769551 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -38,8 +38,8 @@ from tractor.trionics import ( import trio from trio_typing import TaskStatus -from ..log import ( - get_logger, +from ._util import ( + log, get_console_log, ) from ..service import maybe_spawn_daemon @@ -50,8 +50,6 @@ if TYPE_CHECKING: ) from .feed import _FeedsBus -log = get_logger(__name__) - # highest frequency sample step is 1 second by default, though in # the future we may want to support shorter periods or a dynamic style diff --git a/piker/data/_sharedmem.py b/piker/data/_sharedmem.py index 00865731..2ed1c892 100644 --- a/piker/data/_sharedmem.py +++ b/piker/data/_sharedmem.py @@ -32,14 +32,11 @@ import numpy as np from numpy.lib import recfunctions as rfn import tractor -from ..log import get_logger +from ._util import log from ._source import base_iohlc_dtype from .types import Struct -log = get_logger(__name__) - - # how much is probably dependent on lifestyle _secs_in_day = int(60 * 60 * 24) # we try for a buncha times, but only on a run-every-other-day kinda week. 
diff --git a/piker/data/_source.py b/piker/data/_source.py index 61c2e52f..d1d8be02 100644 --- a/piker/data/_source.py +++ b/piker/data/_source.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) 2018-present Tyler Goodlet (in stewardship for piker0) +# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,22 +18,10 @@ numpy data source coversion helpers. """ from __future__ import annotations -from decimal import ( - Decimal, - ROUND_HALF_EVEN, -) -from typing import Any from bidict import bidict import numpy as np -from .types import Struct -from ..accounting._mktinfo import ( - # mkfqsn, - unpack_fqsn, - # digits_to_dec, - float_digits, -) ohlc_fields = [ ('time', float), diff --git a/piker/data/_util.py b/piker/data/_util.py new file mode 100644 index 00000000..8c78255f --- /dev/null +++ b/piker/data/_util.py @@ -0,0 +1,34 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Data layer module commons. + +''' +from functools import partial + +from ..log import ( + get_logger, + get_console_log, +) +subsys: str = 'piker.data' + +log = get_logger(subsys) + +get_console_log = partial( + get_console_log, + name=subsys, +) diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py index 21b06d68..864ca651 100644 --- a/piker/data/_web_bs.py +++ b/piker/data/_web_bs.py @@ -44,12 +44,9 @@ from trio_websocket._impl import ( ConnectionTimeout, ) -from ..log import get_logger - +from ._util import log from .types import Struct -log = get_logger(__name__) - class NoBsWs: ''' diff --git a/piker/data/cli.py b/piker/data/cli.py index 6984d9ff..cee729e5 100644 --- a/piker/data/cli.py +++ b/piker/data/cli.py @@ -32,14 +32,11 @@ from ..service.marketstore import ( ) from ..cli import cli from .. import watchlists as wl -from ..log import ( - get_logger, +from ._util import ( + log, ) -log = get_logger(__name__) - - @cli.command() @click.option( '--url', diff --git a/piker/data/flows.py b/piker/data/flows.py index 1ddd35c2..ecb727e8 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -15,7 +15,7 @@ # along with this program. If not, see . """ -abstractions for organizing, managing and generally operating-on +Public abstractions for organizing, managing and generally operating-on real-time data processing data-structures. "Streams, flumes, cascades and flows.." 
@@ -35,8 +35,8 @@ from ..accounting._mktinfo import (
     MktPair,
     Symbol,
 )
-from ..log import (
-    get_logger,
+from ._util import (
+    log,
 )
 from .types import Struct
 from ._sharedmem import (
@@ -53,8 +53,6 @@ if TYPE_CHECKING:
     # from pyqtgraph import PlotItem
     from .feed import Feed
 
-log = get_logger(__name__)
-
 
 # TODO: ideas for further abstractions as per
 # https://github.com/pikers/piker/issues/216 and
diff --git a/piker/data/ingest.py b/piker/data/ingest.py
index afb5fc4a..c6f50135 100644
--- a/piker/data/ingest.py
+++ b/piker/data/ingest.py
@@ -23,7 +23,7 @@ Api layer likely in here...
 from types import ModuleType
 from importlib import import_module
 
-from ..log import get_logger
+from ._util import get_logger
 
 log = get_logger(__name__)
 

From d4c8ba19a226bcff9ee9f4a766e1c977c31a4071 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 18 Apr 2023 18:55:01 -0400
Subject: [PATCH 162/294] `.accounting._mktinfo`: better fqme `MktPair` handling

It needed some work..

- Make `unpack_fqme()` always return a 4-tuple handling the venue and
  suffix parts more generally.
- add `Asset.guess_from_mkt_ep_key()` a like-it-sounds hack at trying
  to render a `.dst: Asset` for most purposes throughout the stack.
- always try to preprocess the input `fqme: str` with `unpack_fqme()`
  in `MktPair.from_fqme()` and use the new `Asset` method (above) to
  make up a `.dst: Asset` pulling as much meta-info we can from the
  caller.
- add `MktPair.bs_fqme` to get the thing without the broker part..
- add an `'unknown'` value to the `_derivs` def.
- drop `Symbol.from_fqsn()` and `unpack_fqsn()` more generally (yes
  BREAKING).
---
 piker/accounting/_mktinfo.py | 104 ++++++++++++++++++++++++++++++-----
 1 file changed, 89 insertions(+), 15 deletions(-)

diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index 02ed7a9d..7ab59a66 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -54,6 +54,9 @@ _derivs: list[str] = [
     'continuous_future',
     'option',
     'futures_option',
+
+    # if we can't figure it out, presume the worst XD
+    'unknown',
 ]
 
 # NOTE: a tag for other subsystems to try
@@ -143,6 +146,39 @@ class Asset(Struct, frozen=True):
             rounding=ROUND_HALF_EVEN
         )
 
+    @classmethod
+    def guess_from_mkt_ep_key(
+        cls,
+        mkt_ep_key: str,
+        atype: str | None = None,
+
+    ) -> Asset:
+        '''
+        A hacky guess method for presuming a (target) asset's properties
+        based on either the actual market endpoint key, or config
+        settings from the user.
+
+        '''
+        atype = atype or 'unknown'
+
+        # attempt to strip off any source asset
+        # via presumed syntax of:
+        # - <dst>/<src>
+        # - <dst>.<src>
+        # - etc.
+ for char in ['/', '.']: + dst, _, src = mkt_ep_key.partition(char) + if src: + if not atype: + atype = 'fiat' + break + + return Asset( + name=dst, + atype=atype, + tx_tick=Decimal('0.01'), + ) + def maybe_cons_tokens( tokens: list[Any], @@ -269,15 +305,28 @@ class MktPair(Struct, frozen=True): def from_fqme( cls, fqme: str, + price_tick: float | str, size_tick: float | str, bs_mktid: str, + broker: str | None = None, **kwargs, ) -> MktPair: - broker, key, suffix = unpack_fqme(fqme) + _fqme: str = fqme + if ( + broker + and broker not in fqme + ): + _fqme = f'{fqme}.{broker}' + + broker, mkt_ep_key, venue, suffix = unpack_fqme(_fqme) + dst: Asset = Asset.guess_from_mkt_ep_key( + mkt_ep_key, + atype=kwargs.get('_atype'), + ) # XXX: loading from a fqme string will # leave this pair as "un resolved" meaning @@ -285,13 +334,21 @@ class MktPair(Struct, frozen=True): # which we expect to be filled in by some # backend client with access to that data-info. return cls( - dst=key, # not resolved + # XXX: not resolved to ``Asset`` :( + dst=dst, + + broker=broker, + venue=venue, + # XXX NOTE: we presume this token + # if the expiry for now! + expiry=suffix, + price_tick=price_tick, size_tick=size_tick, bs_mktid=bs_mktid, - broker=broker, **kwargs, + ).copy() @property @@ -381,6 +438,14 @@ class MktPair(Struct, frozen=True): self.broker, ]) + @property + def bs_fqme(self) -> str: + ''' + FQME sin broker part XD + + ''' + return self.fqme.rstrip(f'.{self.broker}') + @property def fqsn(self) -> str: return self.fqme @@ -428,7 +493,9 @@ class MktPair(Struct, frozen=True): def unpack_fqme( fqme: str, -) -> tuple[str, str, str]: + broker: str | None = None + +) -> tuple[str, ...]: ''' Unpack a fully-qualified-symbol-name to ``tuple``. @@ -442,17 +509,26 @@ def unpack_fqme( match tokens: case [mkt_ep, broker]: # probably crypto - # mkt_ep, broker = tokens return ( broker, mkt_ep, '', + '', ) # TODO: swap venue and suffix/deriv-info here? 
case [mkt_ep, venue, suffix, broker]: pass + # handle `bs_mktid` + `broker` input case + case [ + mkt_ep, venue, suffix + ] if ( + broker + and suffix != broker + ): + pass + case [mkt_ep, venue, broker]: suffix = '' @@ -461,14 +537,13 @@ def unpack_fqme( return ( broker, - '.'.join([mkt_ep, venue]), + mkt_ep, + venue, + # '.'.join([mkt_ep, venue]), suffix, ) -unpack_fqsn = unpack_fqme - - class Symbol(Struct): ''' I guess this is some kinda container thing for dealing with @@ -485,27 +560,26 @@ class Symbol(Struct): broker_info: dict[str, dict[str, Any]] = {} @classmethod - def from_fqsn( + def from_fqme( cls, fqsn: str, info: dict[str, Any], ) -> Symbol: - broker, key, suffix = unpack_fqsn(fqsn) + broker, mktep, venue, suffix = unpack_fqme(fqsn) tick_size = info.get('price_tick_size', 0.01) lot_size = info.get('lot_tick_size', 0.0) return Symbol( - key=key, + broker=broker, + key=mktep, tick_size=tick_size, lot_tick_size=lot_size, + venue=venue, suffix=suffix, broker_info={broker: info}, ) - # compat name mapping - from_fqme = from_fqsn - @property def type_key(self) -> str: return list(self.broker_info.values())[0]['asset_type'] From afdbf8e10aeeb58f2c2d93dc67aaaf9dd62926f4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:03:04 -0400 Subject: [PATCH 163/294] `.accounting`: Use `_fqme()` throughout and export decimal converters --- piker/accounting/__init__.py | 18 ++++++++++++++---- piker/accounting/_pos.py | 2 +- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index eb420bab..d8d1fec9 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -32,17 +32,27 @@ from ._pos import ( Position, PpTable, ) +from ._mktinfo import ( + Asset, + dec_digits, + digits_to_dec, + MktPair, +) log = get_logger(__name__) __all__ = [ + 'Asset', + 'dec_digits', + 'digits_to_dec', + 'MktPair', + 'Position', + 'PpTable', 'Transaction', 'TransactionLedger', - 'open_trade_ledger', - 'PpTable', - 'open_pps', 'load_pps_from_ledger', - 'Position', + 'open_pps', + 'open_trade_ledger', ] diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index bbf7ce8c..12c2e19f 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -145,7 +145,7 @@ class Position(Struct): # drop symbol obj in serialized form s = d.pop('symbol') fqme = s.fqme - broker, key, suffix = unpack_fqme(fqme) + broker, mktep, venue, suffix = unpack_fqme(fqme) if isinstance(s, Symbol): sym_info = s.broker_info[broker] From 6f5a2654ab0acb9f2af61b335b7a8bceea27d4f6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:04:00 -0400 Subject: [PATCH 164/294] Port `.clearing` to new `unpack_fqme()` --- piker/clearing/_client.py | 2 +- piker/clearing/_ems.py | 12 +++++------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index a2835183..a9f0fb23 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -238,7 +238,7 @@ async def open_ems( broker control client-API. 
''' - broker, symbol, suffix = unpack_fqme(fqme) + broker, mktep, venue, suffix = unpack_fqme(fqme) async with maybe_open_emsd( broker, diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 0d666ef0..fdb1986a 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -383,7 +383,7 @@ class Router(Struct): brokermod: ModuleType, portal: tractor.Portal, exec_mode: str, - symbol: str, + fqme: str, loglevel: str, ) -> None: @@ -424,7 +424,7 @@ class Router(Struct): # actor to simulate the real IPC load it'll have when also # pulling data from feeds open_trades_endpoint = paper.open_paperboi( - fqme='.'.join([symbol, broker]), + fqme=fqme, loglevel=loglevel, ) @@ -522,15 +522,13 @@ class Router(Struct): indefinitely. ''' - broker, symbol, suffix = unpack_fqme(fqme) - async with ( maybe_open_feed( [fqme], loglevel=loglevel, ) as feed, ): - brokername, _, _ = unpack_fqme(fqme) + brokername, _, _, _ = unpack_fqme(fqme) brokermod = feed.mods[brokername] broker = brokermod.name portal = feed.portals[brokermod] @@ -545,7 +543,7 @@ class Router(Struct): brokermod=brokermod, portal=portal, exec_mode=exec_mode, - symbol=symbol, + fqme=fqme, loglevel=loglevel, ) as relay: @@ -1435,7 +1433,7 @@ async def _emsd_main( global _router assert _router - broker, symbol, suffix = unpack_fqme(fqme) + broker, _, _, _ = unpack_fqme(fqme) # TODO: would be nice if in tractor we can require either a ctx arg, # or a named arg with ctx in it and a type annotation of From d48b2c5b573540f8173369fb93d63efc1b730358 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:04:54 -0400 Subject: [PATCH 165/294] `._paper_engine`: right, load `MktPair` in `fqme is not None` usage --- piker/clearing/_paper_engine.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 511d625c..24afa609 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -14,10 +14,13 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" -Fake trading for forward testing. +''' +Fake trading: a full forward testing simulation engine. -""" +We can real-time emulate any mkt conditions you want bruddr B) +Just slide us the model que quieres.. + +''' from collections import defaultdict from contextlib import asynccontextmanager as acm from datetime import datetime @@ -567,11 +570,16 @@ async def trades_dialogue( # update all transactions with mkt info before # loading any pps - mkt_by_fqme: dict[str, MktPair | None] = {} - for tid, tdict in ledger.data.items(): + mkt_by_fqme: dict[str, MktPair] = {} + if fqme: + mkt, _ = await brokermod.get_mkt_info(fqme.rstrip(f'.{broker}')) + mkt_by_fqme[fqme] = mkt + # for each sym in the ledger load it's `MktPair` info + for tid, tdict in ledger.data.items(): # TODO: switch this to fqme l_fqme = tdict['fqsn'] + if ( gmi and l_fqme not in mkt_by_fqme @@ -680,7 +688,7 @@ async def open_paperboi( if not fqme: assert broker, 'One of `broker` or `fqme` is required siss..!' 
else: - broker, symbol, expiry = unpack_fqme(fqme) + broker, _, _, _ = unpack_fqme(fqme) we_spawned: bool = False service_name = f'paperboi.{broker}' From 4129d693be4822b22771adf9114a7f0ed848ef97 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:05:42 -0400 Subject: [PATCH 166/294] Add `.data.validate` checker for live feed layer More or less a replacement for what @guilledk did with the initial attempt at a "broker check" type script a while back except in this case we're going to always run this validation routine and it now uses a new `FeedInit` struct to ensure backends are delivering the right schema-ed data during startup. Also allows us to stick deprecation warnings / and or strict API compat errors all in one spot (at least for live feeds). Factors out a bunch of `MktPair` related adapter-logic into a new `.validate.valiate_backend()` which warns to the backend implementer via log msgs all the problems outstanding. Ideally we do our backend module endpoint scan-and-complain regarding missing feature support from here as well (eg. search, broker/trade ctl, ledger processing, etc.). --- piker/data/feed.py | 149 +++++++++++------------------- piker/data/validate.py | 201 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 255 insertions(+), 95 deletions(-) create mode 100644 piker/data/validate.py diff --git a/piker/data/feed.py b/piker/data/feed.py index 9d4e09d9..9beec93b 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -26,7 +26,7 @@ from collections import ( Counter, ) from contextlib import asynccontextmanager as acm -from decimal import Decimal +# from decimal import Decimal from datetime import datetime from functools import partial import time @@ -55,8 +55,8 @@ import numpy as np from ..brokers import get_brokermod from ..calc import humanize -from ..log import ( - get_logger, +from ._util import ( + log, get_console_log, ) from ..service import ( @@ -64,6 +64,10 @@ from ..service import ( check_for_service, ) from .flows import Flume +from .validate import ( + FeedInit, + validate_backend, +) from ._sharedmem import ( maybe_open_shm_array, ShmArray, @@ -72,10 +76,8 @@ from ._sharedmem import ( from .ingest import get_ingestormod from .types import Struct from ..accounting._mktinfo import ( - Asset, MktPair, unpack_fqme, - Symbol, ) from ._source import base_iohlc_dtype from ..ui import _search @@ -91,8 +93,6 @@ from ..brokers._util import ( if TYPE_CHECKING: from ..service.marketstore import Storage -log = get_logger(__name__) - class _FeedsBus(Struct): ''' @@ -568,7 +568,7 @@ async def tsdb_backfill( timeframe=timeframe, ) - broker, symbol, expiry = unpack_fqme(fqsn) + broker, *_ = unpack_fqme(fqsn) try: ( latest_start_dt, @@ -790,13 +790,14 @@ async def manage_history( # port = _runtime_vars['_root_mailbox'][1] uid = tractor.current_actor().uid - suffix = '.'.join(uid) + name, uuid = uid + service = name.rstrip(f'.{mod.name}') # (maybe) allocate shm array for this broker/symbol which will # be used for fast near-term history capture and processing. 
     hist_shm, opened = maybe_open_shm_array(
         # key=f'{fqsn}_hist_p{port}',
-        key=f'{fqsn}_hist.{suffix}',
+        key=f'piker.{service}[{uuid[:16]}].{fqsn}.hist',

         # use any broker defined ohlc dtype:
         dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
@@ -814,7 +815,8 @@ async def manage_history(

     rt_shm, opened = maybe_open_shm_array(
         # key=f'{fqsn}_rt_p{port}',
-        key=f'{fqsn}_rt.{suffix}',
+        # key=f'piker.{service}.{fqsn}_rt.{uuid}',
+        key=f'piker.{service}[{uuid[:16]}].{fqsn}.rt',

         # use any broker defined ohlc dtype:
         dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
@@ -933,24 +935,6 @@ async def manage_history(
         await trio.sleep_forever()


-class BackendInitMsg(Struct, frozen=True):
-    '''
-    A stringent data provider startup msg schema validator.
-
-    The fields defined here are matched with those absolutely required
-    from each backend broker/data provider.
-
-    '''
-    fqme: str
-    symbol_info: dict | None = None
-    mkt_info: MktPair | None = None
-    shm_write_opts: dict[str, Any] | None = None
-
-
-def validate_init_msg() -> None:
-    ...
-
-
 async def allocate_persistent_feed(
     bus: _FeedsBus,
     sub_registered: trio.Event,
@@ -961,7 +945,7 @@ async def allocate_persistent_feed(
     loglevel: str,

     start_stream: bool = True,

-    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[FeedInit] = trio.TASK_STATUS_IGNORED,

 ) -> None:
     '''
@@ -991,22 +975,37 @@ async def allocate_persistent_feed(
     some_data_ready = trio.Event()
     feed_is_live = trio.Event()

-    symstr = symstr.lower()
-
     # establish broker backend quote stream by calling
-    # ``stream_quotes()``, which is a required broker backend endpoint.
+    # ``stream_quotes()``, a required broker backend endpoint.
+    init_msgs: (
+        list[FeedInit]  # new
+        | dict[str, dict[str, str]]  # legacy / deprecated
+    )
+
+    # TODO: probably make a struct msg type for this as well
+    # since eventually we do want to have more efficient IPC..
+    first_quote: dict[str, Any]
+
+    symstr = symstr.lower()
     (
-        init_msg,
+        init_msgs,
         first_quote,
     ) = await bus.nursery.start(
         partial(
             mod.stream_quotes,
             send_chan=send,
             feed_is_live=feed_is_live,
+
+            # NOTE / TODO: eventually we may support providing more than
+            # one input here such that a datad daemon can multiplex
+            # multiple live feeds from one task, instead of getting
+            # a new request (and thus new task) for each subscription.
             symbols=[symstr],
+
             loglevel=loglevel,
         )
     )
+
     # TODO: this is indexed by symbol for now since we've planned (for
     # some time) to expect backends to handle single
     # ``.stream_quotes()`` calls with multiple symbols inputs to just
@@ -1029,58 +1028,15 @@ async def allocate_persistent_feed(
     # a small streaming machine around the remote feed which can then
     # do the normal work of sampling and writing shm buffers
     # (depending on if we want sampling done on the far end or not?)
-    per_mkt_init_msg = init_msg[symstr]
-
-    # the broker-specific fully qualified symbol name,
-    # but ensure it is lower-cased for external use.
-    bs_mktid = per_mkt_init_msg['fqsn'].lower()
-
-    # true fqme including broker/provider suffix
-    fqme = '.'.join((bs_mktid, brokername))
-
-    mktinfo = per_mkt_init_msg.get('mkt_info')
-    if not mktinfo:
-
-        log.warning(
-            f'BACKEND {brokername} is using old `Symbol` style API\n'
-            'IT SHOULD BE PORTED TO THE NEW `.accounting._mktinfo.MktPair`\n'
-            'STATTTTT!!!\n'
-        )
-        mktinfo = per_mkt_init_msg['symbol_info']
-
-        # TODO: read out renamed/new tick size fields in block below!
- price_tick = mktinfo.get( - 'price_tick_size', - Decimal('0.01'), - ) - size_tick = mktinfo.get( - 'lot_tick_size', - Decimal('0.0'), - ) - - log.warning(f'FQME: {fqme} -> backend needs port to `MktPair`') - mkt = MktPair.from_fqme( - fqme, - price_tick=price_tick, - size_tick=size_tick, - bs_mktid=bs_mktid, - - _atype=mktinfo['asset_type'] - ) - - symbol = Symbol.from_fqsn( - fqsn=fqme, - info=mktinfo, - ) - - else: - # the new msg-protocol is to expect an already packed - # ``Asset`` and ``MktPair`` object from the backend - symbol = mkt = mktinfo - assert isinstance(mkt, MktPair) - assert isinstance(mkt.dst, Asset) - - assert mkt.type_key + init: FeedInit = validate_backend( + mod, + [symstr], + init_msgs, + ) + bs_mktid: str = init.bs_mktid + mkt: MktPair = init.mkt_info + assert mkt.bs_mktid == bs_mktid + fqme: str = mkt.fqme # HISTORY storage, run 2 tasks: # - a history loader / maintainer @@ -1116,7 +1072,7 @@ async def allocate_persistent_feed( # TODO: we have to use this for now since currently the # MktPair above doesn't render the correct output key it seems # when we provide the `MktInfo` here?..? - mkt=symbol, + mkt=mkt, first_quote=first_quote, _rt_shm_token=rt_shm.token, @@ -1125,11 +1081,17 @@ async def allocate_persistent_feed( izero_rt=izero_rt, ) - # for ambiguous names we simply apply the retreived + # for ambiguous names we simply register the + # flume for all possible name (sub) sets. # feed to that name (for now). - bus.feeds[symstr] = bus.feeds[bs_mktid] = flume + bus.feeds.update({ + symstr: flume, + fqme: flume, + mkt.bs_fqme: flume, + }) - task_status.started() + # signal the ``open_feed_bus()`` caller task to continue + task_status.started(init) if not start_stream: await trio.sleep_forever() @@ -1140,9 +1102,7 @@ async def allocate_persistent_feed( # NOTE: if not configured otherwise, we always sum tick volume # values in the OHLCV sampler. - sum_tick_vlm: bool = init_msg.get( - 'shm_write_opts', {} - ).get('sum_tick_vlm', True) + sum_tick_vlm: bool = (init.shm_write_opts or {}).get('sum_tick_vlm', True) # NOTE: if no high-freq sampled data has (yet) been loaded, # seed the buffer with a history datum - this is most handy @@ -1218,7 +1178,6 @@ async def open_feed_bus( # ensure we are who we think we are servicename = tractor.current_actor().name assert 'brokerd' in servicename - assert brokername in servicename bus = get_feed_bus(brokername) @@ -1573,7 +1532,7 @@ async def open_feed( feed = Feed() for fqsn in fqsns: - brokername, key, suffix = unpack_fqme(fqsn) + brokername, *_ = unpack_fqme(fqsn) bfqsn = fqsn.replace('.' + brokername, '') try: diff --git a/piker/data/validate.py b/piker/data/validate.py new file mode 100644 index 00000000..8e71326c --- /dev/null +++ b/piker/data/validate.py @@ -0,0 +1,201 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+''' +Data feed synchronization protocols, init msgs, and general +data-provider-backend-agnostic schema definitions. + +''' +from decimal import Decimal +from pprint import pformat +from types import ModuleType +from typing import ( + Any, +) + +from .types import Struct +from ..accounting import ( + Asset, + MktPair, +) +from ._util import log + + +class FeedInitializationError(ValueError): + ''' + Live data feed setup failed due to API / msg incompatiblity! + + ''' + + +class FeedInit(Struct, frozen=True): + ''' + A stringent data provider startup msg schema validator. + + The fields defined here are matched with those absolutely required + from each backend broker/data provider. + + ''' + # backend specific, market endpoint id + bs_mktid: str + mkt_info: MktPair + shm_write_opts: dict[str, Any] | None = None + + +def validate_backend( + mod: ModuleType, + syms: list[str], + init_msgs: list[FeedInit] | dict[str, dict[str, Any]], + + # TODO: do a module method scan and report mismatches. + check_eps: bool = False, + + api_log_msg_level: str = 'critical' + +) -> FeedInit: + ''' + Fail on malformed live quotes feed config/init or warn on changes + that haven't been implemented by this backend yet. + + ''' + if isinstance(init_msgs, dict): + for i, (sym_str, msg) in enumerate(init_msgs.items()): + init: FeedInit | dict[str, Any] = msg + + # XXX: eventually this WILL NOT necessarily be true. + if i > 0: + assert not len(init_msgs) == 1 + keys: set = set(init_msgs.keys()) - set(syms) + raise FeedInitializationError( + 'TOO MANY INIT MSGS!\n' + f'Unexpected keys: {keys}\n' + 'ALL MSGS:\n' + f'{pformat(init_msgs)}\n' + ) + + # TODO: once all backends are updated we can remove this branching. + rx_msg: bool = False + warn_msg: str = '' + if not isinstance(init, FeedInit): + warn_msg += ( + '\n' + '--------------------------\n' + ':::DEPRECATED API STYLE:::\n' + '--------------------------\n' + f'`{mod.name}.stream_quotes()` should deliver ' + '`.started(FeedInit)`\n' + f'|-> CURRENTLY it is using DEPRECATED `.started(dict)` style!\n' + f'|-> SEE `FeedInit` in `piker.data.validate`\n' + '--------------------------------------------\n' + ) + else: + rx_msg = True + + # verify feed init state / schema + bs_mktid: str # backend specific (unique) market id + bs_fqme: str # backend specific fqme + mkt: MktPair + + match init: + case { + 'symbol_info': dict(symbol_info), + 'fqsn': bs_fqme, + } | { + 'mkt_info': dict(symbol_info), + 'fqsn': bs_fqme, + }: + symbol_info: dict + warn_msg += ( + 'It may also be still using the legacy `Symbol` style API\n' + 'IT SHOULD BE PORTED TO THE NEW ' + '`.accounting._mktinfo.MktPair`\n' + 'STATTTTT!!!\n' + ) + + # XXX use default legacy (aka discrete precision) mkt + # price/size_ticks if none delivered. 
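+                # eg. a minimal legacy msg (hypothetical values)
+                # matching this case; note the (absent) tick size
+                # fields will fall back to the defaults just below:
+                #   {
+                #       'fqsn': 'xbtusdt',
+                #       'symbol_info': {'asset_type': 'crypto', ...},
+                #   }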
+ price_tick = symbol_info.get( + 'price_tick_size', + Decimal('0.01'), + ) + size_tick = symbol_info.get( + 'lot_tick_size', + Decimal('1'), + ) + mkt = MktPair.from_fqme( + fqme=f'{bs_fqme}.{mod.name}', + + price_tick=price_tick, + size_tick=size_tick, + + bs_mktid=str(init['bs_mktid']), + _atype=symbol_info['asset_type'] + ) + + case { + 'mkt_info': MktPair( + dst=Asset(), + ) as mkt, + 'fqsn': bs_fqme, + }: + warn_msg += ( + f'{mod.name} in API compat transition?\n' + "It's half dict, half man..\n" + '-------------------------------------\n' + ) + + case FeedInit( + # bs_mktid=bs_mktid, + mkt_info=MktPair(dst=Asset()) as mkt, + shm_write_opts=dict(), + ) as init: + log.info( + f'NICE JOB {mod.name} BACKEND!\n' + 'You are fully up to API spec B):\n' + f'{init.to_dict()}' + ) + + case _: + raise FeedInitializationError(init) + + # build a msg if we received a dict for input. + if not rx_msg: + init = FeedInit( + bs_mktid=mkt.bs_mktid, + mkt_info=mkt, + shm_write_opts=init.get('shm_write_opts'), + ) + + # `MktPair` value audits + mkt = init.mkt_info + assert bs_fqme in mkt.fqme + assert mkt.type_key + + # `MktPair` wish list + if not isinstance(mkt.src, Asset): + warn_msg += ( + f'ALSO, {mod.name.upper()} should try to deliver\n' + 'the new `MktPair.src: Asset` field!\n' + '-----------------------------------------------\n' + ) + + # complain about any non-idealities + if warn_msg: + # TODO: would be nice to register an API_COMPAT or something in + # maybe cyan for this in general throughput piker no? + logmeth = getattr(log, api_log_msg_level) + logmeth(warn_msg) + + return init.copy() From adb62dc7b49c45812c8c36b0649e65b3bf987405 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:12:14 -0400 Subject: [PATCH 167/294] Port oustanding parts of codebase to `unpack_fqme()` --- piker/brokers/ib/api.py | 15 ++++++++++++--- piker/ui/_app.py | 4 ++-- piker/ui/_position.py | 6 +++--- piker/ui/order_mode.py | 5 ++++- tests/test_ems.py | 2 +- tests/test_feeds.py | 4 ++-- 6 files changed, 24 insertions(+), 12 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 3d77ee6d..0914dea4 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -676,8 +676,8 @@ class Client: # fqsn parsing stage # ------------------ if '.ib' in pattern: - from ..accounting._mktinfo import unpack_fqsn - _, symbol, expiry = unpack_fqsn(pattern) + from ..accounting._mktinfo import unpack_fqme + _, symbol, venue, expiry = unpack_fqme(pattern) else: symbol = pattern @@ -857,8 +857,17 @@ class Client: self, symbol: str, - ) -> tuple[Contract, Ticker, ContractDetails]: + ) -> tuple[ + Contract, + Ticker, + ContractDetails, + ]: + ''' + Get summary (meta) data for a given symbol str including + ``Contract`` and its details and a (first snapshot of the) + ``Ticker``. + ''' contract = (await self.find_contracts(symbol))[0] ticker: Ticker = self.ib.reqMktData( contract, diff --git a/piker/ui/_app.py b/piker/ui/_app.py index 0e7dad47..a1e31a6e 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -28,7 +28,7 @@ from ..service import maybe_spawn_brokerd from . import _event from ._exec import run_qtractor from ..data.feed import install_brokerd_search -from ..accounting._mktinfo import unpack_fqsn +from ..accounting._mktinfo import unpack_fqme from . 
import _search from ._chart import GodWidget from ..log import get_logger @@ -101,7 +101,7 @@ async def _async_main( needed_brokermods: dict[str, ModuleType] = {} for fqsn in syms: - brokername, *_ = unpack_fqsn(fqsn) + brokername, *_ = unpack_fqme(fqsn) needed_brokermods[brokername] = brokers[brokername] async with ( diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 56659056..f08e234a 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -14,10 +14,10 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" -Position info and display +''' +Position (pos) info and display to track ur PnLz B) -""" +''' from __future__ import annotations from copy import copy from dataclasses import dataclass diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 578babc2..1a54aa5c 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -1002,7 +1002,10 @@ async def process_trade_msg( ) tracker = mode.trackers[msg['account']] tracker.live_pp.update_from_msg(msg) - tracker.update_from_pp(set_as_startup=True) # status/pane UI + tracker.update_from_pp( + set_as_startup=True, + ) + # status/pane UI mode.pane.update_status_ui(tracker) if tracker.live_pp.size: diff --git a/tests/test_ems.py b/tests/test_ems.py index 378c2690..8b8d3600 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -79,7 +79,7 @@ async def order_and_and_wait_for_ppmsg( ''' sent: list[Order] = [] - broker, key, suffix = unpack_fqme(fqme) + broker, mktep, venue, suffix = unpack_fqme(fqme) order = Order( exec_mode=exec_mode, diff --git a/tests/test_feeds.py b/tests/test_feeds.py index 0f88ce5f..df854a79 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -15,7 +15,7 @@ from piker.data import ( ) from piker.data.flows import Flume from piker.accounting._mktinfo import ( - unpack_fqsn, + unpack_fqme, ) @@ -54,7 +54,7 @@ def test_multi_fqsn_feed( brokers = set() for fqme in fqmes: - brokername, key, suffix = unpack_fqsn(fqme) + brokername, *_ = unpack_fqme(fqme) brokers.add(brokername) async def main(): From 6008497b89fdbb3053de6bd50baa0528ba1a0d77 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:12:53 -0400 Subject: [PATCH 168/294] Use more "hierarchical" schema for fsp shm segment names --- piker/fsp/_api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/piker/fsp/_api.py b/piker/fsp/_api.py index 9654a2a1..8226d16b 100644 --- a/piker/fsp/_api.py +++ b/piker/fsp/_api.py @@ -179,8 +179,9 @@ def mk_fsp_shm_key( target: Fsp ) -> str: - uid = tractor.current_actor().uid - return f'{sym}.fsp.{target.name}.{".".join(uid)}' + actor_name, uuid = tractor.current_actor().uid + uuid_snip: str = uuid[:16] + return f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}' def maybe_mk_fsp_shm( From d1cf90e2aed570d951fdf35ef3b88fde8a51d1d5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:27:44 -0400 Subject: [PATCH 169/294] ib: finally convert ledger processing to use `MktPair` --- piker/brokers/ib/broker.py | 60 +++++++++++++++++--------------------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index fdabb645..8379ce5e 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -55,6 +55,8 @@ import pendulum from piker import config from piker.accounting import ( + dec_digits, + digits_to_dec, Position, Transaction, open_trade_ledger, @@ -73,8 +75,8 @@ from 
piker.clearing._messages import ( BrokerdFill, BrokerdError, ) -from piker.accounting._mktinfo import ( - Symbol, +from piker.accounting import ( + MktPair, ) from .api import ( _accounts2clients, @@ -433,7 +435,7 @@ async def update_and_audit_msgs( # raise ValueError( log.error( f'UNEXPECTED POSITION says IB:\n' - 'Maybe they LIQUIDATED YOU or your missing ledger records?\n' + 'Maybe they LIQUIDATED YOU or are missing ledger txs?\n' f'PIKER:\n{pikerfmtmsg}\n\n' ) msgs.append(msg) @@ -1203,48 +1205,38 @@ def norm_trade_records( if asset_type == 'FUT': # (flex) ledger entries don't have any simple 3-char key? symbol = record['symbol'][:3] + asset_type: str = 'future' + + elif asset_type == 'STK': + asset_type: str = 'stock' # try to build out piker fqsn from record. - expiry = record.get( - 'lastTradeDateOrContractMonth') or record.get('expiry') + expiry = ( + record.get('lastTradeDateOrContractMonth') + or record.get('expiry') + ) + if expiry: expiry = str(expiry).strip(' ') suffix = f'{exch}.{expiry}' expiry = pendulum.parse(expiry) # src: str = record['currency'] + price_tick: Decimal = digits_to_dec(dec_digits(price)) - # price_tick_digits = float_digits(price) - tick_size = Decimal( - Decimal(10)**Decimal(str(price)).as_tuple().exponent + pair = MktPair.from_fqme( + fqme=f'{symbol}.{suffix}.ib', + bs_mktid=str(conid), + _atype=asset_type, + + price_tick=price_tick, + # NOTE: for "legacy" assets, volume is normally discreet, not + # a float, but we keep a digit in case the suitz decide + # to get crazy and change it; we'll be kinda ready + # schema-wise.. + size_tick='1', ) - # TODO: convert to MktPair!!! - pair = Symbol.from_fqsn( - fqsn=f'{symbol}.{suffix}.ib', - info={ - 'tick_size': tick_size, - - # NOTE: for "legacy" assets, volume is normally discreet, not - # a float, but we keep a digit in case the suitz decide - # to get crazy and change it; we'll be kinda ready - # schema-wise.. - 'lot_tick_size': 0.0, - - # TODO: remove when we switching from - # ``Symbol`` -> ``MktPair`` - 'asset_type': asset_type, - - # # TODO: figure out a target fin-type name - # # set and normalize to that here! - # 'dst_type': asset_type.lower(), - - # # starting to use new key naming as in ``MktPair`` - # # type have drafted... - # 'src': src, - # 'src_type': 'fiat', - }, - ) fqme = pair.fqme # NOTE: for flex records the normal fields for defining an fqme From b4f2f490012e33ec2c360c5bafb6b98c26153f72 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Apr 2023 19:28:32 -0400 Subject: [PATCH 170/294] ib: make `stream_quotes()` compat with new init msg bare-minimums --- piker/brokers/ib/feed.py | 54 ++++++++++++++++++++++++++++++++++------ 1 file changed, 47 insertions(+), 7 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index e06ae29c..86fdc095 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -714,6 +714,43 @@ def normalize( return data +# TODO! 
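+# (a draft of the desired `get_mkt_info()` ep for this backend,
+# roughly mirroring the `binance` impl; left commented until it's
+# actually ported:)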
+# async def get_mkt_info( +# fqme: str, + +# _cache: dict[str, MktPair] = {} + +# ) -> tuple[MktPair, Pair]: + +# both = _cache.get(fqme) +# if both: +# return both + +# proxy: MethodProxy +# async with open_data_client() as proxy: + +# pair: Pair = await client.exch_info(fqme.upper()) +# mkt = MktPair( +# dst=Asset( +# name=pair.baseAsset, +# atype='crypto', +# tx_tick=digits_to_dec(pair.baseAssetPrecision), +# ), +# src=Asset( +# name=pair.quoteAsset, +# atype='crypto', +# tx_tick=digits_to_dec(pair.quoteAssetPrecision), +# ), +# price_tick=pair.price_tick, +# size_tick=pair.size_tick, +# bs_mktid=pair.symbol, +# broker='binance', +# ) +# both = mkt, pair +# _cache[fqme] = both +# return both + + async def stream_quotes( send_chan: trio.abc.SendChannel, @@ -738,7 +775,6 @@ async def stream_quotes( proxy: MethodProxy async with open_data_client() as proxy: - try: ( con, # Contract @@ -796,20 +832,24 @@ async def stream_quotes( # precision" is normally discreet. syminfo['lot_tick_size'] = size_tick - ibclient = proxy._aio_ns.ib.client - host, port = ibclient.host, ibclient.port + # should be at top level right? + syminfo['bs_mktid'] = con.conId + + # ibclient = proxy._aio_ns.ib.client + # host, port = ibclient.host, ibclient.port # TODO: for loop through all symbols passed in - init_msgs = { + init_msgs: dict[str, dict] = { # pass back token, and bool, signalling if we're the writer # and that history has been written sym: { 'symbol_info': syminfo, 'fqsn': first_quote['fqsn'], + 'bs_mktid': con.conId, }, - 'status': { - 'data_ep': f'{host}:{port}', - }, + # 'status': { + # 'data_ep': f'{host}:{port}', + # }, } return init_msgs, syminfo From 765b8f8e5cbca7c909f17b8ea24f821f58181544 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 13:14:33 -0400 Subject: [PATCH 171/294] Support both input msg-sequence types The legacy version was a `dict` of `dicts` vs. now we want to be handed a `list[FeedInit]`; process both in a factored way. Drop `FeedInit.bs_mktid` since it's already defined on `.mkt.bs_mktid` and we don't really need it top level. --- piker/data/validate.py | 42 ++++++++++++++++++++++++++---------------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/piker/data/validate.py b/piker/data/validate.py index 8e71326c..4b92e662 100644 --- a/piker/data/validate.py +++ b/piker/data/validate.py @@ -48,8 +48,6 @@ class FeedInit(Struct, frozen=True): from each backend broker/data provider. ''' - # backend specific, market endpoint id - bs_mktid: str mkt_info: MktPair shm_write_opts: dict[str, Any] | None = None @@ -70,13 +68,21 @@ def validate_backend( that haven't been implemented by this backend yet. ''' - if isinstance(init_msgs, dict): - for i, (sym_str, msg) in enumerate(init_msgs.items()): - init: FeedInit | dict[str, Any] = msg + inits: list[ + FeedInit | dict[str, Any] + ] = init_msgs - # XXX: eventually this WILL NOT necessarily be true. - if i > 0: - assert not len(init_msgs) == 1 + # convert to list if from old dict-style + if isinstance(init_msgs, dict): + inits = list(init_msgs.values()) + + init: FeedInit | dict[str, Any] + for i, init in enumerate(inits): + + # XXX: eventually this WILL NOT necessarily be true. 
+ if i > 0: + assert not len(init_msgs) == 1 + if isinstance(init_msgs, dict): keys: set = set(init_msgs.keys()) - set(syms) raise FeedInitializationError( 'TOO MANY INIT MSGS!\n' @@ -84,6 +90,11 @@ def validate_backend( 'ALL MSGS:\n' f'{pformat(init_msgs)}\n' ) + else: + raise FeedInitializationError( + 'TOO MANY INIT MSGS!\n' + f'{pformat(init_msgs)}\n' + ) # TODO: once all backends are updated we can remove this branching. rx_msg: bool = False @@ -104,7 +115,6 @@ def validate_backend( rx_msg = True # verify feed init state / schema - bs_mktid: str # backend specific (unique) market id bs_fqme: str # backend specific fqme mkt: MktPair @@ -157,14 +167,15 @@ def validate_backend( ) case FeedInit( - # bs_mktid=bs_mktid, mkt_info=MktPair(dst=Asset()) as mkt, - shm_write_opts=dict(), + shm_write_opts=dict(shm_opts), ) as init: + name: str = mod.name log.info( - f'NICE JOB {mod.name} BACKEND!\n' - 'You are fully up to API spec B):\n' - f'{init.to_dict()}' + f'NICE JOB {name} BACKEND being fully up to API spec B)\n' + f"{name}'s `MktPair` info:\n" + f'{pformat(mkt.to_dict())}\n' + f'shm conf: {pformat(shm_opts)}\n' ) case _: @@ -172,15 +183,14 @@ def validate_backend( # build a msg if we received a dict for input. if not rx_msg: + assert bs_fqme in mkt.fqme init = FeedInit( - bs_mktid=mkt.bs_mktid, mkt_info=mkt, shm_write_opts=init.get('shm_write_opts'), ) # `MktPair` value audits mkt = init.mkt_info - assert bs_fqme in mkt.fqme assert mkt.type_key # `MktPair` wish list From 83802e932aa387e38783bc200f021afcbf26e534 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 13:58:38 -0400 Subject: [PATCH 172/294] Drop (missed) usage of `Symbol.from_fqsn()` in order mode --- piker/ui/order_mode.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 1a54aa5c..256bf7b6 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -46,6 +46,7 @@ from ..clearing._client import ( ) from ._style import _font from ..accounting._mktinfo import Symbol +from ..accounting import MktPair from ..data.feed import ( Feed, Flume, @@ -660,17 +661,17 @@ class OrderMode: and src not in ('dark', 'paperboi') and src not in symbol ): - fqsn = symbol + '.' + src + fqme = symbol + '.' + src brokername = src else: - fqsn = symbol - *head, brokername = fqsn.rsplit('.') + fqme = symbol + *head, brokername = fqme.rsplit('.') # fill out complex fields order.oid = str(order.oid) order.brokers = [brokername] - order.symbol = Symbol.from_fqsn( - fqsn=fqsn, + order.symbol = MktPair.from_fqme( + fqme=fqme, info={}, ) dialog = self.submit_order( From 4131ff11520d5e1128e99a60540b5f4eab976b4f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 13:59:00 -0400 Subject: [PATCH 173/294] Rename `bs_mktid` -> `bs_fqme` and drop (some) `fqsn`s Since we have made `MktPair.bs_mktid` mean something else now, change all the feed setup var names to instead be more representative of the actual value: `bs_fqme: str` and use the new `MktPair.bs_fqme` where necessary. 
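
As a rough sketch of the naming layering this implies (all field
values here are hypothetical):

.. code:: python

    fqme = 'mnq.cme.20230616.ib'

    # tuple ordering per `unpack_fqme()`
    broker, mktep, venue, suffix = unpack_fqme(fqme)
    # -> ('ib', 'mnq', 'cme', '20230616')

    # the "backend specific" fqme is just the fqme with the
    # broker(-provider) suffix dropped:
    bs_fqme = fqme.removesuffix('.ib')  # -> 'mnq.cme.20230616'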
--- piker/data/feed.py | 61 ++++++++++++++++++++++------------------------ 1 file changed, 29 insertions(+), 32 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 9beec93b..51f1275b 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -1033,9 +1033,7 @@ async def allocate_persistent_feed( [symstr], init_msgs, ) - bs_mktid: str = init.bs_mktid mkt: MktPair = init.mkt_info - assert mkt.bs_mktid == bs_mktid fqme: str = mkt.fqme # HISTORY storage, run 2 tasks: @@ -1150,14 +1148,14 @@ async def open_feed_bus( ctx: tractor.Context, brokername: str, - symbols: list[str], # normally expected to the broker-specific fqsn + symbols: list[str], # normally expected to the broker-specific fqme loglevel: str = 'error', tick_throttle: Optional[float] = None, start_stream: bool = True, ) -> dict[ - str, # fqsn + str, # fqme tuple[dict, dict] # pair of dicts of the initmsg and first quotes ]: ''' @@ -1218,33 +1216,32 @@ async def open_feed_bus( # XXX: ``.first_quote`` may be outdated here if this is secondary # subscriber - flume = bus.feeds[symbol] - sym = flume.symbol - bs_mktid = sym.key - fqsn = sym.fqme # true fqsn - assert bs_mktid in fqsn and brokername in fqsn + flume: Flume = bus.feeds[symbol] + mkt: MktPair = flume.mkt + bs_fqme: str = mkt.bs_fqme + fqme: str = mkt.fqme + assert brokername in fqme - if sym.suffix: - bs_mktid = fqsn.removesuffix(f'.{brokername}') - log.warning(f'{brokername} expanded symbol {symbol} -> {bs_mktid}') + if mkt.suffix: + log.warning(f'{brokername} expanded symbol {symbol} -> {bs_fqme}') # pack for ``.started()`` sync msg - flumes[fqsn] = flume + flumes[fqme] = flume - # we use the broker-specific market id (bs_mktid) for the + # we use the broker-specific fqme (bs_fqme) for the # sampler subscription since the backend isn't (yet) expected to - # append it's own name to the fqsn, so we filter on keys which + # append it's own name to the fqme, so we filter on keys which # *do not* include that name (e.g .ib) . - bus._subscribers.setdefault(bs_mktid, set()) + bus._subscribers.setdefault(bs_fqme, set()) # sync feed subscribers with flume handles await ctx.started( - {fqsn: flume.to_msg() - for fqsn, flume in flumes.items()} + {fqme: flume.to_msg() + for fqme, flume in flumes.items()} ) if not start_stream: - log.warning(f'Not opening real-time stream for {fqsn}') + log.warning(f'Not opening real-time stream for {fqme}') await trio.sleep_forever() # real-time stream loop @@ -1258,11 +1255,11 @@ async def open_feed_bus( ): local_subs: dict[str, set[tuple]] = {} - for fqsn, flume in flumes.items(): + for fqme, flume in flumes.items(): # re-send to trigger display loop cycle (necessary especially # when the mkt is closed and no real-time messages are # expected). - await stream.send({fqsn: flume.first_quote}) + await stream.send({fqme: flume.first_quote}) # set a common msg stream for all requested symbols assert stream @@ -1304,9 +1301,9 @@ async def open_feed_bus( # maybe use the current task-id to key the sub list that's # added / removed? Or maybe we can add a general # pause-resume by sub-key api? 
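+            # NOTE: the caller side of this is currently just the
+            # raw str msgs 'pause'/'resume' sent over the open feed
+            # stream, eg.
+            #   await stream.send('pause')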
- bs_mktid = fqsn.removesuffix(f'.{brokername}') - local_subs.setdefault(bs_mktid, set()).add(sub) - bus.add_subs(bs_mktid, {sub}) + bs_fqme = fqme.removesuffix(f'.{brokername}') + local_subs.setdefault(bs_fqme, set()).add(sub) + bus.add_subs(bs_fqme, {sub}) # sync caller with all subs registered state sub_registered.set() @@ -1319,16 +1316,16 @@ async def open_feed_bus( async for msg in stream: if msg == 'pause': - for bs_mktid, subs in local_subs.items(): + for bs_fqme, subs in local_subs.items(): log.info( - f'Pausing {bs_mktid} feed for {uid}') - bus.remove_subs(bs_mktid, subs) + f'Pausing {bs_fqme} feed for {uid}') + bus.remove_subs(bs_fqme, subs) elif msg == 'resume': - for bs_mktid, subs in local_subs.items(): + for bs_fqme, subs in local_subs.items(): log.info( - f'Resuming {bs_mktid} feed for {uid}') - bus.add_subs(bs_mktid, subs) + f'Resuming {bs_fqme} feed for {uid}') + bus.add_subs(bs_fqme, subs) else: raise ValueError(msg) @@ -1342,8 +1339,8 @@ async def open_feed_bus( cs.cancel() # drop all subs for this task from the bus - for bs_mktid, subs in local_subs.items(): - bus.remove_subs(bs_mktid, subs) + for bs_fqme, subs in local_subs.items(): + bus.remove_subs(bs_fqme, subs) class Feed(Struct): From e317310ed329b7118b7e68bc153fffe45f90da73 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 14:03:50 -0400 Subject: [PATCH 174/294] binance: make `stream_quotes()` deliver new `list[FeedInit]` API --- piker/brokers/binance.py | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 02f234d9..0f8d5f77 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -57,6 +57,7 @@ from ._util import ( get_console_log, ) from ..data.types import Struct +from ..data.validate import FeedInit from ..data._web_bs import ( open_autorecon_ws, NoBsWs, @@ -539,23 +540,17 @@ async def stream_quotes( async with ( send_chan as send_chan, ): - mkt_infos: dict[str, MktPair] = {} + init_msgs: list[FeedInit] = [] for sym in symbols: mkt, pair = await get_mkt_info(sym) - mkt_infos[sym] = mkt - symbol = symbols[0] - - init_msgs = { - # pass back token, and bool, signalling if we're the writer - # and that history has been written - symbol: { - 'fqsn': sym, - - 'mkt_info': mkt_infos[sym], - 'shm_write_opts': {'sum_tick_vml': False}, - }, - } + # build out init msgs according to latest spec + init_msgs.append( + FeedInit( + mkt_info=mkt, + shm_write_opts={'sum_tick_vml': False}, + ) + ) @acm async def subscribe(ws: wsproto.WSConnection): From 1b50bff6250e64a10132cc8d2354c0a9bf45ee9a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 14:04:17 -0400 Subject: [PATCH 175/294] Error test harness if `--pdb` passed without `-s` --- tests/conftest.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 87611c55..d5b0d697 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -170,6 +170,20 @@ def open_test_pikerd( # https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory print(f'CURRENT TEST CONF DIR: {tmpconfdir}') + conf = request.config + debug_mode: bool = conf.option.usepdb + if ( + debug_mode + and conf.option.capture != 'no' + ): + # TODO: how to disable capture dynamically? + # conf._configured = False + # conf._do_configure() + pytest.fail( + 'To use `--pdb` (with `tractor` subactors) you also must also ' + 'pass `-s`!' 
+ ) + yield partial( _open_test_pikerd, @@ -182,7 +196,7 @@ def open_test_pikerd( # `--ll ` cli flag. loglevel=loglevel, - debug_mode=request.config.option.usepdb + debug_mode=debug_mode, ) # NOTE: the `tmp_dir` fixture will wipe any files older then 3 test From bcf355e2c876ed700cfc61ff829791b871e813f2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 15:08:10 -0400 Subject: [PATCH 176/294] Fix up `@async_lifo_cache` typing, add TODOs for move to `tractor` --- piker/_cacheables.py | 67 ++++++++++++++++++++++++++++++++++++-------- 1 file changed, 56 insertions(+), 11 deletions(-) diff --git a/piker/_cacheables.py b/piker/_cacheables.py index 6746fc2f..9be4d079 100644 --- a/piker/_cacheables.py +++ b/piker/_cacheables.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -14,15 +14,21 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" +''' Cacheing apis and toolz. -""" +''' from collections import OrderedDict from contextlib import ( asynccontextmanager as acm, ) +from typing import ( + Awaitable, + Callable, + ParamSpec, + TypeVar, +) from tractor.trionics import maybe_open_context @@ -32,19 +38,54 @@ from .log import get_logger log = get_logger(__name__) +T = TypeVar("T") +P = ParamSpec("P") -def async_lifo_cache(maxsize=128): - """Async ``cache`` with a LIFO policy. + +# TODO: move this to `tractor.trionics`.. +# - egs. to replicate for tests: https://github.com/aio-libs/async-lru#usage +# - their suite as well: +# https://github.com/aio-libs/async-lru/tree/master/tests +# - asked trio_util about it too: +# https://github.com/groove-x/trio-util/issues/21 +def async_lifo_cache( + maxsize=128, + + # NOTE: typing style was learned from: + # https://stackoverflow.com/a/71132186 +) -> Callable[ + Callable[P, Awaitable[T]], + Callable[ + Callable[P, Awaitable[T]], + Callable[P, Awaitable[T]], + ], +]: + ''' + Async ``cache`` with a LIFO policy. Implemented my own since no one else seems to have a standard. I'll wait for the smarter people to come up with one, but until then... - """ + + NOTE: when decorating, due to this simple/naive implementation, you + MUST call the decorator like, + + .. code:: python + + @async_lifo_cache() + async def cache_target(): + + ''' cache = OrderedDict() - def decorator(fn): + def decorator( + fn: Callable[P, Awaitable[T]], + ) -> Callable[P, Awaitable[T]]: - async def wrapper(*args): + async def decorated( + *args: P.args, + **kwargs: P.kwargs, + ) -> T: key = args try: return cache[key] @@ -53,15 +94,19 @@ def async_lifo_cache(maxsize=128): # discard last added new entry cache.popitem() - # do it - cache[key] = await fn(*args) + # call underlying + cache[key] = await fn( + *args, + **kwargs, + ) return cache[key] - return wrapper + return decorated return decorator +# TODO: move this to `.brokers.utils`.. 
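+
+# eg. usage; the cache key is the (positional) input args tuple
+# (the fetcher and client names here are hypothetical):
+#
+#   @async_lifo_cache(maxsize=64)
+#   async def fetch_pair_info(sym: str) -> dict:
+#       return await some_client.pair_info(sym)
+#
+#   # the second call with the same args hits the cache instead
+#   # of re-awaiting the wrapped fetcher.
+#   info = await fetch_pair_info('xbtusdt')
+#   info = await fetch_pair_info('xbtusdt')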
@acm async def open_cached_client( brokername: str, From af2f8756c5cf2acef2b0783668cc9a83c5b4c7ca Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Apr 2023 15:29:59 -0400 Subject: [PATCH 177/294] binance: use `@async_lifo_cache` on `.get_mkt_info()` ep --- piker/brokers/binance.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 0f8d5f77..915902fb 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -41,6 +41,7 @@ import numpy as np import tractor import wsproto +from .._cacheables import async_lifo_cache from ..accounting._mktinfo import ( Asset, MktPair, @@ -483,20 +484,12 @@ async def open_history_client( yield get_ohlc, {'erlangs': 3, 'rate': 3} -# TODO: bleh, didn't we have an async version of -# this at some point? -# @lru_cache +@async_lifo_cache() async def get_mkt_info( fqme: str, - _cache: dict[str, MktPair] = {} - ) -> tuple[MktPair, Pair]: - both = _cache.get(fqme) - if both: - return both - async with open_cached_client('binance') as client: pair: Pair = await client.exch_info(fqme.upper()) @@ -517,7 +510,6 @@ async def get_mkt_info( broker='binance', ) both = mkt, pair - _cache[fqme] = both return both From d0e01ff9b66b42b5eced018a8f25151dd4828f14 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 10:41:24 -0400 Subject: [PATCH 178/294] Fix `Symbol.from_fqme()` extra added symbols.. --- piker/accounting/_mktinfo.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 7ab59a66..27996ae0 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -571,11 +571,9 @@ class Symbol(Struct): lot_size = info.get('lot_tick_size', 0.0) return Symbol( - broker=broker, key=mktep, tick_size=tick_size, lot_tick_size=lot_size, - venue=venue, suffix=suffix, broker_info={broker: info}, ) From 0d93871c88fdb66dd8a33ba27b85d3aae3a6a874 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 11:52:23 -0400 Subject: [PATCH 179/294] kraken: drop `Client.cache_assets()`, simpler `.pair_info()`, drop `.mkt_info()` --- piker/brokers/kraken/api.py | 102 ++++++++++++++---------------------- 1 file changed, 40 insertions(+), 62 deletions(-) diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index da377822..026dca71 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -43,7 +43,6 @@ from piker import config from piker.data.types import Struct from piker.accounting._mktinfo import ( Asset, - MktPair, digits_to_dec, ) from piker.brokers._util import ( @@ -274,9 +273,10 @@ class Client: for sym, bal in by_bsmktid.items() } - async def get_assets(self) -> dict[str, dict]: + async def get_assets(self) -> dict[str, Asset]: ''' - Get all assets available for trading and xfer. + Load and cache all asset infos and pack into + our native ``Asset`` struct. https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo @@ -292,27 +292,21 @@ class Client: ''' resp = await self._public('Assets', {}) - return resp['result'] + assets = resp['result'] - async def cache_assets(self) -> None: - ''' - Load and cache all asset infos and pack into - our native ``Asset`` struct. 
- - ''' - assets = await self.get_assets() for bs_mktid, info in assets.items(): - - aname = self._altnames[bs_mktid] = info['altname'] + altname = self._altnames[bs_mktid] = info['altname'] aclass = info['aclass'] self.assets[bs_mktid] = Asset( - name=aname.lower(), + name=altname.lower(), atype=f'crypto_{aclass}', tx_tick=digits_to_dec(info['decimals']), info=info, ) + return self.assets + async def get_trades( self, fetch_limit: int | None = None, @@ -475,57 +469,42 @@ class Client: async def pair_info( self, - pair: str | None = None, + pair_patt: str | None = None, ) -> dict[str, Pair] | Pair: + ''' + Query for a tradeable asset pair (info), or all if no input + pattern is provided. - if pair is not None: - pairs = {'pair': pair} - else: - pairs = None # get all pairs + https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs - resp = await self._public('AssetPairs', pairs) + ''' + # get all pairs by default, or filter + # to whatever pattern is provided as input. + pairs: dict[str, str] | None = None + if pair_patt is not None: + pairs = {'pair': pair_patt} + + resp = await self._public( + 'AssetPairs', + pairs, + ) err = resp['error'] if err: - symbolname = pairs['pair'] if pair else None - raise SymbolNotFound(f'{symbolname}.kraken') + raise SymbolNotFound(pair_patt) - pairs = resp['result'] + pairs: dict[str, Pair] = { - if pair is not None: - _, data = next(iter(pairs.items())) - return Pair(**data) - else: - return { - key: Pair(**data) - for key, data in pairs.items() - } + key: Pair(**data) + for key, data in resp['result'].items() + } + # always cache so we can possibly do faster lookup + self._pairs.update(pairs) - async def mkt_info( - self, - pair_str: str, + if pair_patt is not None: + return next(iter(pairs.items()))[1] - ) -> MktPair: - - ( - bs_mktid, # str - pair_info, # Pair - ) = Client.normalize_symbol(pair_str) - - dst_asset = self.assets[pair_info.base] - - # NOTE XXX parse out the src asset name until we figure out - # how to get the src asset's `Pair` info from kraken.. - src_key = pair_str.lstrip(dst_asset.name.upper()).lower() - - return MktPair( - dst=dst_asset, - price_tick=pair_info.price_tick, - size_tick=pair_info.size_tick, - bs_mktid=bs_mktid, - src=src_key, - broker='kraken', - ) + return pairs async def cache_symbols(self) -> dict: ''' @@ -538,14 +517,15 @@ class Client: ''' if not self._pairs: - self._pairs.update(await self.pair_info()) + pairs = await self.pair_info() + assert self._pairs == pairs # table of all ws and rest keys to their alt-name values. ntable: dict[str, str] = {} - for rest_key in list(self._pairs.keys()): + for rest_key in list(pairs.keys()): - pair: Pair = self._pairs[rest_key] + pair: Pair = pairs[rest_key] altname = pair.altname wsname = pair.wsname ntable[altname] = ntable[rest_key] = ntable[wsname] = altname @@ -561,7 +541,6 @@ class Client: async def search_symbols( self, pattern: str, - limit: int = None, ) -> dict[str, Any]: ''' @@ -672,8 +651,7 @@ class Client: the 'AssetPairs' endpoint, see methods above. ''' - ticker = cls._ntable[ticker] - return ticker.lower(), cls._pairs[ticker] + return cls._ntable[ticker].lower() @acm @@ -693,7 +671,7 @@ async def get_client() -> Client: # at startup, load all symbols, and asset info in # batch requests. 
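+    # NOTE: eg. a (hypothetical) cached entry once loading is done,
+    #   client.assets['XXBT'] -> Asset(
+    #       name='xbt',
+    #       atype='crypto_currency',
+    #       tx_tick=Decimal('0.0000000001'),
+    #   )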
async with trio.open_nursery() as nurse: - nurse.start_soon(client.cache_assets) + nurse.start_soon(client.get_assets) await client.cache_symbols() yield client From d7288972b781210bf776044bfb757fa2b6c1f86f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 11:59:17 -0400 Subject: [PATCH 180/294] kraken: port to `FeedInit` and proper impl of `get_mkt_info()` ep --- piker/brokers/kraken/feed.py | 56 +++++++++++++++++++++++------------- 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index ff4f57a9..5b654970 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -36,15 +36,20 @@ import tractor import trio from piker.accounting._mktinfo import ( + Asset, MktPair, ) -from piker._cacheables import open_cached_client +from piker._cacheables import ( + open_cached_client, + async_lifo_cache, +) from piker.brokers._util import ( BrokerError, DataThrottle, DataUnavailable, ) from piker.data.types import Struct +from piker.data.validate import FeedInit from piker.data._web_bs import open_autorecon_ws, NoBsWs from . import log from .api import ( @@ -278,6 +283,7 @@ async def open_history_client( yield get_ohlc, {'erlangs': 1, 'rate': 1} +@async_lifo_cache() async def get_mkt_info( fqme: str, @@ -293,9 +299,25 @@ async def get_mkt_info( async with open_cached_client('kraken') as client: # uppercase since kraken bs_mktid is always upper - sym_str = fqme.upper() - pair: Pair = await client.pair_info(sym_str) - mkt: MktPair = await client.mkt_info(sym_str) + bs_fqme, _, broker = fqme.partition('.') + pair_str: str = bs_fqme.upper() + bs_mktid: str = Client.normalize_symbol(pair_str) + pair: Pair = await client.pair_info(pair_str) + + assets = client.assets + dst_asset: Asset = assets[pair.base] + src_asset: Asset = assets[pair.quote] + + mkt = MktPair( + dst=dst_asset, + src=src_asset, + + price_tick=pair.price_tick, + size_tick=pair.size_tick, + bs_mktid=bs_mktid, + + broker='kraken', + ) return mkt, pair @@ -321,30 +343,24 @@ async def stream_quotes( ''' ws_pairs: list[str] = [] - mkt_infos: dict[str, MktPair] = {} + init_msgs: list[FeedInit] = [] async with ( send_chan as send_chan, ): for sym_str in symbols: mkt, pair = await get_mkt_info(sym_str) - mkt_infos[sym_str] = mkt + init_msgs.append( + FeedInit( + mkt_info=mkt, + shm_write_opts={ + 'sum_tick_vml': False, + }, + ) + ) + ws_pairs.append(pair.wsname) - symbol = symbols[0].lower() - - # sync with `.data.feed` caller - # TODO: should we make this init msg a `Struct`? 
- init_msgs = { - symbol: { - 'fqsn': sym_str, - 'mkt_info': mkt_infos[sym_str], - 'shm_write_opts': { - 'sum_tick_vml': False, - }, - }, - } - @acm async def subscribe(ws: NoBsWs): From 4a0beda77e4cbce986248b736f6c9d8499a13e4e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 12:00:24 -0400 Subject: [PATCH 181/294] kraken: asyncify and use `get_mkt_info()` in `norm_trade_records()` --- piker/brokers/kraken/broker.py | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 58cc1464..10a55e3d 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -69,6 +69,7 @@ from .api import ( get_client, ) from .feed import ( + get_mkt_info, open_autorecon_ws, NoBsWs, stream_messages, @@ -489,7 +490,7 @@ async def trades_dialogue( ) as ledger, ): # transaction-ify the ledger entries - ledger_trans = norm_trade_records(ledger) + ledger_trans = await norm_trade_records(ledger) if not table.pps: # NOTE: we can't use this since it first needs @@ -507,7 +508,7 @@ async def trades_dialogue( if tids2trades: ledger.write_config() - api_trans = norm_trade_records(tids2trades) + api_trans = await norm_trade_records(tids2trades) # retrieve kraken reported balances # and do diff with ledger to determine @@ -823,7 +824,7 @@ async def handle_order_updates( ) await ems_stream.send(status_msg) - new_trans = norm_trade_records(trades) + new_trans = await norm_trade_records(trades) ppmsgs = trades2pps( table, acctid, @@ -1185,7 +1186,7 @@ async def handle_order_updates( log.warning(f'Unhandled trades update msg: {msg}') -def norm_trade_records( +async def norm_trade_records( ledger: dict[str, Any], ) -> dict[str, Transaction]: @@ -1200,18 +1201,9 @@ def norm_trade_records( }[record['type']] # we normalize to kraken's `altname` always.. - bs_mktid, pair_info = Client.normalize_symbol( - record['pair'] - ) + bs_mktid = Client.normalize_symbol(record['pair']) fqme = f'{bs_mktid}.kraken' - - dst, src = pair_info.wsname.lower().split('/') - mkt = MktPair.from_fqme( - fqme, - price_tick=pair_info.price_tick, - size_tick=pair_info.size_tick, - bs_mktid=bs_mktid, - ) + mkt: MktPair = (await get_mkt_info(fqme))[0] records[tid] = Transaction( fqsn=fqme, From 3cd853cb5dbfd42bd8ad0dd2d0f9cea32eca26d3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 13:04:40 -0400 Subject: [PATCH 182/294] order_mode: revert switch to `MktPair` for pre-order loading --- piker/ui/order_mode.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 256bf7b6..32b8b039 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -670,8 +670,12 @@ class OrderMode: # fill out complex fields order.oid = str(order.oid) order.brokers = [brokername] - order.symbol = MktPair.from_fqme( - fqme=fqme, + + # TODO: change this over to `MktPair`, but it's + # gonna be tough since we don't have any such data + # really in our clearing msg schema.. 
+ order.symbol = Symbol.from_fqme( + fqsn=fqme, info={}, ) dialog = self.submit_order( From 9d04accf2e9e9e1a42837ff62de2fa8ed23f12ae Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Apr 2023 13:36:52 -0400 Subject: [PATCH 183/294] Factor out all history mgmt-logic into a new `.data.history` --- piker/data/feed.py | 738 +--------------------------------------- piker/data/history.py | 770 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 780 insertions(+), 728 deletions(-) create mode 100644 piker/data/history.py diff --git a/piker/data/feed.py b/piker/data/feed.py index 51f1275b..02f0adec 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -14,31 +14,31 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" +''' Data feed apis and infra. -This module is enabled for ``brokerd`` daemons. +This module is enabled for ``brokerd`` daemons and includes mostly +endpoints and middleware to support our real-time, provider agnostic, +live market quotes layer. Historical data loading and processing is also +initiated in parts of the feed bus startup but business logic and +functionality is generally located in the sibling `.data.history` +module. -""" +''' from __future__ import annotations from collections import ( defaultdict, - Counter, ) from contextlib import asynccontextmanager as acm -# from decimal import Decimal -from datetime import datetime from functools import partial import time from types import ModuleType from typing import ( Any, AsyncContextManager, - Callable, Optional, Awaitable, Sequence, - TYPE_CHECKING, Union, ) @@ -50,8 +50,6 @@ from tractor.trionics import ( maybe_open_context, gather_contexts, ) -import pendulum -import numpy as np from ..brokers import get_brokermod from ..calc import humanize @@ -61,17 +59,14 @@ from ._util import ( ) from ..service import ( maybe_spawn_brokerd, - check_for_service, ) from .flows import Flume from .validate import ( FeedInit, validate_backend, ) -from ._sharedmem import ( - maybe_open_shm_array, - ShmArray, - _secs_in_day, +from .history import ( + manage_history, ) from .ingest import get_ingestormod from .types import Struct @@ -79,19 +74,11 @@ from ..accounting._mktinfo import ( MktPair, unpack_fqme, ) -from ._source import base_iohlc_dtype from ..ui import _search from ._sampling import ( - open_sample_stream, sample_and_broadcast, uniform_rate_send, ) -from ..brokers._util import ( - DataUnavailable, -) - -if TYPE_CHECKING: - from ..service.marketstore import Storage class _FeedsBus(Struct): @@ -230,711 +217,6 @@ def get_feed_bus( return _bus -def diff_history( - array: np.ndarray, - timeframe: int, - start_dt: datetime, - end_dt: datetime, - last_tsdb_dt: datetime | None = None - -) -> np.ndarray: - - # no diffing with tsdb dt index possible.. 
- if last_tsdb_dt is None: - return array - - time = array['time'] - return array[time > last_tsdb_dt.timestamp()] - - -async def start_backfill( - mod: ModuleType, - bfqsn: str, - shm: ShmArray, - timeframe: float, - sampler_stream: tractor.MsgStream, - feed_is_live: trio.Event, - - last_tsdb_dt: Optional[datetime] = None, - storage: Optional[Storage] = None, - write_tsdb: bool = True, - tsdb_is_up: bool = False, - - task_status: TaskStatus[tuple] = trio.TASK_STATUS_IGNORED, - -) -> int: - - hist: Callable[ - [int, datetime, datetime], - tuple[np.ndarray, str] - ] - config: dict[str, int] - async with mod.open_history_client(bfqsn) as (hist, config): - - # get latest query's worth of history all the way - # back to what is recorded in the tsdb - array, start_dt, end_dt = await hist( - timeframe, - end_dt=None, - ) - times = array['time'] - - # sample period step size in seconds - step_size_s = ( - pendulum.from_timestamp(times[-1]) - - pendulum.from_timestamp(times[-2]) - ).seconds - - # if the market is open (aka we have a live feed) but the - # history sample step index seems off we report the surrounding - # data and drop into a bp. this case shouldn't really ever - # happen if we're doing history retrieval correctly. - if ( - step_size_s == 60 - and feed_is_live.is_set() - ): - inow = round(time.time()) - diff = inow - times[-1] - if abs(diff) > 60: - surr = array[-6:] - diff_in_mins = round(diff/60., ndigits=2) - log.warning( - f'STEP ERROR `{bfqsn}` for period {step_size_s}s:\n' - f'Off by `{diff}` seconds (or `{diff_in_mins}` mins)\n' - 'Surrounding 6 time stamps:\n' - f'{list(surr["time"])}\n' - 'Here is surrounding 6 samples:\n' - f'{surr}\nn' - ) - - # uncomment this for a hacker who wants to investigate - # this case manually.. - # await tractor.breakpoint() - - # frame's worth of sample-period-steps, in seconds - frame_size_s = len(array) * step_size_s - - to_push = diff_history( - array, - timeframe, - start_dt, - end_dt, - last_tsdb_dt=last_tsdb_dt, - ) - - log.info(f'Pushing {to_push.size} to shm!') - shm.push(to_push, prepend=True) - - # TODO: *** THIS IS A BUG *** - # we need to only broadcast to subscribers for this fqsn.. - # otherwise all fsps get reset on every chart.. - await sampler_stream.send('broadcast_all') - - # signal that backfilling to tsdb's end datum is complete - bf_done = trio.Event() - - # let caller unblock and deliver latest history frame - task_status.started(( - start_dt, - end_dt, - bf_done, - )) - - # based on the sample step size, maybe load a certain amount history - if last_tsdb_dt is None: - if step_size_s not in (1, 60): - raise ValueError( - '`piker` only needs to support 1m and 1s sampling ' - 'but ur api is trying to deliver a longer ' - f'timeframe of {step_size_s} seconds..\n' - 'So yuh.. dun do dat brudder.' - ) - - # when no tsdb "last datum" is provided, we just load - # some near-term history. - periods = { - 1: {'days': 1}, - 60: {'days': 14}, - } - - if tsdb_is_up: - # do a decently sized backfill and load it into storage. - periods = { - 1: {'days': 6}, - 60: {'years': 6}, - } - - period_duration = periods[step_size_s] - - # NOTE: manually set the "latest" datetime which we intend to - # backfill history "until" so as to adhere to the history - # settings above when the tsdb is detected as being empty. - last_tsdb_dt = start_dt.subtract(**period_duration) - - # configure async query throttling - # rate = config.get('rate', 1) - # XXX: legacy from ``trimeter`` code but unsupported now. 
- # erlangs = config.get('erlangs', 1) - - # avoid duplicate history frames with a set of datetime frame - # starts and associated counts of how many duplicates we see - # per time stamp. - starts: Counter[datetime] = Counter() - - # inline sequential loop where we simply pass the - # last retrieved start dt to the next request as - # it's end dt. - while end_dt > last_tsdb_dt: - log.debug( - f'Requesting {step_size_s}s frame ending in {start_dt}' - ) - - try: - array, next_start_dt, end_dt = await hist( - timeframe, - end_dt=start_dt, - ) - - # broker says there never was or is no more history to pull - except DataUnavailable: - log.warning( - f'NO-MORE-DATA: backend {mod.name} halted history!?' - ) - - # ugh, what's a better way? - # TODO: fwiw, we probably want a way to signal a throttle - # condition (eg. with ib) so that we can halt the - # request loop until the condition is resolved? - return - - if ( - next_start_dt in starts - and starts[next_start_dt] <= 6 - ): - start_dt = min(starts) - log.warning( - f"{bfqsn}: skipping duplicate frame @ {next_start_dt}" - ) - starts[start_dt] += 1 - continue - - elif starts[next_start_dt] > 6: - log.warning( - f'NO-MORE-DATA: backend {mod.name} before {next_start_dt}?' - ) - return - - # only update new start point if not-yet-seen - start_dt = next_start_dt - starts[start_dt] += 1 - - assert array['time'][0] == start_dt.timestamp() - - diff = end_dt - start_dt - frame_time_diff_s = diff.seconds - expected_frame_size_s = frame_size_s + step_size_s - - if frame_time_diff_s > expected_frame_size_s: - - # XXX: query result includes a start point prior to our - # expected "frame size" and thus is likely some kind of - # history gap (eg. market closed period, outage, etc.) - # so just report it to console for now. - log.warning( - f'History frame ending @ {end_dt} appears to have a gap:\n' - f'{diff} ~= {frame_time_diff_s} seconds' - ) - - to_push = diff_history( - array, - timeframe, - start_dt, - end_dt, - last_tsdb_dt=last_tsdb_dt, - ) - ln = len(to_push) - if ln: - log.info(f'{ln} bars for {start_dt} -> {end_dt}') - - else: - log.warning( - f'{ln} BARS TO PUSH after diff?!: {start_dt} -> {end_dt}' - ) - - # bail gracefully on shm allocation overrun/full condition - try: - shm.push(to_push, prepend=True) - except ValueError: - log.info( - f'Shm buffer overrun on: {start_dt} -> {end_dt}?' - ) - # can't push the entire frame? so - # push only the amount that can fit.. - break - - log.info( - f'Shm pushed {ln} frame:\n' - f'{start_dt} -> {end_dt}' - ) - - if ( - storage is not None - and write_tsdb - ): - log.info( - f'Writing {ln} frame to storage:\n' - f'{start_dt} -> {end_dt}' - ) - await storage.write_ohlcv( - f'{bfqsn}.{mod.name}', # lul.. - to_push, - timeframe, - ) - - # TODO: can we only trigger this if the respective - # history in "in view"?!? - - # XXX: extremely important, there can be no checkpoints - # in the block above to avoid entering new ``frames`` - # values while we're pipelining the current ones to - # memory... - await sampler_stream.send('broadcast_all') - - # short-circuit (for now) - bf_done.set() - - -async def basic_backfill( - bus: _FeedsBus, - mod: ModuleType, - bfqsn: str, - shms: dict[int, ShmArray], - sampler_stream: tractor.MsgStream, - feed_is_live: trio.Event, - -) -> None: - - # do a legacy incremental backfill from the provider. 
- log.info('No TSDB (marketstored) found, doing basic backfill..') - - # start history backfill task ``backfill_bars()`` is - # a required backend func this must block until shm is - # filled with first set of ohlc bars - for timeframe, shm in shms.items(): - try: - await bus.nursery.start( - partial( - start_backfill, - mod, - bfqsn, - shm, - timeframe, - sampler_stream, - feed_is_live, - ) - ) - except DataUnavailable: - # XXX: timeframe not supported for backend - continue - - -async def tsdb_backfill( - mod: ModuleType, - marketstore: ModuleType, - bus: _FeedsBus, - storage: Storage, - fqsn: str, - bfqsn: str, - shms: dict[int, ShmArray], - sampler_stream: tractor.MsgStream, - feed_is_live: trio.Event, - - task_status: TaskStatus[ - tuple[ShmArray, ShmArray] - ] = trio.TASK_STATUS_IGNORED, - -) -> None: - - # TODO: this should be used verbatim for the pure - # shm backfiller approach below. - dts_per_tf: dict[int, datetime] = {} - - # start history anal and load missing new data via backend. - for timeframe, shm in shms.items(): - # loads a (large) frame of data from the tsdb depending - # on the db's query size limit. - tsdb_history, first_tsdb_dt, last_tsdb_dt = await storage.load( - fqsn, - timeframe=timeframe, - ) - - broker, *_ = unpack_fqme(fqsn) - try: - ( - latest_start_dt, - latest_end_dt, - bf_done, - ) = await bus.nursery.start( - partial( - start_backfill, - mod, - bfqsn, - shm, - timeframe, - sampler_stream, - feed_is_live, - - last_tsdb_dt=last_tsdb_dt, - tsdb_is_up=True, - storage=storage, - ) - ) - except DataUnavailable: - # XXX: timeframe not supported for backend - dts_per_tf[timeframe] = ( - tsdb_history, - last_tsdb_dt, - None, - None, - None, - ) - continue - - # tsdb_history = series.get(timeframe) - dts_per_tf[timeframe] = ( - tsdb_history, - last_tsdb_dt, - latest_start_dt, - latest_end_dt, - bf_done, - ) - - # if len(hist_shm.array) < 2: - # TODO: there's an edge case here to solve where if the last - # frame before market close (at least on ib) was pushed and - # there was only "1 new" row pushed from the first backfill - # query-iteration, then the sample step sizing calcs will - # break upstream from here since you can't diff on at least - # 2 steps... probably should also add logic to compute from - # the tsdb series and stash that somewhere as meta data on - # the shm buffer?.. no se. - - # unblock the feed bus management task - # assert len(shms[1].array) - task_status.started() - - async def back_load_from_tsdb( - timeframe: int, - shm: ShmArray, - ): - ( - tsdb_history, - last_tsdb_dt, - latest_start_dt, - latest_end_dt, - bf_done, - ) = dts_per_tf[timeframe] - - # sync to backend history task's query/load completion - if bf_done: - await bf_done.wait() - - # TODO: eventually it'd be nice to not require a shm array/buffer - # to accomplish this.. maybe we can do some kind of tsdb direct to - # graphics format eventually in a child-actor? - - # TODO: see if there's faster multi-field reads: - # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields - # re-index with a `time` and index field - prepend_start = shm._first.value - array = shm.array - if len(array): - shm_last_dt = pendulum.from_timestamp(shm.array[0]['time']) - else: - shm_last_dt = None - - if last_tsdb_dt: - assert shm_last_dt >= last_tsdb_dt - - # do diff against start index of last frame of history and only - # fill in an amount of datums from tsdb allows for most recent - # to be loaded into mem *before* tsdb data. 
- if ( - last_tsdb_dt - and latest_start_dt - ): - backfilled_size_s = ( - latest_start_dt - last_tsdb_dt - ).seconds - # if the shm buffer len is not large enough to contain - # all missing data between the most recent backend-queried frame - # and the most recent dt-index in the db we warn that we only - # want to load a portion of the next tsdb query to fill that - # space. - log.info( - f'{backfilled_size_s} seconds worth of {timeframe}s loaded' - ) - - # Load TSDB history into shm buffer (for display) if there is - # remaining buffer space. - - if ( - len(tsdb_history) - ): - # load the first (smaller) bit of history originally loaded - # above from ``Storage.load()``. - to_push = tsdb_history[-prepend_start:] - shm.push( - to_push, - - # insert the history pre a "days worth" of samples - # to leave some real-time buffer space at the end. - prepend=True, - # update_first=False, - # start=prepend_start, - field_map=marketstore.ohlc_key_map, - ) - - tsdb_last_frame_start = tsdb_history['Epoch'][0] - - if timeframe == 1: - times = shm.array['time'] - assert (times[1] - times[0]) == 1 - - # load as much from storage into shm possible (depends on - # user's shm size settings). - while shm._first.value > 0: - - tsdb_history = await storage.read_ohlcv( - fqsn, - timeframe=timeframe, - end=tsdb_last_frame_start, - ) - - # empty query - if not len(tsdb_history): - break - - next_start = tsdb_history['Epoch'][0] - if next_start >= tsdb_last_frame_start: - # no earlier data detected - break - else: - tsdb_last_frame_start = next_start - - prepend_start = shm._first.value - to_push = tsdb_history[-prepend_start:] - - # insert the history pre a "days worth" of samples - # to leave some real-time buffer space at the end. - shm.push( - to_push, - prepend=True, - field_map=marketstore.ohlc_key_map, - ) - log.info(f'Loaded {to_push.shape} datums from storage') - - # manually trigger step update to update charts/fsps - # which need an incremental update. - # NOTE: the way this works is super duper - # un-intuitive right now: - # - the broadcaster fires a msg to the fsp subsystem. - # - fsp subsys then checks for a sample step diff and - # possibly recomputes prepended history. - # - the fsp then sends back to the parent actor - # (usually a chart showing graphics for said fsp) - # which tells the chart to conduct a manual full - # graphics loop cycle. - await sampler_stream.send('broadcast_all') - - # TODO: write new data to tsdb to be ready to for next read. - - # backload from db (concurrently per timeframe) once backfilling of - # recent dat a loaded from the backend provider (see - # ``bf_done.wait()`` call). - async with trio.open_nursery() as nurse: - for timeframe, shm in shms.items(): - nurse.start_soon( - back_load_from_tsdb, - timeframe, - shm, - ) - - -async def manage_history( - mod: ModuleType, - bus: _FeedsBus, - fqsn: str, - some_data_ready: trio.Event, - feed_is_live: trio.Event, - timeframe: float = 60, # in seconds - - task_status: TaskStatus[ - tuple[ShmArray, ShmArray] - ] = trio.TASK_STATUS_IGNORED, - -) -> None: - ''' - Load and manage historical data including the loading of any - available series from `marketstore` as well as conducting real-time - update of both that existing db and the allocated shared memory - buffer. - - ''' - - # TODO: is there a way to make each shm file key - # actor-tree-discovery-addr unique so we avoid collisions - # when doing tests which also allocate shms for certain instruments - # that may be in use on the system by some other running daemons? 
- # from tractor._state import _runtime_vars - # port = _runtime_vars['_root_mailbox'][1] - - uid = tractor.current_actor().uid - name, uuid = uid - service = name.rstrip(f'.{mod.name}') - - # (maybe) allocate shm array for this broker/symbol which will - # be used for fast near-term history capture and processing. - hist_shm, opened = maybe_open_shm_array( - # key=f'{fqsn}_hist_p{port}', - key=f'piker.{service}[{uuid[:16]}.{fqsn}.hist', - - # use any broker defined ohlc dtype: - dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), - - # we expect the sub-actor to write - readonly=False, - ) - hist_zero_index = hist_shm.index - 1 - - # TODO: history validation - if not opened: - raise RuntimeError( - "Persistent shm for sym was already open?!" - ) - - rt_shm, opened = maybe_open_shm_array( - # key=f'{fqsn}_rt_p{port}', - # key=f'piker.{service}.{fqsn}_rt.{uuid}', - key=f'piker.{service}[{uuid[:16]}.{fqsn}.rt', - - # use any broker defined ohlc dtype: - dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), - - # we expect the sub-actor to write - readonly=False, - size=3*_secs_in_day, - ) - - # (for now) set the rt (hft) shm array with space to prepend - # only a few days worth of 1s history. - days = 2 - start_index = days*_secs_in_day - rt_shm._first.value = start_index - rt_shm._last.value = start_index - rt_zero_index = rt_shm.index - 1 - - if not opened: - raise RuntimeError( - "Persistent shm for sym was already open?!" - ) - - # register 1s and 1m buffers with the global incrementer task - async with open_sample_stream( - period_s=1., - shms_by_period={ - 1.: rt_shm.token, - 60.: hist_shm.token, - }, - - # NOTE: we want to only open a stream for doing broadcasts on - # backfill operations, not receive the sample index-stream - # (since there's no code in this data feed layer that needs to - # consume it). - open_index_stream=True, - sub_for_broadcasts=False, - - ) as sample_stream: - - log.info('Scanning for existing `marketstored`') - tsdb_is_up = await check_for_service('marketstored') - - bfqsn = fqsn.replace('.' + mod.name, '') - open_history_client = getattr(mod, 'open_history_client', None) - assert open_history_client - - if ( - tsdb_is_up - and opened - and open_history_client - ): - log.info('Found existing `marketstored`') - - from ..service import marketstore - async with ( - marketstore.open_storage_client(fqsn)as storage, - ): - # TODO: drop returning the output that we pass in? - await bus.nursery.start( - tsdb_backfill, - mod, - marketstore, - bus, - storage, - fqsn, - bfqsn, - { - 1: rt_shm, - 60: hist_shm, - }, - sample_stream, - feed_is_live, - ) - - # yield back after client connect with filled shm - task_status.started(( - hist_zero_index, - hist_shm, - rt_zero_index, - rt_shm, - )) - - # indicate to caller that feed can be delivered to - # remote requesting client since we've loaded history - # data that can be used. - some_data_ready.set() - - # history retreival loop depending on user interaction - # and thus a small RPC-prot for remotely controllinlg - # what data is loaded for viewing. 
- await trio.sleep_forever() - - # load less history if no tsdb can be found - elif ( - not tsdb_is_up - and opened - ): - await basic_backfill( - bus, - mod, - bfqsn, - { - 1: rt_shm, - 60: hist_shm, - }, - sample_stream, - feed_is_live, - ) - task_status.started(( - hist_zero_index, - hist_shm, - rt_zero_index, - rt_shm, - )) - some_data_ready.set() - await trio.sleep_forever() - - async def allocate_persistent_feed( bus: _FeedsBus, sub_registered: trio.Event, diff --git a/piker/data/history.py b/piker/data/history.py new file mode 100644 index 00000000..3e0a3a62 --- /dev/null +++ b/piker/data/history.py @@ -0,0 +1,770 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Historical data business logic for load, backfill and tsdb storage. + +''' +from __future__ import annotations +from collections import ( + Counter, +) +from datetime import datetime +from functools import partial +import time +from types import ModuleType +from typing import ( + Callable, + Optional, + TYPE_CHECKING, +) + +import trio +from trio_typing import TaskStatus +import tractor +import pendulum +import numpy as np + +from ._util import ( + log, +) +from ..service import ( + check_for_service, +) +from ._sharedmem import ( + maybe_open_shm_array, + ShmArray, + _secs_in_day, +) +from ..accounting._mktinfo import ( + unpack_fqme, +) +from ._source import base_iohlc_dtype +from ._sampling import ( + open_sample_stream, +) +from ..brokers._util import ( + DataUnavailable, +) + +if TYPE_CHECKING: + from ..service.marketstore import Storage + from .feed import _FeedsBus + + +def diff_history( + array: np.ndarray, + timeframe: int, + start_dt: datetime, + end_dt: datetime, + last_tsdb_dt: datetime | None = None + +) -> np.ndarray: + + # no diffing with tsdb dt index possible.. 
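+    # NOTE: (editorial sketch) the mask below is plain structured-array
+    # boolean indexing; a tiny self-contained example with `bars` and
+    # `cutoff` as illustrative (hypothetical) names:
+    #
+    #   import numpy as np
+    #   bars = np.array(
+    #       [(1., 100.), (2., 101.), (3., 102.)],
+    #       dtype=[('time', 'f8'), ('close', 'f8')],
+    #   )
+    #   cutoff: float = 1.  # epoch of last datum already in the tsdb
+    #   bars[bars['time'] > cutoff]  # -> only the t=2. and t=3. rows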
+    if last_tsdb_dt is None:
+        return array
+
+    time = array['time']
+    return array[time > last_tsdb_dt.timestamp()]
+
+
+async def start_backfill(
+    mod: ModuleType,
+    bfqsn: str,
+    shm: ShmArray,
+    timeframe: float,
+    sampler_stream: tractor.MsgStream,
+    feed_is_live: trio.Event,
+
+    last_tsdb_dt: Optional[datetime] = None,
+    storage: Optional[Storage] = None,
+    write_tsdb: bool = True,
+    tsdb_is_up: bool = False,
+
+    task_status: TaskStatus[tuple] = trio.TASK_STATUS_IGNORED,
+
+) -> int:
+
+    hist: Callable[
+        [int, datetime, datetime],
+        tuple[np.ndarray, str]
+    ]
+    config: dict[str, int]
+    async with mod.open_history_client(bfqsn) as (hist, config):
+
+        # get latest query's worth of history all the way
+        # back to what is recorded in the tsdb
+        array, start_dt, end_dt = await hist(
+            timeframe,
+            end_dt=None,
+        )
+        times = array['time']
+
+        # sample period step size in seconds
+        step_size_s = (
+            pendulum.from_timestamp(times[-1])
+            - pendulum.from_timestamp(times[-2])
+        ).seconds
+
+        # if the market is open (aka we have a live feed) but the
+        # history sample step index seems off we report the surrounding
+        # data and drop into a bp. this case shouldn't really ever
+        # happen if we're doing history retrieval correctly.
+        if (
+            step_size_s == 60
+            and feed_is_live.is_set()
+        ):
+            inow = round(time.time())
+            diff = inow - times[-1]
+            if abs(diff) > 60:
+                surr = array[-6:]
+                diff_in_mins = round(diff/60., ndigits=2)
+                log.warning(
+                    f'STEP ERROR `{bfqsn}` for period {step_size_s}s:\n'
+                    f'Off by `{diff}` seconds (or `{diff_in_mins}` mins)\n'
+                    'Surrounding 6 time stamps:\n'
+                    f'{list(surr["time"])}\n'
+                    'Here are the surrounding 6 samples:\n'
+                    f'{surr}\n'
+                )
+
+                # uncomment this for a hacker who wants to investigate
+                # this case manually..
+                # await tractor.breakpoint()
+
+        # frame's worth of sample-period-steps, in seconds
+        frame_size_s = len(array) * step_size_s
+
+        to_push = diff_history(
+            array,
+            timeframe,
+            start_dt,
+            end_dt,
+            last_tsdb_dt=last_tsdb_dt,
+        )
+
+        log.info(f'Pushing {to_push.size} to shm!')
+        shm.push(to_push, prepend=True)
+
+        # TODO: *** THIS IS A BUG ***
+        # we need to only broadcast to subscribers for this fqsn..
+        # otherwise all fsps get reset on every chart..
+        await sampler_stream.send('broadcast_all')
+
+        # signal that backfilling to tsdb's end datum is complete
+        bf_done = trio.Event()
+
+        # let caller unblock and deliver latest history frame
+        task_status.started((
+            start_dt,
+            end_dt,
+            bf_done,
+        ))
+
+        # based on the sample step size, maybe load a certain amount of history
+        if last_tsdb_dt is None:
+            if step_size_s not in (1, 60):
+                raise ValueError(
+                    '`piker` only needs to support 1m and 1s sampling '
+                    'but ur api is trying to deliver a longer '
+                    f'timeframe of {step_size_s} seconds..\n'
+                    'So yuh.. dun do dat brudder.'
+                )
+
+            # when no tsdb "last datum" is provided, we just load
+            # some near-term history.
+            periods = {
+                1: {'days': 1},
+                60: {'days': 14},
+            }
+
+            if tsdb_is_up:
+                # do a decently sized backfill and load it into storage.
+                periods = {
+                    1: {'days': 6},
+                    60: {'years': 6},
+                }
+
+            period_duration = periods[step_size_s]
+
+            # NOTE: manually set the "latest" datetime which we intend to
+            # backfill history "until" so as to adhere to the history
+            # settings above when the tsdb is detected as being empty.
+            last_tsdb_dt = start_dt.subtract(**period_duration)
+
+        # configure async query throttling
+        # rate = config.get('rate', 1)
+        # XXX: legacy from ``trimeter`` code but unsupported now.
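+        # NOTE: (editorial sketch) the `periods` table above keys
+        # `pendulum` duration kwargs by sample step; synthesizing the
+        # "until" dt is then a plain subtraction, eg:
+        #
+        #   import pendulum
+        #   start_dt = pendulum.now()
+        #   until = start_dt.subtract(**{'days': 14})
+        #   assert until < start_dt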
+ # erlangs = config.get('erlangs', 1) + + # avoid duplicate history frames with a set of datetime frame + # starts and associated counts of how many duplicates we see + # per time stamp. + starts: Counter[datetime] = Counter() + + # inline sequential loop where we simply pass the + # last retrieved start dt to the next request as + # it's end dt. + while end_dt > last_tsdb_dt: + log.debug( + f'Requesting {step_size_s}s frame ending in {start_dt}' + ) + + try: + array, next_start_dt, end_dt = await hist( + timeframe, + end_dt=start_dt, + ) + + # broker says there never was or is no more history to pull + except DataUnavailable: + log.warning( + f'NO-MORE-DATA: backend {mod.name} halted history!?' + ) + + # ugh, what's a better way? + # TODO: fwiw, we probably want a way to signal a throttle + # condition (eg. with ib) so that we can halt the + # request loop until the condition is resolved? + return + + if ( + next_start_dt in starts + and starts[next_start_dt] <= 6 + ): + start_dt = min(starts) + log.warning( + f"{bfqsn}: skipping duplicate frame @ {next_start_dt}" + ) + starts[start_dt] += 1 + continue + + elif starts[next_start_dt] > 6: + log.warning( + f'NO-MORE-DATA: backend {mod.name} before {next_start_dt}?' + ) + return + + # only update new start point if not-yet-seen + start_dt = next_start_dt + starts[start_dt] += 1 + + assert array['time'][0] == start_dt.timestamp() + + diff = end_dt - start_dt + frame_time_diff_s = diff.seconds + expected_frame_size_s = frame_size_s + step_size_s + + if frame_time_diff_s > expected_frame_size_s: + + # XXX: query result includes a start point prior to our + # expected "frame size" and thus is likely some kind of + # history gap (eg. market closed period, outage, etc.) + # so just report it to console for now. + log.warning( + f'History frame ending @ {end_dt} appears to have a gap:\n' + f'{diff} ~= {frame_time_diff_s} seconds' + ) + + to_push = diff_history( + array, + timeframe, + start_dt, + end_dt, + last_tsdb_dt=last_tsdb_dt, + ) + ln = len(to_push) + if ln: + log.info(f'{ln} bars for {start_dt} -> {end_dt}') + + else: + log.warning( + f'{ln} BARS TO PUSH after diff?!: {start_dt} -> {end_dt}' + ) + + # bail gracefully on shm allocation overrun/full condition + try: + shm.push(to_push, prepend=True) + except ValueError: + log.info( + f'Shm buffer overrun on: {start_dt} -> {end_dt}?' + ) + # can't push the entire frame? so + # push only the amount that can fit.. + break + + log.info( + f'Shm pushed {ln} frame:\n' + f'{start_dt} -> {end_dt}' + ) + + if ( + storage is not None + and write_tsdb + ): + log.info( + f'Writing {ln} frame to storage:\n' + f'{start_dt} -> {end_dt}' + ) + await storage.write_ohlcv( + f'{bfqsn}.{mod.name}', # lul.. + to_push, + timeframe, + ) + + # TODO: can we only trigger this if the respective + # history in "in view"?!? + + # XXX: extremely important, there can be no checkpoints + # in the block above to avoid entering new ``frames`` + # values while we're pipelining the current ones to + # memory... + await sampler_stream.send('broadcast_all') + + # short-circuit (for now) + bf_done.set() + + +async def basic_backfill( + bus: _FeedsBus, + mod: ModuleType, + bfqsn: str, + shms: dict[int, ShmArray], + sampler_stream: tractor.MsgStream, + feed_is_live: trio.Event, + +) -> None: + + # do a legacy incremental backfill from the provider. 
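+    # NOTE: (editorial sketch) `await nursery.start(...)` as used below
+    # (and in `tsdb_backfill()`) blocks until the child task calls
+    # `task_status.started(...)`; a minimal standalone model:
+    #
+    #   import trio
+    #
+    #   async def child(task_status=trio.TASK_STATUS_IGNORED):
+    #       done = trio.Event()
+    #       task_status.started(('start-dt', 'end-dt', done))
+    #       done.set()
+    #
+    #   async def main():
+    #       async with trio.open_nursery() as n:
+    #           start, end, done = await n.start(child)
+    #           await done.wait()
+    #
+    #   trio.run(main)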
+ log.info('No TSDB (marketstored) found, doing basic backfill..') + + # start history backfill task ``backfill_bars()`` is + # a required backend func this must block until shm is + # filled with first set of ohlc bars + for timeframe, shm in shms.items(): + try: + await bus.nursery.start( + partial( + start_backfill, + mod, + bfqsn, + shm, + timeframe, + sampler_stream, + feed_is_live, + ) + ) + except DataUnavailable: + # XXX: timeframe not supported for backend + continue + + +async def tsdb_backfill( + mod: ModuleType, + marketstore: ModuleType, + bus: _FeedsBus, + storage: Storage, + fqsn: str, + bfqsn: str, + shms: dict[int, ShmArray], + sampler_stream: tractor.MsgStream, + feed_is_live: trio.Event, + + task_status: TaskStatus[ + tuple[ShmArray, ShmArray] + ] = trio.TASK_STATUS_IGNORED, + +) -> None: + + # TODO: this should be used verbatim for the pure + # shm backfiller approach below. + dts_per_tf: dict[int, datetime] = {} + + # start history anal and load missing new data via backend. + for timeframe, shm in shms.items(): + # loads a (large) frame of data from the tsdb depending + # on the db's query size limit. + tsdb_history, first_tsdb_dt, last_tsdb_dt = await storage.load( + fqsn, + timeframe=timeframe, + ) + + broker, *_ = unpack_fqme(fqsn) + try: + ( + latest_start_dt, + latest_end_dt, + bf_done, + ) = await bus.nursery.start( + partial( + start_backfill, + mod, + bfqsn, + shm, + timeframe, + sampler_stream, + feed_is_live, + + last_tsdb_dt=last_tsdb_dt, + tsdb_is_up=True, + storage=storage, + ) + ) + except DataUnavailable: + # XXX: timeframe not supported for backend + dts_per_tf[timeframe] = ( + tsdb_history, + last_tsdb_dt, + None, + None, + None, + ) + continue + + # tsdb_history = series.get(timeframe) + dts_per_tf[timeframe] = ( + tsdb_history, + last_tsdb_dt, + latest_start_dt, + latest_end_dt, + bf_done, + ) + + # if len(hist_shm.array) < 2: + # TODO: there's an edge case here to solve where if the last + # frame before market close (at least on ib) was pushed and + # there was only "1 new" row pushed from the first backfill + # query-iteration, then the sample step sizing calcs will + # break upstream from here since you can't diff on at least + # 2 steps... probably should also add logic to compute from + # the tsdb series and stash that somewhere as meta data on + # the shm buffer?.. no se. + + # unblock the feed bus management task + # assert len(shms[1].array) + task_status.started() + + async def back_load_from_tsdb( + timeframe: int, + shm: ShmArray, + ): + ( + tsdb_history, + last_tsdb_dt, + latest_start_dt, + latest_end_dt, + bf_done, + ) = dts_per_tf[timeframe] + + # sync to backend history task's query/load completion + if bf_done: + await bf_done.wait() + + # TODO: eventually it'd be nice to not require a shm array/buffer + # to accomplish this.. maybe we can do some kind of tsdb direct to + # graphics format eventually in a child-actor? + + # TODO: see if there's faster multi-field reads: + # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields + # re-index with a `time` and index field + prepend_start = shm._first.value + array = shm.array + if len(array): + shm_last_dt = pendulum.from_timestamp(shm.array[0]['time']) + else: + shm_last_dt = None + + if last_tsdb_dt: + assert shm_last_dt >= last_tsdb_dt + + # do diff against start index of last frame of history and only + # fill in an amount of datums from tsdb allows for most recent + # to be loaded into mem *before* tsdb data. 
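+        # NOTE: (editorial sketch) `pendulum` dt subtraction yields a
+        # `timedelta`-like duration whose `.seconds` is used below for
+        # the gap size, eg:
+        #
+        #   import pendulum
+        #   t0 = pendulum.datetime(2023, 1, 1, 0, 0, 0)
+        #   t1 = pendulum.datetime(2023, 1, 1, 0, 30, 0)
+        #   assert (t1 - t0).seconds == 1800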
+        if (
+            last_tsdb_dt
+            and latest_start_dt
+        ):
+            backfilled_size_s = (
+                latest_start_dt - last_tsdb_dt
+            ).seconds
+            # if the shm buffer len is not large enough to contain
+            # all missing data between the most recent backend-queried frame
+            # and the most recent dt-index in the db we warn that we only
+            # want to load a portion of the next tsdb query to fill that
+            # space.
+            log.info(
+                f'{backfilled_size_s} seconds worth of {timeframe}s loaded'
+            )
+
+        # Load TSDB history into shm buffer (for display) if there is
+        # remaining buffer space.
+
+        if (
+            len(tsdb_history)
+        ):
+            # load the first (smaller) bit of history originally loaded
+            # above from ``Storage.load()``.
+            to_push = tsdb_history[-prepend_start:]
+            shm.push(
+                to_push,
+
+                # insert the history pre a "days worth" of samples
+                # to leave some real-time buffer space at the end.
+                prepend=True,
+                # update_first=False,
+                # start=prepend_start,
+                field_map=marketstore.ohlc_key_map,
+            )
+
+            tsdb_last_frame_start = tsdb_history['Epoch'][0]
+
+            if timeframe == 1:
+                times = shm.array['time']
+                assert (times[1] - times[0]) == 1
+
+            # load as much from storage into shm as possible (depends on
+            # user's shm size settings).
+            while shm._first.value > 0:
+
+                tsdb_history = await storage.read_ohlcv(
+                    fqsn,
+                    timeframe=timeframe,
+                    end=tsdb_last_frame_start,
+                )
+
+                # empty query
+                if not len(tsdb_history):
+                    break
+
+                next_start = tsdb_history['Epoch'][0]
+                if next_start >= tsdb_last_frame_start:
+                    # no earlier data detected
+                    break
+                else:
+                    tsdb_last_frame_start = next_start
+
+                prepend_start = shm._first.value
+                to_push = tsdb_history[-prepend_start:]
+
+                # insert the history pre a "days worth" of samples
+                # to leave some real-time buffer space at the end.
+                shm.push(
+                    to_push,
+                    prepend=True,
+                    field_map=marketstore.ohlc_key_map,
+                )
+                log.info(f'Loaded {to_push.shape} datums from storage')
+
+        # manually trigger step update to update charts/fsps
+        # which need an incremental update.
+        # NOTE: the way this works is super duper
+        # un-intuitive right now:
+        # - the broadcaster fires a msg to the fsp subsystem.
+        # - fsp subsys then checks for a sample step diff and
+        #   possibly recomputes prepended history.
+        # - the fsp then sends back to the parent actor
+        #   (usually a chart showing graphics for said fsp)
+        #   which tells the chart to conduct a manual full
+        #   graphics loop cycle.
+        await sampler_stream.send('broadcast_all')
+
+        # TODO: write new data to tsdb to be ready for the next read.
+
+    # backload from db (concurrently per timeframe) once backfilling of
+    # recent data is loaded from the backend provider (see
+    # ``bf_done.wait()`` call).
+    async with trio.open_nursery() as nurse:
+        for timeframe, shm in shms.items():
+            nurse.start_soon(
+                back_load_from_tsdb,
+                timeframe,
+                shm,
+            )
+
+
+async def manage_history(
+    mod: ModuleType,
+    bus: _FeedsBus,
+    fqsn: str,
+    some_data_ready: trio.Event,
+    feed_is_live: trio.Event,
+    timeframe: float = 60,  # in seconds
+
+    task_status: TaskStatus[
+        tuple[ShmArray, ShmArray]
+    ] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+    '''
+    Load and manage historical data including the loading of any
+    available series from `marketstore` as well as conducting real-time
+    update of both that existing db and the allocated shared memory
+    buffer.
+
+    '''
+
+    # TODO: is there a way to make each shm file key
+    # actor-tree-discovery-addr unique so we avoid collisions
+    # when doing tests which also allocate shms for certain instruments
+    # that may be in use on the system by some other running daemons?
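+    # NOTE: (editorial sketch) the rt-buffer cursor presets set below
+    # reserve prepend space up front; the arithmetic in isolation:
+    #
+    #   _secs_in_day = 60 * 60 * 24
+    #   size = 3 * _secs_in_day          # total allocated 1s slots
+    #   start_index = 2 * _secs_in_day   # both cursors start here
+    #   assert size - start_index == _secs_in_day  # ~1 day append room
+    #   assert start_index > 0           # ~2 days of prepend room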
+    # from tractor._state import _runtime_vars
+    # port = _runtime_vars['_root_mailbox'][1]
+
+    uid = tractor.current_actor().uid
+    name, uuid = uid
+
+    # NOTE: strip the '.<brokername>' suffix; `str.rstrip()` takes a
+    # char-set (not a suffix) and can over-strip.
+    service = name.removesuffix(f'.{mod.name}')
+
+    # (maybe) allocate shm array for this broker/symbol which will
+    # be used for fast near-term history capture and processing.
+    hist_shm, opened = maybe_open_shm_array(
+        # key=f'{fqsn}_hist_p{port}',
+        key=f'piker.{service}[{uuid[:16]}.{fqsn}.hist',
+
+        # use any broker defined ohlc dtype:
+        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
+
+        # we expect the sub-actor to write
+        readonly=False,
+    )
+    hist_zero_index = hist_shm.index - 1
+
+    # TODO: history validation
+    if not opened:
+        raise RuntimeError(
+            "Persistent shm for sym was already open?!"
+        )
+
+    rt_shm, opened = maybe_open_shm_array(
+        # key=f'{fqsn}_rt_p{port}',
+        # key=f'piker.{service}.{fqsn}_rt.{uuid}',
+        key=f'piker.{service}[{uuid[:16]}.{fqsn}.rt',
+
+        # use any broker defined ohlc dtype:
+        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
+
+        # we expect the sub-actor to write
+        readonly=False,
+        size=3*_secs_in_day,
+    )
+
+    # (for now) set the rt (hft) shm array with space to prepend
+    # only a few days worth of 1s history.
+    days = 2
+    start_index = days*_secs_in_day
+    rt_shm._first.value = start_index
+    rt_shm._last.value = start_index
+    rt_zero_index = rt_shm.index - 1
+
+    if not opened:
+        raise RuntimeError(
+            "Persistent shm for sym was already open?!"
+        )
+
+    # register 1s and 1m buffers with the global incrementer task
+    async with open_sample_stream(
+        period_s=1.,
+        shms_by_period={
+            1.: rt_shm.token,
+            60.: hist_shm.token,
+        },
+
+        # NOTE: we want to only open a stream for doing broadcasts on
+        # backfill operations, not receive the sample index-stream
+        # (since there's no code in this data feed layer that needs to
+        # consume it).
+        open_index_stream=True,
+        sub_for_broadcasts=False,
+
+    ) as sample_stream:
+
+        log.info('Scanning for existing `marketstored`')
+        tsdb_is_up = await check_for_service('marketstored')
+
+        bfqsn = fqsn.replace('.' + mod.name, '')
+        open_history_client = getattr(mod, 'open_history_client', None)
+        assert open_history_client
+
+        if (
+            tsdb_is_up
+            and opened
+            and open_history_client
+        ):
+            log.info('Found existing `marketstored`')
+
+            from ..service import marketstore
+            async with (
+                marketstore.open_storage_client(fqsn) as storage,
+            ):
+                # TODO: drop returning the output that we pass in?
+                await bus.nursery.start(
+                    tsdb_backfill,
+                    mod,
+                    marketstore,
+                    bus,
+                    storage,
+                    fqsn,
+                    bfqsn,
+                    {
+                        1: rt_shm,
+                        60: hist_shm,
+                    },
+                    sample_stream,
+                    feed_is_live,
+                )
+
+                # yield back after client connect with filled shm
+                task_status.started((
+                    hist_zero_index,
+                    hist_shm,
+                    rt_zero_index,
+                    rt_shm,
+                ))
+
+                # indicate to caller that feed can be delivered to
+                # remote requesting client since we've loaded history
+                # data that can be used.
+                some_data_ready.set()
+
+                # history retrieval loop depending on user interaction
+                # and thus a small RPC-prot for remotely controlling
+                # what data is loaded for viewing.
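+                # NOTE: (editorial sketch) the `sleep_forever()` here
+                # is the usual `trio` pinning-open idiom: it keeps
+                # every enclosing cm (shm, storage client, sample
+                # stream) allocated until this task is cancelled, eg:
+                #
+                #   import trio
+                #
+                #   async def pin_open():
+                #       async with trio.open_nursery():
+                #           await trio.sleep_forever()  # till cancelled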
+ await trio.sleep_forever() + + # load less history if no tsdb can be found + elif ( + not tsdb_is_up + and opened + ): + await basic_backfill( + bus, + mod, + bfqsn, + { + 1: rt_shm, + 60: hist_shm, + }, + sample_stream, + feed_is_live, + ) + task_status.started(( + hist_zero_index, + hist_shm, + rt_zero_index, + rt_shm, + )) + some_data_ready.set() + await trio.sleep_forever() From 59743b7b736957596de5cedcab038e3efbc05ada Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 13:05:34 -0400 Subject: [PATCH 184/294] Rework `NoBsWs` to avoid agen/`trio` incompatibility `trio`'s internals don't allow for async generator (and thus by consequence dynamic reset of async exit stacks containing `@acm`s) interleaving since doing so corrupts the cancel-scope stack. See details in: - https://github.com/python-trio/trio/issues/638 - https://trio-util.readthedocs.io/en/latest/#trio_util.trio_async_generator - `trio._core._run.MISNESTING_ADVICE` We originally tried to address this using `@trio_util.trio_async_generator` in backend streaming code but for whatever reason stopped working recently (at least for me) and it's more or less implemented the same way as this patch but with more layers and an extra dep. I also don't want us to have to address this problem again if/when that lib isn't able to keep up to date with wtv `trio` is doing.. So instead this is a complete rewrite of the conc design of our auto-reconnect ws API to move all reset logic and msg relay into a bg task which is respawned on reset-requiring events: user spec-ed msg recv latency, network errors, roaming events. Deatz: - drop all usage of `AsyncExitStack` and no longer require client code to (hackily) call `NoBsWs._connect()` on msg latency conditions, intead this is all done behind the scenes and the user can instead pass in a `msg_recv_timeout: float`. - massively simplify impl of `NoBsWs` and move all reset logic into a new `_reconnect_forever()` task. - offer use of `reset_after: int` a count value that determines how many `msg_recv_timeout` events are allowed to occur before reconnecting the entire ws from scratch again. --- piker/data/_web_bs.py | 316 +++++++++++++++++++++++++++++++----------- 1 file changed, 233 insertions(+), 83 deletions(-) diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py index 864ca651..c8d1e83e 100644 --- a/piker/data/_web_bs.py +++ b/piker/data/_web_bs.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -18,23 +18,29 @@ ToOlS fOr CoPInG wITh "tHE wEB" protocols. """ +from __future__ import annotations from contextlib import ( - asynccontextmanager, - AsyncExitStack, + asynccontextmanager as acm, ) from itertools import count +from functools import partial from types import ModuleType from typing import ( Any, Optional, Callable, + AsyncContextManager, AsyncGenerator, Iterable, ) import json import trio -import trio_websocket +from trio_typing import TaskStatus +from trio_websocket import ( + WebSocketConnection, + open_websocket_url, +) from wsproto.utilities import LocalProtocolError from trio_websocket._impl import ( ConnectionClosed, @@ -52,9 +58,15 @@ class NoBsWs: ''' Make ``trio_websocket`` sockets stay up no matter the bs. 
-    You can provide a ``fixture`` async-context-manager which will be
-    enter/exitted around each reconnect operation.
+    A shim interface that allows client code to stream from some
+    ``WebSocketConnection`` but where any connectivity bs is handled
+    automatically and entirely in the background.
+
+    NOTE: this type should never be created directly but instead is
+    provided via the ``open_autorecon_ws()`` factory below.
+
     '''
+    # apparently we can QoS for all sorts of reasons..so catch em.
     recon_errors = (
         ConnectionClosed,
         DisconnectionTimeout,
@@ -67,68 +79,42 @@ class NoBsWs:
     def __init__(
         self,
         url: str,
-        stack: AsyncExitStack,
-        fixture: Optional[Callable] = None,
+        rxchan: trio.MemoryReceiveChannel,
+        msg_recv_timeout: float,
+
         serializer: ModuleType = json
     ):
         self.url = url
-        self.fixture = fixture
-        self._stack = stack
-        self._ws: 'WebSocketConnection' = None  # noqa
+        self._rx = rxchan
+        self._timeout = msg_recv_timeout
 
-        # TODO: is there some method we can call
-        # on the underlying `._ws` to get this?
-        self._connected: bool = False
+        # signaling between caller and relay task which determines when
+        # socket is connected (and subscribed).
+        self._connected: trio.Event = trio.Event()
 
-    async def _connect(
-        self,
-        tries: int = 1000,
-    ) -> None:
+        # dynamically reset by the bg relay task
+        self._ws: WebSocketConnection | None = None
+        self._cs: trio.CancelScope | None = None
 
-        self._connected = False
-        while True:
-            try:
-                await self._stack.aclose()
-            except self.recon_errors:
-                await trio.sleep(0.5)
-            else:
-                break
-
-        last_err = None
-        for i in range(tries):
-            try:
-                self._ws = await self._stack.enter_async_context(
-                    trio_websocket.open_websocket_url(self.url)
-                )
-
-                if self.fixture is not None:
-                    # rerun user code fixture
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self)
-                    )
-
-                    assert ret is None
-
-                    log.info(f'Connection success: {self.url}')
-
-                self._connected = True
-                return self._ws
-
-            except self.recon_errors as err:
-                last_err = err
-                log.error(
-                    f'{self} connection bail with '
-                    f'{type(err)}...retry attempt {i}'
-                )
-                await trio.sleep(0.5)
-                self._connected = False
-                continue
-            else:
-                log.exception('ws connection fail...')
-                raise last_err
+        # interchange codec methods
+        # TODO: obviously the method API here may be different
+        # for another interchange format..
+        self._dumps: Callable = serializer.dumps
+        self._loads: Callable = serializer.loads
 
     def connected(self) -> bool:
-        return self._connected
+        return self._connected.is_set()
+
+    async def reset(self) -> None:
+        '''
+        Reset the underlying ws connection by cancelling
+        the bg relay task and waiting for it to signal
+        a new connection.
+
+        '''
+        self._connected = trio.Event()
+        self._cs.cancel()
+        await self._connected.wait()
 
     async def send_msg(
         self,
@@ -136,18 +122,15 @@ class NoBsWs:
    ) -> None:
        while True:
            try:
-                return await self._ws.send_message(json.dumps(data))
+                msg: Any = self._dumps(data)
+                return await self._ws.send_message(msg)
            except self.recon_errors:
-                await self._connect()
+                await self.reset()
 
-    async def recv_msg(
-        self,
-    ) -> Any:
-        while True:
-            try:
-                return json.loads(await self._ws.get_message())
-            except self.recon_errors:
-                await self._connect()
+    async def recv_msg(self) -> Any:
+        msg: Any = await self._rx.receive()
+        data = self._loads(msg)
+        return data
 
     def __aiter__(self):
         return self
@@ -155,27 +138,194 @@ class NoBsWs:
     async def __anext__(self):
         return await self.recv_msg()
 
+    def set_recv_timeout(
+        self,
+        timeout: float,
+    ) -> None:
+        self._timeout = timeout
 
-@asynccontextmanager
+
+async def _reconnect_forever(
+    url: str,
+    snd: trio.MemorySendChannel,
+    nobsws: NoBsWs,
+    reset_after: int,  # msg recv timeout before reset attempt
+
+    fixture: AsyncContextManager | None = None,
+    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+
+    async def proxy_msgs(
+        ws: WebSocketConnection,
+        pcs: trio.CancelScope,  # parent cancel scope
+    ):
+        '''
+        Receive (under `timeout` deadline) all msgs from the underlying
+        websocket and relay them to (calling) parent task via ``trio``
+        mem chan.
+
+        '''
+        # after so many msg recv timeouts, reset the connection
+        timeouts: int = 0
+
+        while True:
+            with trio.move_on_after(
+                # can be dynamically changed by user code
+                nobsws._timeout,
+            ) as cs:
+                try:
+                    msg: Any = await ws.get_message()
+                    await snd.send(msg)
+                except nobsws.recon_errors:
+                    log.exception(
+                        f'{url} connection bail with:'
+                    )
+                    await trio.sleep(0.5)
+                    pcs.cancel()
+
+                    # go back to reconnect loop in parent task
+                    return
+
+            if cs.cancelled_caught:
+                timeouts += 1
+                if timeouts > reset_after:
+                    log.error(
+                        'WS feed seems down and slow af? .. resetting\n'
+                    )
+                    pcs.cancel()
+
+                    # go back to reconnect loop in parent task
+                    return
+
+    async def open_fixture(
+        fixture: AsyncContextManager,
+        nobsws: NoBsWs,
+        task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+    ):
+        '''
+        Open user provided `@acm` and sleep until any connection
+        reset occurs.
+
+        '''
+        async with fixture(nobsws) as ret:
+            assert ret is None
+            task_status.started()
+            await trio.sleep_forever()
+
+    # last_err = None
+    nobsws._connected = trio.Event()
+    task_status.started()
+
+    while not snd._closed:
+        log.info(f'{url} trying (RE)CONNECT')
+
+        async with trio.open_nursery() as n:
+            cs = nobsws._cs = n.cancel_scope
+            ws: WebSocketConnection
+            async with open_websocket_url(url) as ws:
+                nobsws._ws = ws
+                log.info(f'Connection success: {url}')
+
+                # begin relay loop to forward msgs
+                n.start_soon(
+                    proxy_msgs,
+                    ws,
+                    cs,
+                )
+
+                if fixture is not None:
+                    log.info(f'Entering fixture: {fixture}')
+
+                    # TODO: should we return an explicit sub-cs
+                    # from this fixture task?
+                    await n.start(
+                        open_fixture,
+                        fixture,
+                        nobsws,
+                    )
+
+                # indicate to wrapper / opener that we are up and block
+                # to let tasks run **inside** the ws open block above.
+                nobsws._connected.set()
+                await trio.sleep_forever()
+
+            # ws open block end
+        # nursery block end
+        nobsws._connected = trio.Event()
+        if cs.cancelled_caught:
+            log.cancel(
+                f'{url} connection cancelled!'
+            )
+            # if wrapper cancelled us, we expect it to also
+            # have re-assigned a new event
+            assert (
+                nobsws._connected
+                and not nobsws._connected.is_set()
+            )
+
+        # -> from here, move to next reconnect attempt
+
+        else:
+            log.exception('ws connection closed by client...')
+
+
+@acm
 async def open_autorecon_ws(
     url: str,
 
-    # TODO: proper type cannot smh
-    fixture: Optional[Callable] = None,
+    fixture: AsyncContextManager | None = None,
 
-) -> AsyncGenerator[tuple[...], NoBsWs]:
-    """Apparently we can QoS for all sorts of reasons..so catch em.
+    # time in sec
+    msg_recv_timeout: float = 3,
 
-    """
-    async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, stack, fixture=fixture)
-        await ws._connect()
+    # count of the number of above timeouts before connection reset
+    reset_after: int = 3,
+
+) -> AsyncGenerator[tuple[...], NoBsWs]:
+    '''
+    An auto-reconnect websocket (wrapper API) around
+    ``trio_websocket.open_websocket_url()`` providing automatic
+    re-connection on network errors, msg latency and thus roaming.
+
+    Here we implement a re-connect websocket interface where a bg
+    nursery runs ``WebSocketConnection.receive_message()``s in a loop
+    and restarts the full http(s) handshake on catches of certain
+    connectivity errors, or some user defined recv timeout.
+
+    You can provide a ``fixture`` async-context-manager which will be
+    entered/exited around each connection reset; eg. for (re)requesting
+    subscriptions without requiring streaming setup code to rerun.
+
+    '''
+    snd: trio.MemorySendChannel
+    rcv: trio.MemoryReceiveChannel
+    snd, rcv = trio.open_memory_channel(616)
+
+    async with trio.open_nursery() as n:
+        nobsws = NoBsWs(
+            url,
+            rcv,
+            msg_recv_timeout=msg_recv_timeout,
+        )
+        await n.start(
+            partial(
                _reconnect_forever,
+                url,
+                snd,
+                nobsws,
+                fixture=fixture,
+                reset_after=reset_after,
+            )
+        )
+        await nobsws._connected.wait()
+        assert nobsws._cs
+        assert nobsws.connected()
 
         try:
-            yield ws
-
+            yield nobsws
         finally:
-            await stack.aclose()
+            n.cancel_scope.cancel()
 
 
 '''
@@ -192,7 +342,7 @@ class JSONRPCResult(Struct):
     error: Optional[dict] = None
 
 
-@asynccontextmanager
+@acm
 async def open_jsonrpc_session(
     url: str,
     start_id: int = 0,
cs.cancelled_caught: - - timeouts += 1 - if timeouts > 2: - log.error("binance feed seems down and slow af? rebooting...") - try: - await ws._connect() - except BaseException as err: - assert err - # Wut in the f#@$% is going on here. - with trio.CancelScope(shield=True): - await tractor.breakpoint() - - continue + # TODO: match syntax here! + async for msg in ws: # for l1 streams binance doesn't add an event type field so # identify those messages by matching keys # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - if msg.get('u'): sym = msg['s'] bid = float(msg['b']) @@ -545,7 +526,7 @@ async def stream_quotes( ) @acm - async def subscribe(ws: wsproto.WSConnection): + async def subscribe(ws: NoBsWs): # setup subs # trade data (aka L1) @@ -591,7 +572,7 @@ async def stream_quotes( ) as ws, # avoid stream-gen closure from breaking trio.. - stream_messages(ws) as msg_gen, + aclosing(stream_messages(ws)) as msg_gen, ): typ, quote = await anext(msg_gen) From 34ff5ff249acdc805ea90fea3e89c7bca072b6a3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 13:56:42 -0400 Subject: [PATCH 186/294] kraken: port to new `NoBsWs`, passing timeout (counts) during setup --- piker/brokers/kraken/broker.py | 3 ++- piker/brokers/kraken/feed.py | 35 ++++++++++------------------------ 2 files changed, 12 insertions(+), 26 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 10a55e3d..776d33cb 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -21,6 +21,7 @@ Order api and machinery from collections import ChainMap, defaultdict from contextlib import ( asynccontextmanager as acm, + aclosing, ) from functools import partial from itertools import count @@ -679,7 +680,7 @@ async def trades_dialogue( token=token, ), ) as ws, - stream_messages(ws) as stream, + aclosing(stream_messages(ws)) as stream, trio.open_nursery() as nurse, ): # task for processing inbound requests from ems diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 5b654970..e92c8021 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -18,7 +18,10 @@ Real-time and historical data feed endpoints. ''' -from contextlib import asynccontextmanager as acm +from contextlib import ( + asynccontextmanager as acm, + aclosing, +) from datetime import datetime from typing import ( Any, @@ -31,7 +34,6 @@ from fuzzywuzzy import process as fuzzy import numpy as np import pendulum from trio_typing import TaskStatus -from trio_util import trio_async_generator import tractor import trio @@ -82,7 +84,6 @@ class OHLC(Struct): ticks: list[Any] = [] -@trio_async_generator async def stream_messages( ws: NoBsWs, ): @@ -93,26 +94,9 @@ async def stream_messages( though a single async generator. ''' - too_slow_count = last_hb = 0 - - while True: - - with trio.move_on_after(5) as cs: - msg = await ws.recv_msg() - - # trigger reconnection if heartbeat is laggy - if cs.cancelled_caught: - - too_slow_count += 1 - - if too_slow_count > 20: - log.warning( - "Heartbeat is too slow, resetting ws connection") - - await ws._connect() - too_slow_count = 0 - continue + last_hb: float = 0 + async for msg in ws: match msg: case {'event': 'heartbeat'}: now = time.time() @@ -130,7 +114,6 @@ async def stream_messages( yield msg -@trio_async_generator async def process_data_feed_msgs( ws: NoBsWs, ): @@ -138,7 +121,7 @@ async def process_data_feed_msgs( Parse and pack data feed messages. 
''' - async with stream_messages(ws) as ws_stream: + async with aclosing(stream_messages(ws)) as ws_stream: async for msg in ws_stream: match msg: case { @@ -416,13 +399,15 @@ async def stream_quotes( open_autorecon_ws( 'wss://ws.kraken.com/', fixture=subscribe, + msg_recv_timeout=5, + reset_after=20, ) as ws, # avoid stream-gen closure from breaking trio.. # NOTE: not sure this actually works XD particularly # if we call `ws._connect()` manally in the streaming # async gen.. - process_data_feed_msgs(ws) as msg_gen, + aclosing(process_data_feed_msgs(ws)) as msg_gen, ): # pull a first quote and deliver typ, ohlc_last = await anext(msg_gen) From f6cd08c6faef264109f648d33e8a551bcbf227eb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 14:00:13 -0400 Subject: [PATCH 187/294] Attempt to guard against numercial "anomalies" in `Viz.maxmin()`, add cacheing flag --- piker/ui/_dataviz.py | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/piker/ui/_dataviz.py b/piker/ui/_dataviz.py index 3c686619..7f1ef41e 100644 --- a/piker/ui/_dataviz.py +++ b/piker/ui/_dataviz.py @@ -23,6 +23,8 @@ from functools import lru_cache from math import ( ceil, floor, + isnan, + log as logf, ) from typing import ( Literal, @@ -332,6 +334,8 @@ class Viz(Struct): float, ] = {} + _mxmn_cache_enabled: bool = True + # to make lru_cache-ing work, see # https://docs.python.org/3/faq/programming.html#how-do-i-cache-method-calls def __eq__(self, other): @@ -447,7 +451,10 @@ class Viz(Struct): # https://stackoverflow.com/a/29980872 ixrng = lbar, rbar = round(x_range[0]), round(x_range[1]) - if use_caching: + if ( + use_caching + and self._mxmn_cache_enabled + ): cached_result = self._mxmns.get(ixrng) if cached_result: if do_print: @@ -521,8 +528,31 @@ class Viz(Struct): ) # cache result for input range - assert mxmn - self._mxmns[ixrng] = (read_slc, mxmn) + ylow, yhi = mxmn + + try: + prolly_anomaly: bool = ( + ( + abs(logf(ylow, 10)) > 16 + if ylow + else False + ) + or ( + isnan(ylow) or isnan(yhi) + ) + ) + except ValueError: + prolly_anomaly = True + + if prolly_anomaly: + return None + + if ( + not isnan(ylow) + and not prolly_anomaly + ): + self._mxmns[ixrng] = (read_slc, mxmn) + self.vs.yrange = mxmn profiler(f'yrange mxmn cacheing: {x_range} -> {mxmn}') return ( From af068c5c513af0853c8df457acc679e9d3eee311 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 15:04:47 -0400 Subject: [PATCH 188/294] binance: port `stream_messages()` to use `match:` and a new `L1` struct --- piker/brokers/binance.py | 130 +++++++++++++++++++++++++++------------ 1 file changed, 91 insertions(+), 39 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index cde20d3f..06c3ed46 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -176,6 +176,18 @@ class OHLC(Struct): bar_wap: float = 0.0 +class L1(Struct): + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + + update_id: int + sym: str + + bid: float + bsize: float + ask: float + asize: float + + # convert datetime obj timestamp to unixtime in milliseconds def binance_timestamp( when: datetime @@ -363,48 +375,88 @@ async def stream_messages( ) -> AsyncGenerator[NoBsWs, dict]: # TODO: match syntax here! 
+ msg: dict[str, Any] async for msg in ws: + match msg: + # for l1 streams binance doesn't add an event type field so + # identify those messages by matching keys + # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams + case { + # NOTE: this is never an old value it seems, so + # they are always sending real L1 spread updates. + 'u': upid, # update id + 's': sym, + 'b': bid, + 'B': bsize, + 'a': ask, + 'A': asize, + }: + # TODO: it would be super nice to have a `L1` piker type + # which "renders" incremental tick updates from a packed + # msg-struct: + # - backend msgs after packed into the type such that we + # can reduce IPC usage but without each backend having + # to do that incremental update logic manually B) + # - would it maybe be more efficient to use this instead? + # https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream + l1 = L1( + update_id=upid, + sym=sym, + bid=bid, + bsize=bsize, + ask=ask, + asize=asize, + ) + l1.typecast() - # for l1 streams binance doesn't add an event type field so - # identify those messages by matching keys - # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams - if msg.get('u'): - sym = msg['s'] - bid = float(msg['b']) - bsize = float(msg['B']) - ask = float(msg['a']) - asize = float(msg['A']) + # repack into piker's tick-quote format + yield 'l1', { + 'symbol': l1.sym, + 'ticks': [ + { + 'type': 'bid', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'bsize', + 'price': l1.bid, + 'size': l1.bsize, + }, + { + 'type': 'ask', + 'price': l1.ask, + 'size': l1.asize, + }, + { + 'type': 'asize', + 'price': l1.ask, + 'size': l1.asize, + } + ] + } - yield 'l1', { - 'symbol': sym, - 'ticks': [ - {'type': 'bid', 'price': bid, 'size': bsize}, - {'type': 'bsize', 'price': bid, 'size': bsize}, - {'type': 'ask', 'price': ask, 'size': asize}, - {'type': 'asize', 'price': ask, 'size': asize} - ] - } - - elif msg.get('e') == 'aggTrade': - - # NOTE: this is purely for a definition, ``msgspec.Struct`` - # does not runtime-validate until you decode/encode. - # see: https://jcristharif.com/msgspec/structs.html#type-validation - msg = AggTrade(**msg) - - # TODO: type out and require this quote format - # from all backends! 
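+            # NOTE: (editorial aside) `.typecast()` above appears to be
+            # piker's own `Struct` helper (post-hoc field coercion); a
+            # rough stand-in using vanilla `msgspec` decode-time
+            # validation instead:
+            #
+            #   import msgspec
+            #
+            #   class L1(msgspec.Struct):
+            #       sym: str
+            #       bid: float
+            #       ask: float
+            #
+            #   raw = b'{"sym": "BTCUSDT", "bid": 1.0, "ask": 1.1}'
+            #   l1 = msgspec.json.decode(raw, type=L1)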
- yield 'trade', { - 'symbol': msg.s, - 'last': msg.p, - 'brokerd_ts': time.time(), - 'ticks': [{ - 'type': 'trade', - 'price': float(msg.p), - 'size': float(msg.q), - 'broker_ts': msg.T, - }], - } + # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams + case { + 'e': 'aggTrade', + }: + # NOTE: this is purely for a definition, + # ``msgspec.Struct`` does not runtime-validate until you + # decode/encode, see: + # https://jcristharif.com/msgspec/structs.html#type-validation + msg = AggTrade(**msg) + msg.typecast() + yield 'trade', { + 'symbol': msg.s, + 'last': msg.p, + 'brokerd_ts': time.time(), + 'ticks': [{ + 'type': 'trade', + 'price': msg.p, + 'size': msg.q, + 'broker_ts': msg.T, + }], + } def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]: From ed434e284ba6385337f510f37ad1d6793ce319a2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 15:05:42 -0400 Subject: [PATCH 189/294] Disable ems init order-dialog notifications by default --- piker/clearing/_ems.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index fdb1986a..7373a5ea 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -330,6 +330,9 @@ class Router(Struct): # broker to book map books: dict[str, DarkBook] = {} + # NOTE: disable for since stupid "dunst" + notify_on_order_loads: bool = False + # sets of clients mapped from subscription keys subscribers: defaultdict[ str, # sub key, default fqme @@ -617,6 +620,7 @@ class Router(Struct): if ( not sent_some + and self.notify_on_order_loads and notify_on_headless ): log.info( From 0b43e0aa8c9a5be40c4b49176906d7424a9ab73e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 15:47:54 -0400 Subject: [PATCH 190/294] Try having `brokerd` eps defined in `.brokers._daemon` Since it's a bit weird having service specific implementation details inside the general service `._daemon` mod, and since i'd mentioned trying this re-org; let's do it B) Requires enabling the new mod in both `pikerd` and `brokerd` and obviously a bit more runtime-loading of the service modules in the `brokerd` service eps to avoid import cycles. Also moved `_setup_persistent_brokerd()` into the new mod since the naming would place it there even though the implementation really wouldn't (longer run) since we want to split up `.data.feed` layer backend-invoked eps into a separate actor eventually from the "actual" `brokerd` which will be the actor running **only** the trade control eps (eg. trades_dialogue()` and friends). 
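
For reference, the per-backend module enabling done by the new
`spawn_brokerd()` is just list-building around an (optional)
`__enable_modules__` attr on the broker mod; roughly:

    brokermod = get_brokermod('kraken')
    modpath = brokermod.__name__  # eg. 'piker.brokers.kraken'
    enable = [modpath] + [
        f'{modpath}.{sub}'
        for sub in getattr(brokermod, '__enable_modules__', [])
    ]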
--- piker/accounting/cli.py | 2 +- piker/brokers/_daemon.py | 169 ++++++++++++++++++++++++++++++++ piker/brokers/_util.py | 1 + piker/data/__init__.py | 37 ------- piker/service/__init__.py | 7 +- piker/service/_actor_runtime.py | 3 + piker/service/_daemon.py | 94 ------------------ piker/service/elastic.py | 2 +- 8 files changed, 179 insertions(+), 136 deletions(-) create mode 100644 piker/brokers/_daemon.py diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 16712c8c..7e68ce6f 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -79,7 +79,7 @@ def broker_init( # enabled.append('piker.data.feed') # non-blocking setup of brokerd service nursery - from ..data import _setup_persistent_brokerd + from ..brokers import _setup_persistent_brokerd return ( start_actor_kwargs, # to `ActorNursery.start_actor()` diff --git a/piker/brokers/_daemon.py b/piker/brokers/_daemon.py new file mode 100644 index 00000000..8a81b1d6 --- /dev/null +++ b/piker/brokers/_daemon.py @@ -0,0 +1,169 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Broker-daemon-actor "endpoint-hooks": the service task entry points for +``brokerd``. + +''' +from contextlib import ( + asynccontextmanager as acm, +) + +import tractor +import trio + +from . import _util +from . import get_brokermod + +# `brokerd` enabled modules +# TODO: move this def to the `.data` subpkg.. +# NOTE: keeping this list as small as possible is part of our caps-sec +# model and should be treated with utmost care! +_data_mods = [ + 'piker.brokers.core', + 'piker.brokers.data', + 'piker.brokers._daemon', + 'piker.data', + 'piker.data.feed', + 'piker.data._sampling' +] + + +# TODO: we should rename the daemon to datad prolly once we split up +# broker vs. data tasks into separate actors? +@tractor.context +async def _setup_persistent_brokerd( + ctx: tractor.Context, + brokername: str, + loglevel: str | None = None, + +) -> None: + ''' + Allocate a actor-wide service nursery in ``brokerd`` + such that feeds can be run in the background persistently by + the broker backend as needed. 
+ + ''' + log = _util.get_console_log( + loglevel or tractor.current_actor().loglevel, + name=f'{_util.subsys}.{brokername}', + ) + # set global for this actor to this new process-wide instance B) + _util.log = log + + from piker.data.feed import ( + _bus, + get_feed_bus, + ) + global _bus + assert not _bus + + async with trio.open_nursery() as service_nursery: + # assign a nursery to the feeds bus for spawning + # background tasks from clients + get_feed_bus(brokername, service_nursery) + + # unblock caller + await ctx.started() + + # we pin this task to keep the feeds manager active until the + # parent actor decides to tear it down + await trio.sleep_forever() + + +async def spawn_brokerd( + + brokername: str, + loglevel: str | None = None, + + **tractor_kwargs, + +) -> bool: + + from piker.service import Services + from piker.service._util import log # use service mngr log + + log.info(f'Spawning {brokername} broker daemon') + + brokermod = get_brokermod(brokername) + dname = f'brokerd.{brokername}' + + extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {}) + tractor_kwargs.update(extra_tractor_kwargs) + + # ask `pikerd` to spawn a new sub-actor and manage it under its + # actor nursery + modpath = brokermod.__name__ + broker_enable = [modpath] + for submodname in getattr( + brokermod, + '__enable_modules__', + [], + ): + subpath = f'{modpath}.{submodname}' + broker_enable.append(subpath) + + portal = await Services.actor_n.start_actor( + dname, + enable_modules=_data_mods + broker_enable, + loglevel=loglevel, + debug_mode=Services.debug_mode, + **tractor_kwargs + ) + + # non-blocking setup of brokerd service nursery + await Services.start_service_task( + dname, + portal, + + # signature of target root-task endpoint + _setup_persistent_brokerd, + brokername=brokername, + loglevel=loglevel, + ) + return True + + +@acm +async def maybe_spawn_brokerd( + + brokername: str, + loglevel: str | None = None, + + **pikerd_kwargs, + +) -> tractor.Portal: + ''' + Helper to spawn a brokerd service *from* a client + who wishes to use the sub-actor-daemon. + + ''' + from piker.service import maybe_spawn_daemon + + async with maybe_spawn_daemon( + + f'brokerd.{brokername}', + service_task_target=spawn_brokerd, + spawn_args={ + 'brokername': brokername, + }, + loglevel=loglevel, + + **pikerd_kwargs, + + ) as portal: + yield portal diff --git a/piker/brokers/_util.py b/piker/brokers/_util.py index ba123156..7e7a3ec7 100644 --- a/piker/brokers/_util.py +++ b/piker/brokers/_util.py @@ -31,6 +31,7 @@ from ..log import ( ) subsys: str = 'piker.brokers' +# NOTE: level should be reset by any actor that is spawned log = get_logger(subsys) get_console_log = partial( diff --git a/piker/data/__init__.py b/piker/data/__init__.py index 37da54b0..ba6af4ca 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -50,40 +50,3 @@ __all__ = [ 'open_shm_array', 'get_shm_token', ] - - -@tractor.context -async def _setup_persistent_brokerd( - ctx: tractor.Context, - brokername: str, - loglevel: str | None = None, - -) -> None: - ''' - Allocate a actor-wide service nursery in ``brokerd`` - such that feeds can be run in the background persistently by - the broker backend as needed. 
- - ''' - get_console_log( - loglevel or tractor.current_actor().loglevel, - ) - - from .feed import ( - _bus, - get_feed_bus, - ) - global _bus - assert not _bus - - async with trio.open_nursery() as service_nursery: - # assign a nursery to the feeds bus for spawning - # background tasks from clients - get_feed_bus(brokername, service_nursery) - - # unblock caller - await ctx.started() - - # we pin this task to keep the feeds manager active until the - # parent actor decides to tear it down - await trio.sleep_forever() diff --git a/piker/service/__init__.py b/piker/service/__init__.py index a885bc39..e6a17da0 100644 --- a/piker/service/__init__.py +++ b/piker/service/__init__.py @@ -20,7 +20,6 @@ Actor-runtime service orchestration machinery. """ from __future__ import annotations -from ._util import log from ._mngr import Services from ._registry import ( # noqa _tractor_kwargs, @@ -33,8 +32,6 @@ from ._registry import ( # noqa ) from ._daemon import ( # noqa maybe_spawn_daemon, - spawn_brokerd, - maybe_spawn_brokerd, spawn_emsd, maybe_open_emsd, ) @@ -44,6 +41,10 @@ from ._actor_runtime import ( open_pikerd, get_tractor_runtime_kwargs, ) +from ..brokers._daemon import ( + spawn_brokerd, + maybe_spawn_brokerd, +) __all__ = [ diff --git a/piker/service/_actor_runtime.py b/piker/service/_actor_runtime.py index ea7399fa..ec14dbf9 100644 --- a/piker/service/_actor_runtime.py +++ b/piker/service/_actor_runtime.py @@ -133,8 +133,11 @@ _root_dname = 'pikerd' _root_modules = [ __name__, 'piker.service._daemon', + 'piker.brokers._daemon', + 'piker.clearing._ems', 'piker.clearing._client', + 'piker.data._sampling', ] diff --git a/piker/service/_daemon.py b/piker/service/_daemon.py index ba1a467a..df94a992 100644 --- a/piker/service/_daemon.py +++ b/piker/service/_daemon.py @@ -32,25 +32,12 @@ import tractor from ._util import ( log, # sub-sys logger ) -from ..brokers import get_brokermod from ._mngr import ( Services, ) from ._actor_runtime import maybe_open_pikerd from ._registry import find_service -# `brokerd` enabled modules -# TODO: move this def to the `.data` subpkg.. -# NOTE: keeping this list as small as possible is part of our caps-sec -# model and should be treated with utmost care! 
-_data_mods = [ - 'piker.brokers.core', - 'piker.brokers.data', - 'piker.data', - 'piker.data.feed', - 'piker.data._sampling' -] - @acm async def maybe_spawn_daemon( @@ -145,87 +132,6 @@ async def maybe_spawn_daemon( await portal.cancel_actor() -async def spawn_brokerd( - - brokername: str, - loglevel: str | None = None, - - **tractor_kwargs, - -) -> bool: - - log.info(f'Spawning {brokername} broker daemon') - - brokermod = get_brokermod(brokername) - dname = f'brokerd.{brokername}' - - extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {}) - tractor_kwargs.update(extra_tractor_kwargs) - - # ask `pikerd` to spawn a new sub-actor and manage it under its - # actor nursery - modpath = brokermod.__name__ - broker_enable = [modpath] - for submodname in getattr( - brokermod, - '__enable_modules__', - [], - ): - subpath = f'{modpath}.{submodname}' - broker_enable.append(subpath) - - portal = await Services.actor_n.start_actor( - dname, - enable_modules=_data_mods + broker_enable, - loglevel=loglevel, - debug_mode=Services.debug_mode, - **tractor_kwargs - ) - - # non-blocking setup of brokerd service nursery - from ..data import _setup_persistent_brokerd - - await Services.start_service_task( - dname, - portal, - - # signature of target root-task endpoint - _setup_persistent_brokerd, - brokername=brokername, - loglevel=loglevel, - ) - return True - - -@acm -async def maybe_spawn_brokerd( - - brokername: str, - loglevel: str | None = None, - - **pikerd_kwargs, - -) -> tractor.Portal: - ''' - Helper to spawn a brokerd service *from* a client - who wishes to use the sub-actor-daemon. - - ''' - async with maybe_spawn_daemon( - - f'brokerd.{brokername}', - service_task_target=spawn_brokerd, - spawn_args={ - 'brokername': brokername, - }, - loglevel=loglevel, - - **pikerd_kwargs, - - ) as portal: - yield portal - - async def spawn_emsd( loglevel: str | None = None, diff --git a/piker/service/elastic.py b/piker/service/elastic.py index 71097dcb..6714a9ec 100644 --- a/piker/service/elastic.py +++ b/piker/service/elastic.py @@ -26,7 +26,7 @@ if TYPE_CHECKING: import docker from ._ahab import DockerContainer -from . import log # sub-sys logger +from ._util import log # sub-sys logger from ._util import ( get_console_log, ) From 0a8dd7b6da46fda792ac9044ad1c666e399fbfa3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Apr 2023 16:00:34 -0400 Subject: [PATCH 191/294] Try to disable `snappy` compression on variables; it breaks everything XD --- piker/service/marketstore.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py index 8d99b7cd..c5337c5c 100644 --- a/piker/service/marketstore.py +++ b/piker/service/marketstore.py @@ -89,6 +89,12 @@ stale_threshold: 5 enable_add: true enable_remove: false +# SUPER DUPER CRITICAL to address a super weird issue: +# https://github.com/pikers/piker/issues/443 +# seems like "variable compression" is possibly borked +# or snappy compression somehow breaks easily? 
+disable_variable_compression: true + triggers: - module: ondiskagg.so on: "*/1Sec/OHLCV" @@ -464,14 +470,20 @@ class Storage: limit=limit, ) - try: - result = await client.query(params) - except purerpc.grpclib.exceptions.UnknownError as err: - # indicate there is no history for this timeframe - log.exception( - f'Unknown mkts QUERY error: {params}\n' - f'{err.args}' - ) + for i in range(3): + try: + result = await client.query(params) + break + except purerpc.grpclib.exceptions.UnknownError as err: + if 'snappy' in err.args: + await tractor.breakpoint() + + # indicate there is no history for this timeframe + log.exception( + f'Unknown mkts QUERY error: {params}\n' + f'{err.args}' + ) + else: return {} # TODO: it turns out column access on recarrays is actually slower: From 363a2bbcc623efe4aaeddac1553a2c11f9801f24 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 22 Apr 2023 13:11:40 -0400 Subject: [PATCH 192/294] binance: use new `int` sub-id for each request --- piker/brokers/binance.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 06c3ed46..778f3aac 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -26,8 +26,8 @@ from contextlib import ( aclosing, ) from datetime import datetime -# from functools import lru_cache from decimal import Decimal +import itertools from typing import ( Any, Union, Optional, AsyncGenerator, Callable, @@ -560,8 +560,6 @@ async def stream_quotes( # XXX: required to propagate ``tractor`` loglevel to piker logging get_console_log(loglevel or tractor.current_actor().loglevel) - uid = 0 - async with ( send_chan as send_chan, ): @@ -577,24 +575,31 @@ async def stream_quotes( ) ) + iter_subids = itertools.count() + @acm async def subscribe(ws: NoBsWs): # setup subs + subid: int = next(iter_subids) + # trade data (aka L1) # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker - l1_sub = make_sub(symbols, 'bookTicker', uid) + l1_sub = make_sub(symbols, 'bookTicker', subid) await ws.send_msg(l1_sub) # aggregate (each order clear by taker **not** by maker) # trades data: # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams - agg_trades_sub = make_sub(symbols, 'aggTrade', uid) + agg_trades_sub = make_sub(symbols, 'aggTrade', subid) await ws.send_msg(agg_trades_sub) - # ack from ws server + # might get ack from ws server, or maybe some + # other msg still in transit.. res = await ws.recv_msg() - assert res['id'] == uid + subid: str | None = res.get('id') + if subid: + assert res['id'] == subid yield @@ -608,7 +613,7 @@ async def stream_quotes( await ws.send_msg({ "method": "UNSUBSCRIBE", "params": subs, - "id": uid, + "id": subid, }) # XXX: do we need to ack the unsub? 
From 7a3bce3f33352f0fb821a93a556b7ce9904dc2b2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Apr 2023 11:39:23 -0400 Subject: [PATCH 193/294] .data._web_bs: add client module name to log msgs --- piker/data/_web_bs.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py index c8d1e83e..d44f3fd1 100644 --- a/piker/data/_web_bs.py +++ b/piker/data/_web_bs.py @@ -156,6 +156,8 @@ async def _reconnect_forever( ) -> None: + src_mod: str = fixture.__module__ + async def proxy_msgs( ws: WebSocketConnection, pcs: trio.CancelScope, # parent cancel scope @@ -179,6 +181,7 @@ async def _reconnect_forever( await snd.send(msg) except nobsws.recon_errors: log.exception( + f'{src_mod}\n' f'{url} connection bail with:' ) await trio.sleep(0.5) @@ -191,7 +194,8 @@ async def _reconnect_forever( timeouts += 1 if timeouts > reset_after: log.error( - 'WS feed seems down and slow af? .. resetting\n' + f'{src_mod}\n' + 'WS feed seems down and slow af.. reconnecting\n' ) pcs.cancel() @@ -218,14 +222,20 @@ async def _reconnect_forever( task_status.started() while not snd._closed: - log.info(f'{url} trying (RE)CONNECT') + log.info( + f'{src_mod}\n' + f'{url} trying (RE)CONNECT' + ) async with trio.open_nursery() as n: cs = nobsws._cs = n.cancel_scope ws: WebSocketConnection async with open_websocket_url(url) as ws: nobsws._ws = ws - log.info(f'Connection success: {url}') + log.info( + f'{src_mod}\n' + f'Connection success: {url}' + ) # begin relay loop to forward msgs n.start_soon( @@ -235,7 +245,10 @@ async def _reconnect_forever( ) if fixture is not None: - log.info(f'Entering fixture: {fixture}') + log.info( + f'{src_mod}\n' + f'Entering fixture: {fixture}' + ) # TODO: should we return an explicit sub-cs # from this fixture task? @@ -267,7 +280,10 @@ async def _reconnect_forever( # -> from here, move to next reconnect attempt else: - log.exception('ws connection closed by client...') + log.exception( + f'{src_mod}\n' + 'ws connection closed by client...' + ) @acm From 685688d2b247648d3a43410c3591e1933a735973 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Apr 2023 11:39:49 -0400 Subject: [PATCH 194/294] ib: add `mbt.cme` micro-btc futes to adhoc set --- piker/brokers/ib/api.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 0914dea4..1ac4b6af 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -75,8 +75,10 @@ from ib_insync.client import Client as ib_Client import numpy as np from piker import config -from piker.log import get_logger -from piker.brokers._util import log +from piker.brokers._util import ( + log, + get_logger, +) from piker.data._source import base_ohlc_dtype @@ -193,6 +195,7 @@ _adhoc_futes_set = { # cypto$ 'brr.cme', + 'mbt.cme', # micro 'ethusdrr.cme', # agriculture From 226c3364c3179bd08185a1089e567ae1d1b02ff8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 14:36:36 -0400 Subject: [PATCH 195/294] Smh, handle `fixture==None` case.. 
---
 piker/data/_web_bs.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py
index d44f3fd1..17d44fdb 100644
--- a/piker/data/_web_bs.py
+++ b/piker/data/_web_bs.py
@@ -156,7 +156,10 @@ async def _reconnect_forever(

 ) -> None:

-    src_mod: str = fixture.__module__
+    if fixture is not None:
+        src_mod: str = fixture.__module__
+    else:
+        src_mod: str = 'unknown'

     async def proxy_msgs(
         ws: WebSocketConnection,
         pcs: trio.CancelScope,  # parent cancel scope

From c415bd1ee1379a883cdd03c43b2756eb49bff865 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 9 May 2023 14:37:32 -0400
Subject: [PATCH 196/294] If backend does not provide `bs_mktid`, use the `bs_fqme`

---
 piker/data/validate.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/piker/data/validate.py b/piker/data/validate.py
index 4b92e662..2b07a094 100644
--- a/piker/data/validate.py
+++ b/piker/data/validate.py
@@ -144,13 +144,14 @@ def validate_backend(
         'lot_tick_size',
         Decimal('1'),
     )
+    bs_mktid = init.get('bs_mktid') or bs_fqme

     mkt = MktPair.from_fqme(
         fqme=f'{bs_fqme}.{mod.name}',
         price_tick=price_tick,
         size_tick=size_tick,

-        bs_mktid=str(init['bs_mktid']),
+        bs_mktid=str(bs_mktid),
         _atype=symbol_info['asset_type']
     )

From 05fb4a40143080f6a93ef1a9f39f35823c7fd109 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 9 May 2023 14:38:31 -0400
Subject: [PATCH 197/294] kraken: drop recv timeout for recon ws

---
 piker/brokers/kraken/feed.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py
index e92c8021..b4364da2 100644
--- a/piker/brokers/kraken/feed.py
+++ b/piker/brokers/kraken/feed.py
@@ -399,7 +399,6 @@ async def stream_quotes(
         open_autorecon_ws(
             'wss://ws.kraken.com/',
             fixture=subscribe,
-            msg_recv_timeout=5,
             reset_after=20,
         ) as ws,

From 038b20d13a56fd16e59ee3aeda3a4d21995132e3 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 9 May 2023 14:39:45 -0400
Subject: [PATCH 198/294] wsbs: increase msg rx timeout to 16 secs

---
 piker/data/_web_bs.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py
index 17d44fdb..9c2753b1 100644
--- a/piker/data/_web_bs.py
+++ b/piker/data/_web_bs.py
@@ -156,6 +156,10 @@ async def _reconnect_forever(

 ) -> None:

+    # TODO: can we just report "where" in the call stack
+    # the client code is using the ws stream?
+    # Maybe we can just drop this since it's already in the log msg
+    # prefix?
     if fixture is not None:
         src_mod: str = fixture.__module__
     else:
@@ -295,8 +299,9 @@ async def open_autorecon_ws(

     fixture: AsyncContextManager | None = None,

-    # time in sec
-    msg_recv_timeout: float = 3,
+    # time in sec between msgs received before
+    # we presume connection might need a reset.
+    msg_recv_timeout: float = 16,

     # count of the number of above timeouts before connection reset
     reset_after: int = 3,

From cb8833d430d759fe78b08834af973970a9b3185f Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 9 May 2023 14:41:46 -0400
Subject: [PATCH 199/294] ib: clear error events on every received msg?
--- piker/brokers/ib/api.py | 2 +- piker/brokers/ib/feed.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 1ac4b6af..0ad9683e 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -1059,11 +1059,11 @@ class Client: api_err, report_api_err, ): - breakpoint() to_trio.send_nowait(( 'error', msg, )) + api_err.clear() # drop msg history api_err.connect(report_api_err) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 86fdc095..df1eea6a 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -837,6 +837,7 @@ async def stream_quotes( # ibclient = proxy._aio_ns.ib.client # host, port = ibclient.host, ibclient.port + fqsn = first_quote['fqsn'] # TODO: for loop through all symbols passed in init_msgs: dict[str, dict] = { @@ -844,7 +845,7 @@ async def stream_quotes( # and that history has been written sym: { 'symbol_info': syminfo, - 'fqsn': first_quote['fqsn'], + 'fqsn': fqsn, 'bs_mktid': con.conId, }, # 'status': { From 88f3912b2db036ad914a0286b4385806cb944761 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 14:42:43 -0400 Subject: [PATCH 200/294] test_ems: doc out some remaining suites --- piker/data/_sampling.py | 5 ++++- tests/test_ems.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 3c769551..c124c95c 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -360,7 +360,10 @@ async def register_with_sampler( if msg == 'broadcast_all': await Sampler.broadcast_all() finally: - if sub_for_broadcasts: + if ( + sub_for_broadcasts + and subs + ): subs.remove(stream) else: # if no shms are passed in we just wait until cancelled diff --git a/tests/test_ems.py b/tests/test_ems.py index 8b8d3600..ec9aefb6 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -391,3 +391,18 @@ def test_multi_fill_positions( await match_ppmsgs_on_ems_boot([ppmsg]) run_and_tollerate_cancels(just_check_pp) + + +def test_open_orders_reloaded( + open_test_pikerd: AsyncContextManager, + loglevel: str, + + fills: tuple[dict], + + check_cross_session: bool = False, +): + ... + + +def test_dark_order_clearing(): + ... 
From 96532ad38c6fdba853b1346b8b3d932f82956ffb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 14:43:25 -0400 Subject: [PATCH 201/294] ui._display: no downsampling on history chart default view call --- piker/ui/_display.py | 1 + 1 file changed, 1 insertion(+) diff --git a/piker/ui/_display.py b/piker/ui/_display.py index 227e4a66..d7500544 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -1470,6 +1470,7 @@ async def display_symbol_data( hist_chart.main_viz.default_view( do_min_bars=True, + do_ds=False, ) hist_linked.graphics_cycle() From f8c8f63e8739e5effe9a239185647cdee4b043e0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 14:45:34 -0400 Subject: [PATCH 202/294] Drop `Optional` usage from marketstore module --- piker/service/marketstore.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py index c5337c5c..f2174ad2 100644 --- a/piker/service/marketstore.py +++ b/piker/service/marketstore.py @@ -28,7 +28,6 @@ from contextlib import asynccontextmanager as acm from datetime import datetime from typing import ( Any, - Optional, Union, TYPE_CHECKING, ) @@ -292,7 +291,7 @@ def mk_tbk(keys: tuple[str, str, str]) -> str: def quote_to_marketstore_structarray( quote: dict[str, Any], - last_fill: Optional[float] + last_fill: float | None, ) -> np.array: ''' @@ -413,8 +412,8 @@ class Storage: ) -> tuple[ np.ndarray, # timeframe sampled array-series - Optional[datetime], # first dt - Optional[datetime], # last dt + datetime | None, # first dt + datetime | None, # last dt ]: first_tsdb_dt, last_tsdb_dt = None, None @@ -444,7 +443,7 @@ class Storage: self, fqsn: str, timeframe: int | str, - end: Optional[int] = None, + end: int | None = None, limit: int = int(800e3), ) -> np.ndarray: @@ -519,7 +518,7 @@ class Storage: async def delete_ts( self, key: str, - timeframe: Optional[Union[int, str]] = None, + timeframe: Union[int, str | None] = None, fmt: str = 'OHLCV', ) -> bool: @@ -627,7 +626,7 @@ class Storage: @acm async def open_storage_client( fqsn: str, - period: Optional[Union[int, str]] = None, # in seconds + period: Union[int, str | None] = None, # in seconds ) -> tuple[Storage, dict[str, np.ndarray]]: ''' From 80338e1dddb26fba0722f105484d3f90f5a8ca8d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 14:46:02 -0400 Subject: [PATCH 203/294] kucoin: WIP moving to FeedInit API --- piker/brokers/kucoin.py | 200 +++++++++++++++++++++++++++------------- 1 file changed, 135 insertions(+), 65 deletions(-) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index 743a78c2..55e73f5d 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -18,29 +18,45 @@ Kucoin broker backend ''' -from typing import Any, Callable, Literal, AsyncGenerator -from contextlib import asynccontextmanager as acm +from contextlib import ( + asynccontextmanager as acm, + aclosing, +) from datetime import datetime -import time +from decimal import Decimal import base64 import hmac import hashlib +import time +from functools import partial +from typing import ( + Any, + Callable, + Literal, + AsyncGenerator, +) import wsproto from uuid import uuid4 import asks import tractor import trio -from trio_util import trio_async_generator from trio_typing import TaskStatus from fuzzywuzzy import process as fuzzy import pendulum import numpy as np -from piker._cacheables import open_cached_client +from piker.accounting._mktinfo import ( + Asset, + MktPair, +) +from 
piker import config +from piker._cacheables import ( + open_cached_client, + async_lifo_cache, +) from piker.log import get_logger from ._util import DataUnavailable -from piker.pp import config from ..data.types import Struct from ..data._web_bs import ( open_autorecon_ws, @@ -67,11 +83,20 @@ class KucoinMktPair(Struct, frozen=True): https://docs.kucoin.com/#get-symbols-list ''' - baseCurrency: str baseIncrement: float + + @property + def price_tick(self) -> Decimal: + return Decimal(str(self.self.baseIncrement)) + baseMaxSize: float baseMinSize: float + + @property + def size_tick(self) -> Decimal: + return Decimal(str(self.baseMinSize)) + enableTrading: bool feeCurrency: str isMarginEnabled: bool @@ -84,7 +109,7 @@ class KucoinMktPair(Struct, frozen=True): quoteIncrement: float quoteMaxSize: float quoteMinSize: float - symbol: str + symbol: str # our bs_mktid, kucoin's internal id class AccountTrade(Struct, frozen=True): @@ -293,7 +318,7 @@ class Client: ) -> dict[str, KucoinMktPair]: entries = await self._request('GET', '/symbols') syms = { - kucoin_sym_to_fqsn(item['name']): KucoinMktPair(**item) + item['name'].lower().replace('-', ''): KucoinMktPair(**item) for item in entries } @@ -439,15 +464,15 @@ class Client: return array -def fqsn_to_kucoin_sym(fqsn: str, pairs: dict[str, KucoinMktPair]) -> str: +def fqsn_to_kucoin_sym( + fqsn: str, + pairs: dict[str, KucoinMktPair], + +) -> str: pair_data = pairs[fqsn] return pair_data.baseCurrency + '-' + pair_data.quoteCurrency -def kucoin_sym_to_fqsn(sym: str) -> str: - return sym.lower().replace('-', '') - - @acm async def get_client() -> AsyncGenerator[Client, None]: client = Client() @@ -497,14 +522,51 @@ async def open_ping_task( n.cancel_scope.cancel() +@async_lifo_cache() +async def get_mkt_info( + fqme: str, + +) -> tuple[MktPair, KucoinMktPair]: + ''' + Query for and return a `MktPair` and `KucoinMktPair`. + + ''' + async with open_cached_client('kucoin') as client: + # split off any fqme broker part + bs_fqme, _, broker = fqme.partition('.') + + pairs: dict[str, KucoinMktPair] = await client.cache_pairs() + pair: KucoinMktPair = pairs[bs_fqme] + bs_mktid: str = pair.symbol + + # pair: KucoinMktPair = await client.pair_info(pair_str) + + # assets = client.assets + # dst_asset: Asset = assets[pair.base] + # src_asset: Asset = assets[pair.quote] + + mkt = MktPair( + dst=dst_asset, + src=src_asset, + + price_tick=pair.price_tick, + size_tick=pair.size_tick, + bs_mktid=bs_mktid, + + broker='kucoin', + ) + return mkt, pair + + async def stream_quotes( send_chan: trio.abc.SendChannel, symbols: list[str], feed_is_live: trio.Event, - loglevel: str = '', - # startup sync - task_status: TaskStatus[tuple[dict, dict] - ] = trio.TASK_STATUS_IGNORED, + + task_status: TaskStatus[ + tuple[dict, dict] + ] = trio.TASK_STATUS_IGNORED, + ) -> None: ''' Required piker api to stream real-time data. 
@@ -512,64 +574,71 @@ async def stream_quotes( ''' async with open_cached_client('kucoin') as client: + + log.info('Starting up quote stream') + # loop through symbols and sub to feedz + for sym_str in symbols: + mkt, pair = await get_mkt_info(sym_str) + + init_msgs = { + # pass back token, and bool, signalling if we're the + # writer and that history has been written + sym_str: { + 'symbol_info': { + 'asset_type': 'crypto', + 'price_tick_size': pair.baseIncrement, + 'lot_tick_size': pair.baseMinSize, + }, + 'shm_write_opts': {'sum_tick_vml': False}, + 'fqsn': sym_str, + } + } + token, ping_interval = await client._get_ws_token() connect_id = str(uuid4()) - pairs = await client.cache_pairs() - ws_url = ( - f'wss://ws-api-spot.kucoin.com/?' - f'token={token}&[connectId={connect_id}]' - ) - # open ping task async with ( - open_autorecon_ws(ws_url) as ws, + open_autorecon_ws( + ( + f'wss://ws-api-spot.kucoin.com/?' + f'token={token}&[connectId={connect_id}]' + ), + fixture=partial( + subscribe, + connect_id=connect_id, + kucoin_sym=pair.sym, + ), + ) as ws, open_ping_task(ws, ping_interval, connect_id), + # subscribe(ws, connect_id, kucoin_sym), + aclosing(stream_messages(ws, sym_str)) as msg_gen, ): - log.info('Starting up quote stream') - # loop through symbols and sub to feedz - for sym in symbols: - pair: KucoinMktPair = pairs[sym] - kucoin_sym = pair.symbol + typ, quote = await anext(msg_gen) + while typ != 'trade': + # take care to not unblock here until we get a real + # trade quote + typ, quote = await anext(msg_gen) - init_msgs = { - # pass back token, and bool, signalling if we're the writer - # and that history has been written - sym: { - 'symbol_info': { - 'asset_type': 'crypto', - 'price_tick_size': float(pair.baseIncrement), - 'lot_tick_size': float(pair.baseMinSize), - }, - 'shm_write_opts': {'sum_tick_vml': False}, - 'fqsn': sym, - } - } + task_status.started((init_msgs, quote)) + feed_is_live.set() - async with ( - subscribe(ws, connect_id, kucoin_sym), - stream_messages(ws, sym) as msg_gen, - ): - typ, quote = await anext(msg_gen) - while typ != 'trade': - # take care to not unblock here until we get a real - # trade quote - typ, quote = await anext(msg_gen) - - task_status.started((init_msgs, quote)) - feed_is_live.set() - - async for typ, msg in msg_gen: - await send_chan.send({sym: msg}) + async for typ, msg in msg_gen: + await send_chan.send({sym_str: msg}) @acm -async def subscribe(ws: wsproto.WSConnection, connect_id, sym) -> AsyncGenerator[None, None]: +async def subscribe( + ws: NoBsWs, + connect_id, + bs_mktid, + +) -> AsyncGenerator[None, None]: # level 2 sub await ws.send_msg( { 'id': connect_id, 'type': 'subscribe', - 'topic': f'/spotMarket/level2Depth5:{sym}', + 'topic': f'/spotMarket/level2Depth5:{bs_mktid}', 'privateChannel': False, 'response': True, } @@ -580,7 +649,7 @@ async def subscribe(ws: wsproto.WSConnection, connect_id, sym) -> AsyncGenerator { 'id': connect_id, 'type': 'subscribe', - 'topic': f'/market/ticker:{sym}', + 'topic': f'/market/ticker:{bs_mktid}', 'privateChannel': False, 'response': True, } @@ -590,21 +659,22 @@ async def subscribe(ws: wsproto.WSConnection, connect_id, sym) -> AsyncGenerator # unsub if ws.connected(): - log.info(f'Unsubscribing to {sym} feed') + log.info(f'Unsubscribing to {bs_mktid} feed') await ws.send_msg( { 'id': connect_id, 'type': 'unsubscribe', - 'topic': f'/market/ticker:{sym}', + 'topic': f'/market/ticker:{bs_mktid}', 'privateChannel': False, 'response': True, } ) -@trio_async_generator async def stream_messages( 
- ws: NoBsWs, sym: str + ws: NoBsWs, + sym: str, + ) -> AsyncGenerator[tuple[str, dict], None]: timeouts = 0 last_trade_ts = 0 From f1f2ba2e02cf75e944a18f18b8078814f91b0031 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 18:17:50 -0400 Subject: [PATCH 204/294] kucoin: deliver `FeedInit` msgs on feed startup To fit with the rest of the new requirements added in `.data.validate` this adds `FeedInit` init including `MktPair` and `Asset` loading for all spot currencies provided by `kucoin`. Deatz: - add a `Currency` struct and accompanying `Client.get_currencies()` for storing all asset infos. - implement `.get_mkt_info()` which loads all necessary accounting and mkt meta-data structs including adding `.price/size_tick` fields to the `KucoinMktPair`. - on client boot, async spawn requests to cache both symbols and currencies. - pass `subscribe()` as the `fixture` arg to `open_autorecon_ws()` instead of opening it manually. Other: - tweak `Client._request` to not expect the prefixed `'/'` for the `endpoint: str`. - change the `api_v` arg to just be `api: str`. --- piker/brokers/kucoin.py | 176 +++++++++++++++++++++++++++++----------- 1 file changed, 130 insertions(+), 46 deletions(-) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index 55e73f5d..aaa35f34 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -29,6 +29,7 @@ import hmac import hashlib import time from functools import partial +from pprint import pformat from typing import ( Any, Callable, @@ -48,20 +49,22 @@ import numpy as np from piker.accounting._mktinfo import ( Asset, + digits_to_dec, MktPair, ) +from piker.data.validate import FeedInit from piker import config from piker._cacheables import ( open_cached_client, async_lifo_cache, ) from piker.log import get_logger -from ._util import DataUnavailable -from ..data.types import Struct -from ..data._web_bs import ( +from piker.data.types import Struct +from piker.data._web_bs import ( open_autorecon_ws, NoBsWs, ) +from ._util import DataUnavailable log = get_logger(__name__) @@ -88,7 +91,7 @@ class KucoinMktPair(Struct, frozen=True): @property def price_tick(self) -> Decimal: - return Decimal(str(self.self.baseIncrement)) + return Decimal(str(self.baseIncrement)) baseMaxSize: float baseMinSize: float @@ -118,7 +121,6 @@ class AccountTrade(Struct, frozen=True): https://docs.kucoin.com/#get-account-ledgers ''' - id: str currency: str amount: float @@ -136,7 +138,6 @@ class AccountResponse(Struct, frozen=True): https://docs.kucoin.com/#get-account-ledgers ''' - currentPage: int pageSize: int totalNum: int @@ -150,7 +151,6 @@ class KucoinTrade(Struct, frozen=True): https://docs.kucoin.com/#symbol-ticker ''' - bestAsk: float bestAskSize: float bestBid: float @@ -178,13 +178,32 @@ class KucoinMsg(Struct, frozen=True): Generic outer-wrapper for any Kucoin ws msg ''' - type: str topic: str subject: str data: list[KucoinTrade | KucoinL2] +class Currency(Struct, frozen=True): + ''' + Currency (asset) info: + https://docs.kucoin.com/#get-currencies + + ''' + currency: str + name: str + fullName: str + precision: int + confirms: int + contractAddress: str + withdrawalMinSize: str + withdrawalMinFee: str + isWithdrawEnabled: bool + isDepositEnabled: bool + isMarginEnabled: bool + isDebitEnabled: bool + + class BrokerConfig(Struct, frozen=True): key_id: str key_secret: str @@ -205,15 +224,17 @@ def get_config() -> BrokerConfig | None: class Client: def __init__(self) -> None: + self._config: BrokerConfig | None = get_config() self._pairs: 
dict[str, KucoinMktPair] = {} self._bars: list[list[float]] = [] - self._config: BrokerConfig | None = get_config() + self._currencies: dict[str, Currency] = {} def _gen_auth_req_headers( self, action: Literal['POST', 'GET'], endpoint: str, - api_v: str = 'v2', + api: str = 'v2', + ) -> dict[str, str | bytes]: ''' Generate authenticated request headers @@ -227,7 +248,7 @@ class Client: str_to_sign = ( str(int(time.time() * 1000)) - + action + f'/api/{api_v}{endpoint}' + + action + f'/api/{api}/{endpoint.lstrip("/")}' ) signature = base64.b64encode( @@ -259,7 +280,7 @@ class Client: self, action: Literal['POST', 'GET'], endpoint: str, - api_v: str = 'v2', + api: str = 'v2', headers: dict = {}, ) -> Any: ''' @@ -268,19 +289,24 @@ class Client: ''' if self._config: headers = self._gen_auth_req_headers( - action, endpoint, api_v) + action, + endpoint, + api, + ) - api_url = f'https://api.kucoin.com/api/{api_v}{endpoint}' + api_url = f'https://api.kucoin.com/api/{api}/{endpoint}' res = await asks.request(action, api_url, headers=headers) - if 'data' in res.json(): - return res.json()['data'] + json = res.json() + if 'data' in json: + return json['data'] else: log.error( - f'Error making request to {api_url} -> {res.json()["msg"]}' + f'Error making request to {api_url} ->\n' + f'{pformat(res)}' ) - return res.json()['msg'] + return json['msg'] async def _get_ws_token( self, @@ -296,7 +322,9 @@ class Client: token_type = 'private' if private else 'public' try: data: dict[str, Any] | None = await self._request( - 'POST', f'/bullet-{token_type}', 'v1' + 'POST', + endpoint=f'bullet-{token_type}', + api='v1' ) except Exception as e: log.error( @@ -313,10 +341,39 @@ class Client: f'{data.json()["msg"]}' ) + async def get_currencies( + self, + update: bool = False, + ) -> dict[str, Currency]: + ''' + Retrieve all "currency" info: + https://docs.kucoin.com/#get-currencies + + We use this for creating piker-interal ``Asset``s. + + ''' + if ( + not self._currencies + or update + ): + currencies: dict[str, Currency] = {} + entries: list[dict] = await self._request( + 'GET', + api='v1', + endpoint='currencies', + ) + for entry in entries: + curr = Currency(**entry).copy() + currencies[curr.name] = curr + + self._currencies.update(currencies) + + return self._currencies + async def _get_pairs( self, ) -> dict[str, KucoinMktPair]: - entries = await self._request('GET', '/symbols') + entries = await self._request('GET', 'symbols') syms = { item['name'].lower().replace('-', ''): KucoinMktPair(**item) for item in entries @@ -327,13 +384,18 @@ class Client: async def cache_pairs( self, + update: bool = False, + ) -> dict[str, KucoinMktPair]: ''' Get cached pairs and convert keyed symbols into fqsns if ya want ''' - if not self._pairs: - self._pairs = await self._get_pairs() + if ( + not self._pairs + or update + ): + self._pairs.update(await self._get_pairs()) return self._pairs @@ -341,7 +403,12 @@ class Client: self, pattern: str, limit: int = 30, + ) -> dict[str, KucoinMktPair]: + ''' + Use fuzzy search to match against all market names. 
+ + ''' data = await self.cache_pairs() matches = fuzzy.extractBests( @@ -352,7 +419,9 @@ class Client: async def last_trades(self, sym: str) -> list[AccountTrade]: trades = await self._request( - 'GET', f'/accounts/ledgers?currency={sym}', 'v1' + 'GET', + endpoint=f'accounts/ledgers?currency={sym}', + api='v1' ) trades = AccountResponse(**trades) return trades.items @@ -360,11 +429,13 @@ class Client: async def _get_bars( self, fqsn: str, + start_dt: datetime | None = None, end_dt: datetime | None = None, limit: int = 1000, as_np: bool = True, type: str = '1min', + ) -> np.ndarray: ''' Get OHLC data and convert to numpy array for perffff: @@ -409,7 +480,7 @@ class Client: kucoin_sym = fqsn_to_kucoin_sym(fqsn, self._pairs) url = ( - f'/market/candles?type={type}' + f'market/candles?type={type}' f'&symbol={kucoin_sym}' f'&startAt={start_dt}' f'&endAt={end_dt}' @@ -419,7 +490,7 @@ class Client: data: list[list[str]] | dict = await self._request( 'GET', url, - api_v='v1', + api='v1', ) if not isinstance(data, list): @@ -476,7 +547,10 @@ def fqsn_to_kucoin_sym( @acm async def get_client() -> AsyncGenerator[Client, None]: client = Client() - await client.cache_pairs() + + async with trio.open_nursery() as n: + n.start_soon(client.cache_pairs) + await client.get_currencies() yield client @@ -540,10 +614,24 @@ async def get_mkt_info( bs_mktid: str = pair.symbol # pair: KucoinMktPair = await client.pair_info(pair_str) + assets: dict[str, Currency] = client._currencies - # assets = client.assets - # dst_asset: Asset = assets[pair.base] - # src_asset: Asset = assets[pair.quote] + # TODO: maybe just do this processing in + # a .get_assets() method (see kraken)? + src: Currency = assets[pair.quoteCurrency] + src_asset = Asset( + name=src.name, + atype='crypto_currency', + tx_tick=digits_to_dec(src.precision), + info=src.to_dict(), + ) + dst: Currency = assets[pair.baseCurrency] + dst_asset = Asset( + name=dst.name, + atype='crypto_currency', + tx_tick=digits_to_dec(dst.precision), + info=dst.to_dict(), + ) mkt = MktPair( dst=dst_asset, @@ -573,30 +661,25 @@ async def stream_quotes( Where the rubber hits the road baby ''' + init_msgs: list[FeedInit] = [] + async with open_cached_client('kucoin') as client: - log.info('Starting up quote stream') - # loop through symbols and sub to feedz + log.info(f'Starting up quote stream(s) for {symbols}') for sym_str in symbols: mkt, pair = await get_mkt_info(sym_str) - - init_msgs = { - # pass back token, and bool, signalling if we're the - # writer and that history has been written - sym_str: { - 'symbol_info': { - 'asset_type': 'crypto', - 'price_tick_size': pair.baseIncrement, - 'lot_tick_size': pair.baseMinSize, + init_msgs.append( + FeedInit( + mkt_info=mkt, + shm_write_opts={ + 'sum_tick_vml': False, }, - 'shm_write_opts': {'sum_tick_vml': False}, - 'fqsn': sym_str, - } - } + ) + ) + ws: NoBsWs token, ping_interval = await client._get_ws_token() connect_id = str(uuid4()) - async with ( open_autorecon_ws( ( @@ -606,7 +689,7 @@ async def stream_quotes( fixture=partial( subscribe, connect_id=connect_id, - kucoin_sym=pair.sym, + bs_mktid=pair.symbol, ), ) as ws, open_ping_task(ws, ping_interval, connect_id), @@ -614,6 +697,7 @@ async def stream_quotes( aclosing(stream_messages(ws, sym_str)) as msg_gen, ): typ, quote = await anext(msg_gen) + while typ != 'trade': # take care to not unblock here until we get a real # trade quote From 361fc4645c6bbc57836c2aaabe8f771a3bcd5ef8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 18:28:51 -0400 Subject: 
[PATCH 205/294] Drop passing `loglevel` to `stream_quotes()`, level is set when actor spawns --- piker/data/feed.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 02f0adec..ac6b188a 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -283,8 +283,6 @@ async def allocate_persistent_feed( # multiple live feeds from one task, instead of getting # a new request (and thus new task) for each subscription. symbols=[symstr], - - loglevel=loglevel, ) ) From 769b292dca6e7d2eda16193fe507c5eb916ebd1f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 9 May 2023 18:29:28 -0400 Subject: [PATCH 206/294] Allow `brokerd` runtime switch to paper mode Previously you couldn't have a brokerd backend which defined `.trades_dialogue()` but which could also indicate that the paper clearing engine should be used. This adds that support by allowing the endpoint task to return a simple `'paper'` string, in which case the ems will boot a paperboi. The obvious useful case for this is if you have a broker you want to use but do not have actual broker credentials setup (yet) with that provider in your `brokers.toml`; demonstrated here with the adjustment to `kraken`'s startup to no longer raise a runtime error B) --- piker/brokers/kraken/broker.py | 4 +-- piker/brokers/kraken/feed.py | 2 -- piker/clearing/_ems.py | 45 +++++++++++++++++++++++++--------- 3 files changed, 36 insertions(+), 15 deletions(-) diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 776d33cb..994b7158 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -429,8 +429,8 @@ async def trades_dialogue( async with get_client() as client: if not client._api_key: - raise RuntimeError( - 'Missing Kraken API key in `brokers.toml`!?!?') + await ctx.started('paper') + return # TODO: make ems flip to paper mode via # some returned signal if the user only wants to use diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index b4364da2..4d496376 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -413,8 +413,6 @@ async def stream_quotes( topic, quote = normalize(ohlc_last) task_status.started((init_msgs, quote)) - - # lol, only "closes" when they're margin squeezing clients ;P feed_is_live.set() # keep start of last interval for volume tracking diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 7373a5ea..77cad1bd 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -407,11 +407,7 @@ class Router(Struct): yield relay return - trades_endpoint = getattr(brokermod, 'trades_dialogue', None) - if ( - trades_endpoint is None - or exec_mode == 'paper' - ): + def mk_paper_ep(): # for logging purposes brokermod = paper @@ -426,26 +422,53 @@ class Router(Struct): # load the paper trading engine as a subactor of this emsd # actor to simulate the real IPC load it'll have when also # pulling data from feeds - open_trades_endpoint = paper.open_paperboi( + return paper.open_paperboi( fqme=fqme, loglevel=loglevel, ) - else: + trades_endpoint = getattr(brokermod, 'trades_dialogue', None) + if ( + trades_endpoint is not None + or exec_mode != 'paper' + ): # open live brokerd trades endpoint open_trades_endpoint = portal.open_context( trades_endpoint, loglevel=loglevel, ) - # open trades-dialog endpoint with backend broker + else: + exec_mode: str = 'paper' + + @acm + async def maybe_open_paper_ep(): + if exec_mode == 'paper': + async with mk_paper_ep() as msg: + yield msg + return + + 
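+            # NOTE: a backend signals "no live creds, use the sim
+            # engine" by returning the literal string 'paper' from its
+            # `trades_dialogue()` started-msg (see the `first ==
+            # 'paper'` check below); eg. a sketch mirroring the
+            # `kraken` change above:
+            #
+            #   @tractor.context
+            #   async def trades_dialogue(ctx, **kwargs):
+            #       if not client._api_key:
+            #           await ctx.started('paper')
+            #           return
+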
+            # open trades-dialog endpoint with backend broker
+            async with open_trades_endpoint as msg:
+                ctx, first = msg
+
+                # runtime indication that the backend can't support live
+                # order ctrl yet, so boot the paperboi B0
+                if first == 'paper':
+                    async with mk_paper_ep() as msg:
+                        yield msg
+                        return
+
+                else:
+                    # working live ep case B)
+                    yield msg
+                    return
+
     positions: list[BrokerdPosition]
     accounts: tuple[str]
-
     async with (
-        open_trades_endpoint as (
+        maybe_open_paper_ep() as (
             brokerd_ctx,
-            (positions, accounts,),
+            (positions, accounts),
         ),
         brokerd_ctx.open_stream() as brokerd_trades_stream,
     ):

From c6e53685200aff4706c838e0c14ab5dcb2aed03e Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 9 May 2023 18:34:01 -0400
Subject: [PATCH 207/294] paperboi: fix fqme parsing to handle `bs_fqme` cases

---
 piker/clearing/_paper_engine.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index 24afa609..56e04577 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -572,7 +572,8 @@ async def trades_dialogue(
     # loading any pps
     mkt_by_fqme: dict[str, MktPair] = {}
     if fqme:
-        mkt, _ = await brokermod.get_mkt_info(fqme.rstrip(f'.{broker}'))
+        bs_fqme, _, broker = fqme.rpartition('.')
+        mkt, _ = await brokermod.get_mkt_info(bs_fqme)
         mkt_by_fqme[fqme] = mkt
 
     # for each sym in the ledger load it's `MktPair` info

From e06f9dc5c0a05378aaa0e2841f30930497f6eb29 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 10 May 2023 16:22:09 -0400
Subject: [PATCH 208/294] kucoin: port to new `NoBsWs` api semantics

No longer need to implement connection timeout logic in the streaming
code; instead we just `async for` that bby B)

Further refining:
- better `KucoinTrade` msg parsing and handling with object cases.
- make `subscribe()` do its sub requests in a loop and wait for acks.
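
Roughly the consumer side is now just the following (a sketch; kwarg
names per the `.data._web_bs` api):

    async with open_autorecon_ws(
        url,
        fixture=subscribe,  # (re)run on every (re)connect
    ) as ws:
        async for msg in ws:
            # rx timeouts and auto-reconnect are handled internally
            ...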
--- piker/brokers/kucoin.py | 203 +++++++++++++++++++++------------------- 1 file changed, 105 insertions(+), 98 deletions(-) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index aaa35f34..015b248c 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -1,4 +1,6 @@ -# Copyright (C) Jared Goldman (in stewardship for pikers) +# Copyright (C) (in stewardship for pikers) +# - Jared Goldman +# - Tyler Goodlet # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -693,7 +695,6 @@ async def stream_quotes( ), ) as ws, open_ping_task(ws, ping_interval, connect_id), - # subscribe(ws, connect_id, kucoin_sym), aclosing(stream_messages(ws, sym_str)) as msg_gen, ): typ, quote = await anext(msg_gen) @@ -716,43 +717,48 @@ async def subscribe( connect_id, bs_mktid, -) -> AsyncGenerator[None, None]: - # level 2 sub - await ws.send_msg( - { - 'id': connect_id, - 'type': 'subscribe', - 'topic': f'/spotMarket/level2Depth5:{bs_mktid}', - 'privateChannel': False, - 'response': True, - } - ) + # subs are filled in with `bs_mktid` from avbove + topics: list[str] = [ + '/market/ticker:{bs_mktid}', # clearing events + '/spotMarket/level2Depth5:{bs_mktid}', # level 2 + ], - # watch trades - await ws.send_msg( - { - 'id': connect_id, - 'type': 'subscribe', - 'topic': f'/market/ticker:{bs_mktid}', - 'privateChannel': False, - 'response': True, - } - ) +) -> AsyncGenerator[None, None]: + + eps: list[str] = [] + for topic in topics: + ep: str = topic.format(bs_mktid=bs_mktid) + eps.append(ep) + await ws.send_msg( + { + 'id': connect_id, + 'type': 'subscribe', + 'topic': ep, + # 'topic': f'/spotMarket/level2Depth5:{bs_mktid}', + 'privateChannel': False, + 'response': True, + } + ) + + for _ in topics: + ack_msg = await ws.recv_msg() + log.info(f'Sub ACK: {ack_msg}') yield # unsub if ws.connected(): log.info(f'Unsubscribing to {bs_mktid} feed') - await ws.send_msg( - { - 'id': connect_id, - 'type': 'unsubscribe', - 'topic': f'/market/ticker:{bs_mktid}', - 'privateChannel': False, - 'response': True, - } - ) + for ep in eps: + await ws.send_msg( + { + 'id': connect_id, + 'type': 'unsubscribe', + 'topic': ep, + 'privateChannel': False, + 'response': True, + } + ) async def stream_messages( @@ -760,80 +766,81 @@ async def stream_messages( sym: str, ) -> AsyncGenerator[tuple[str, dict], None]: - timeouts = 0 - last_trade_ts = 0 + ''' + Core (live) feed msg handler: relay market events + to the piker-ized tick-stream format. - while True: - with trio.move_on_after(3) as cs: - msg = await ws.recv_msg() - if cs.cancelled_caught: - timeouts += 1 - if timeouts > 2: - log.error( - 'kucoin feed is sh**ing the bed... 
rebooting...') - await ws._connect() + ''' + last_trade_ts: float = 0 + async for dict_msg in ws: + if 'subject' not in dict_msg: + log.warn(f'Unhandled message: {dict_msg}') continue - if msg.get('subject'): - msg = KucoinMsg(**msg) - match msg.subject: - case 'trade.ticker': - trade_data = KucoinTrade(**msg.data) - # XXX: Filter for duplicate messages as ws feed will - # send duplicate market state - # https://docs.kucoin.com/#level2-5-best-ask-bid-orders - if trade_data.time == last_trade_ts: - continue + msg = KucoinMsg(**dict_msg) + match msg: + case KucoinMsg( + subject='trade.ticker', + ): + trade_data = KucoinTrade(**msg.data) - last_trade_ts = trade_data.time + # XXX: Filter for duplicate messages as ws feed will + # send duplicate market state + # https://docs.kucoin.com/#level2-5-best-ask-bid-orders + if trade_data.time == last_trade_ts: + continue - yield 'trade', { - 'symbol': sym, - 'last': trade_data.price, - 'brokerd_ts': last_trade_ts, - 'ticks': [ - { - 'type': 'trade', - 'price': float(trade_data.price), - 'size': float(trade_data.size), - 'broker_ts': last_trade_ts, - } - ], - } + last_trade_ts = trade_data.time - case 'level2': - l2_data = KucoinL2(**msg.data) - first_ask = l2_data.asks[0] - first_bid = l2_data.bids[0] - yield 'l1', { - 'symbol': sym, - 'ticks': [ - { - 'type': 'bid', - 'price': float(first_bid[0]), - 'size': float(first_bid[1]), - }, - { - 'type': 'bsize', - 'price': float(first_bid[0]), - 'size': float(first_bid[1]), - }, - { - 'type': 'ask', - 'price': float(first_ask[0]), - 'size': float(first_ask[1]), - }, - { - 'type': 'asize', - 'price': float(first_ask[0]), - 'size': float(first_ask[1]), - }, - ], - } + yield 'trade', { + 'symbol': sym, + 'last': trade_data.price, + 'brokerd_ts': last_trade_ts, + 'ticks': [ + { + 'type': 'trade', + 'price': float(trade_data.price), + 'size': float(trade_data.size), + 'broker_ts': last_trade_ts, + } + ], + } - case _: - log.warn(f'Unhandled message: {msg}') + case KucoinMsg( + subject='level2', + ): + l2_data = KucoinL2(**msg.data) + first_ask = l2_data.asks[0] + first_bid = l2_data.bids[0] + yield 'l1', { + 'symbol': sym, + 'ticks': [ + { + 'type': 'bid', + 'price': float(first_bid[0]), + 'size': float(first_bid[1]), + }, + { + 'type': 'bsize', + 'price': float(first_bid[0]), + 'size': float(first_bid[1]), + }, + { + 'type': 'ask', + 'price': float(first_ask[0]), + 'size': float(first_ask[1]), + }, + { + 'type': 'asize', + 'price': float(first_ask[0]), + 'size': float(first_ask[1]), + }, + ], + } + + case _: + log.warn(f'Unhandled message: {msg}') @acm From 7ff8aa1ba0cc51c291a9279c7749ff8186d88cf3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 11 May 2023 12:32:38 -0400 Subject: [PATCH 209/294] ib: passthrough host arg to vnc client for click hack --- piker/brokers/ib/_util.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/piker/brokers/ib/_util.py b/piker/brokers/ib/_util.py index a94f77f1..114022fa 100644 --- a/piker/brokers/ib/_util.py +++ b/piker/brokers/ib/_util.py @@ -19,13 +19,21 @@ runnable script-programs. 
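
The host is now pulled from the connected `ib_insync` client instead of
assuming `localhost`; roughly (a sketch, per the diff below):

    client = proxy._aio_ns.ib.client
    await tractor.to_asyncio.run_task(
        partial(vnc_click_hack, host=client.host)
    )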
''' -from typing import Literal +from __future__ import annotations +from functools import partial +from typing import ( + Literal, + TYPE_CHECKING, +) import subprocess import tractor from .._util import log +if TYPE_CHECKING: + from .api import MethodProxy + _reset_tech: Literal[ 'vnc', @@ -39,7 +47,9 @@ _reset_tech: Literal[ async def data_reset_hack( + proxy: MethodProxy, reset_type: str = 'data', + **kwargs, ) -> None: ''' @@ -71,10 +81,17 @@ async def data_reset_hack( ''' global _reset_tech + client: 'IBCLIENTTHING' = proxy._aio_ns.ib.client + match _reset_tech: case 'vnc': try: - await tractor.to_asyncio.run_task(vnc_click_hack) + await tractor.to_asyncio.run_task( + partial( + vnc_click_hack, + host=client.host, + ) + ) except OSError: _reset_tech = 'i3ipc_xdotool' try: @@ -94,10 +111,11 @@ async def data_reset_hack( async def vnc_click_hack( + host: str = 'localhost', reset_type: str = 'data' ) -> None: ''' - Reset the data or netowork connection for the VNC attached + Reset the data or network connection for the VNC attached ib gateway using magic combos. ''' @@ -106,7 +124,7 @@ async def vnc_click_hack( import asyncvnc async with asyncvnc.connect( - 'localhost', + host, port=3003, # password='ibcansmbz', ) as client: From 957224bdc5981945405bf69a780dc9f7f37fed6a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 12 May 2023 09:41:45 -0400 Subject: [PATCH 210/294] ib: support remote host vnc client connections I figure we might as well support multiple types of distributed multi-host setups; why not allow running the API (gateway) and thus vnc server on a diff host and allowing clients to connect and do their thing B) Deatz: - make `ib._util.data_reset_hack()` take in a `vnc_host` which gets proxied through to the `asyncvnc` client. - pull `ib_insync.client.Client` host value and pass-through to data reset machinery, presuming the vnc server is running in the same container (and/or the same host). - if no vnc connection **and** no i3ipc trick can be used, just report to the user that they need to remove the data throttle manually. - fix `feed.get_bars()` to handle throttle cases the same based on error msg matching, not error the code and add a max `_failed_resets` count to trigger bailing on the query loop. --- piker/brokers/ib/_util.py | 51 +++++++++++++---- piker/brokers/ib/feed.py | 112 ++++++++++++++++++++++---------------- 2 files changed, 103 insertions(+), 60 deletions(-) diff --git a/piker/brokers/ib/_util.py b/piker/brokers/ib/_util.py index 114022fa..4c3bbb34 100644 --- a/piker/brokers/ib/_util.py +++ b/piker/brokers/ib/_util.py @@ -32,7 +32,10 @@ import tractor from .._util import log if TYPE_CHECKING: - from .api import MethodProxy + from .api import ( + MethodProxy, + ib_Client + ) _reset_tech: Literal[ @@ -47,9 +50,8 @@ _reset_tech: Literal[ async def data_reset_hack( - proxy: MethodProxy, - reset_type: str = 'data', - **kwargs, + vnc_host: str, + reset_type: Literal['data', 'connection'], ) -> None: ''' @@ -79,9 +81,13 @@ async def data_reset_hack( that need to be wrangle. 
''' - global _reset_tech - client: 'IBCLIENTTHING' = proxy._aio_ns.ib.client + no_setup_msg:str = ( + 'No data reset hack test setup for {vnc_host}!\n' + 'See setup @\n' + 'https://github.com/pikers/piker/tree/master/piker/brokers/ib' + ) + global _reset_tech match _reset_tech: case 'vnc': @@ -89,15 +95,26 @@ async def data_reset_hack( await tractor.to_asyncio.run_task( partial( vnc_click_hack, - host=client.host, + host=vnc_host, ) ) except OSError: - _reset_tech = 'i3ipc_xdotool' + if vnc_host != 'localhost': + log.warning(no_setup_msg) + return False + + try: + import i3ipc + except ModuleNotFoundError: + log.warning(no_setup_msg) + return False + try: i3ipc_xdotool_manual_click_hack() + _reset_tech = 'i3ipc_xdotool' return True except OSError: + log.exception(no_setup_msg) return False case 'i3ipc_xdotool': @@ -119,9 +136,21 @@ async def vnc_click_hack( ib gateway using magic combos. ''' - key = {'data': 'f', 'connection': 'r'}[reset_type] + try: + import asyncvnc + except ModuleNotFoundError: + log.warning( + "In order to leverage `piker`'s built-in data reset hacks, install " + "the `asyncvnc` project: https://github.com/barneygale/asyncvnc" + ) + return - import asyncvnc + # two different hot keys which trigger diff types of reset + # requests B) + key = { + 'data': 'f', + 'connection': 'r' + }[reset_type] async with asyncvnc.connect( host, @@ -140,8 +169,6 @@ async def vnc_click_hack( def i3ipc_xdotool_manual_click_hack() -> None: - import i3ipc - i3 = i3ipc.Connection() t = i3.get_tree() diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index df1eea6a..61288a3a 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -212,7 +212,7 @@ _pacing: str = ( async def wait_on_data_reset( proxy: MethodProxy, reset_type: str = 'data', - timeout: float = 16, + timeout: float = float('inf'), task_status: TaskStatus[ tuple[ @@ -228,7 +228,7 @@ async def wait_on_data_reset( 'HMDS data farm connection is OK:ushmds' ) - # XXX: other event messages we might want to try and + # TODO: other event messages we might want to try and # wait for but i wasn't able to get any of this # reliable.. # reconnect_start = proxy.status_event( @@ -239,14 +239,21 @@ async def wait_on_data_reset( # ) # try to wait on the reset event(s) to arrive, a timeout # will trigger a retry up to 6 times (for now). + client = proxy._aio_ns.ib.client done = trio.Event() with trio.move_on_after(timeout) as cs: task_status.started((cs, done)) - log.warning('Sending DATA RESET request') - res = await data_reset_hack(reset_type=reset_type) + log.warning( + 'Sending DATA RESET request:\n' + f'{client}' + ) + res = await data_reset_hack( + vnc_host=client.host, + reset_type=reset_type, + ) if not res: log.warning( @@ -280,7 +287,7 @@ async def wait_on_data_reset( _data_resetter_task: trio.Task | None = None - +_failed_resets: int = 0 async def get_bars( @@ -299,6 +306,7 @@ async def get_bars( # history queries for instrument, presuming that most don't # not trade for a week XD max_nodatas: int = 6, + max_failed_resets: int = 6, task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, @@ -308,7 +316,7 @@ async def get_bars( a ``MethoProxy``. 
''' - global _data_resetter_task + global _data_resetter_task, _failed_resets nodatas_count: int = 0 data_cs: trio.CancelScope | None = None @@ -321,8 +329,11 @@ async def get_bars( result_ready = trio.Event() async def query(): + + global _failed_resets nonlocal result, data_cs, end_dt, nodatas_count - while True: + + while _failed_resets < max_failed_resets: try: out = await proxy.bars( fqsn=fqsn, @@ -382,49 +393,48 @@ async def get_bars( f'Symbol: {fqsn}', ) - elif err.code == 162: - if ( - 'HMDS query returned no data' in msg - ): - # XXX: this is now done in the storage mgmt - # layer and we shouldn't implicitly decrement - # the frame dt index since the upper layer may - # be doing so concurrently and we don't want to - # be delivering frames that weren't asked for. - # try to decrement start point and look further back - # end_dt = end_dt.subtract(seconds=2000) - logmsg = "SUBTRACTING DAY from DT index" - if end_dt is not None: - end_dt = end_dt.subtract(days=1) - elif end_dt is None: - end_dt = pendulum.now().subtract(days=1) + elif ( + 'HMDS query returned no data' in msg + ): + # XXX: this is now done in the storage mgmt + # layer and we shouldn't implicitly decrement + # the frame dt index since the upper layer may + # be doing so concurrently and we don't want to + # be delivering frames that weren't asked for. + # try to decrement start point and look further back + # end_dt = end_dt.subtract(seconds=2000) + logmsg = "SUBTRACTING DAY from DT index" + if end_dt is not None: + end_dt = end_dt.subtract(days=1) + elif end_dt is None: + end_dt = pendulum.now().subtract(days=1) - log.warning( - f'NO DATA found ending @ {end_dt}\n' - + logmsg + log.warning( + f'NO DATA found ending @ {end_dt}\n' + + logmsg + ) + + if nodatas_count >= max_nodatas: + raise DataUnavailable( + f'Presuming {fqsn} has no further history ' + f'after {max_nodatas} tries..' ) - if nodatas_count >= max_nodatas: - raise DataUnavailable( - f'Presuming {fqsn} has no further history ' - f'after {max_nodatas} tries..' 
- ) + nodatas_count += 1 + continue - nodatas_count += 1 - continue - - elif 'API historical data query cancelled' in err.message: - log.warning( - 'Query cancelled by IB (:eyeroll:):\n' - f'{err.message}' - ) - continue - elif ( - 'Trading TWS session is connected from a different IP' - in err.message - ): - log.warning("ignoring ip address warning") - continue + elif 'API historical data query cancelled' in err.message: + log.warning( + 'Query cancelled by IB (:eyeroll:):\n' + f'{err.message}' + ) + continue + elif ( + 'Trading TWS session is connected from a different IP' + in err.message + ): + log.warning("ignoring ip address warning") + continue # XXX: more or less same as above timeout case elif _pacing in msg: @@ -433,8 +443,11 @@ async def get_bars( 'Resetting farms with `ctrl-alt-f` hack\n' ) + client = proxy._aio_ns.ib.client + # cancel any existing reset task if data_cs: + log.cancel(f'Cancelling existing reset for {client}') data_cs.cancel() # spawn new data reset task @@ -442,10 +455,13 @@ async def get_bars( partial( wait_on_data_reset, proxy, - timeout=float('inf'), reset_type='connection' ) ) + if reset_done: + _failed_resets = 0 + else: + _failed_resets += 1 continue else: @@ -482,7 +498,7 @@ async def get_bars( partial( wait_on_data_reset, proxy, - timeout=float('inf'), + reset_type='data', ) ) # sync wait on reset to complete From 488a0cd119495153bb1f652d6e0e803fc9e86c85 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 12 May 2023 12:40:09 -0400 Subject: [PATCH 211/294] Add `.config.load_account()` Allows for direct loading of an "account file configuration" contents without having to pass the explicit config dir path. In this case we are also rewriting the `pps...toml` file names to instead have a `account.` prefix, but providing this helper function allows such changes more easily in the future - since callers won't have to use the lower level `.load()` input signature. Also add some todo comments about moving to `tomlkit`. --- piker/config.py | 77 +++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 61 insertions(+), 16 deletions(-) diff --git a/piker/config.py b/piker/config.py index 136c3819..94796b3b 100644 --- a/piker/config.py +++ b/piker/config.py @@ -27,6 +27,7 @@ from pathlib import Path from bidict import bidict import toml +# import tomlkit # TODO! from .log import get_logger @@ -143,11 +144,11 @@ if _parent_user: _conf_names: set[str] = { - 'brokers', - # 'pps', - 'trades', + 'piker', # god config + 'brokers', # sec backend deatz + # 'trades', # 'watchlists', - 'paper_trades' + # 'paper_trades' } # TODO: probably drop all this super legacy, questrade specific, @@ -191,7 +192,6 @@ def get_conf_path( Contains files such as: - brokers.toml - - pp.toml - watchlists.toml # maybe coming soon ;) @@ -234,26 +234,71 @@ def load( ) if not path.is_file(): - fn: str = _conf_fn_w_ext(conf_name) + if path is None: + fn: str = _conf_fn_w_ext(conf_name) - # try to copy in a template config to the user's directory if - # one exists. - template: Path = repodir() / 'config' / fn - if template.is_file(): - shutil.copyfile(template, path) + # try to copy in a template config to the user's directory if + # one exists. 
+        template: Path = repodir() / 'config' / fn
+        if template.is_file():
+            shutil.copyfile(template, path)

     else:
         # create empty file
         with path.open(mode='x'):
             pass

-    else:
-        with path.open(mode='r'):
-            pass  # touch it

-    config: dict = toml.load(str(path), **tomlkws)
+    with path.open(mode='r') as fp:
+        # TODO: move to tomlkit:
+        # - needs to be fixed to support bidict?
+        # - we need to use our fork's fix to do multiline array
+        #   indenting.
+        config: dict = toml.loads(
+            fp.read(),
+            **tomlkws,
+        )
+
     log.debug(f"Read config file {path}")
     return config, path


+def load_account(
+    brokername: str,
+    acctid: str,
+
+) -> tuple[dict, str]:
+    '''
+    Load an accounting (with positions) file from
+    ~/.config/piker/accounting/account...toml.
+
+    '''
+    legacy_fn: str = f'pps.{brokername}.{acctid}.toml'
+    fn: str = f'account.{brokername}.{acctid}.toml'
+
+    dirpath: Path = _config_dir / 'accounting'
+    config, path = load(path=dirpath / fn)
+
+    if not config:
+        legacypath = dirpath / legacy_fn
+        log.warning(
+            f'Your account file -> {legacypath}\n'
+            f'is using the legacy `pps.` prefix..\n'
+            f'Rewriting contents to new name -> {path}\n'
+            'Please delete the old file!\n'
+        )
+        legacy_config, _ = load(path=legacypath)
+        config.update(legacy_config)
+
+    # XXX: override the presumably previously non-existant
+    # file with legacy's contents.
+    write(
+        config,
+        path=path,
+    )
+
+    return config, path
+
+
+
 def write(
     config: dict,  # toml config as dict

@@ -298,7 +343,7 @@ def write(


 def load_accounts(
-    providers: Optional[list[str]] = None
+    providers: list[str] | None = None

 ) -> bidict[str, Optional[str]]:

From 5278f8b560425b7897b0333a0f17e0b2a3d74a01 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 12 May 2023 13:01:45 -0400
Subject: [PATCH 212/294] Add `.config.load_ledger()` for transaction record files
---
 piker/config.py | 32 ++++++++++++++++++++++++++++++--
 1 file changed, 30 insertions(+), 2 deletions(-)

diff --git a/piker/config.py b/piker/config.py
index 94796b3b..568def08 100644
--- a/piker/config.py
+++ b/piker/config.py
@@ -22,11 +22,13 @@ import platform
 import sys
 import os
 import shutil
+import time
 from typing import Optional
 from pathlib import Path

 from bidict import bidict
 import toml
+import tomli
 # import tomlkit # TODO!

 from .log import get_logger

@@ -220,7 +222,7 @@ def load(

     **tomlkws,

-) -> tuple[dict, str]:
+) -> tuple[dict, Path]:
     '''
     Load config file by name.

@@ -265,7 +267,7 @@ def load_account(
     brokername: str,
     acctid: str,

-) -> tuple[dict, str]:
+) -> tuple[dict, Path]:
     '''
     Load an accounting (with positions) file from
     ~/.config/piker/accounting/account...toml.
@@ -298,6 +300,32 @@ def load_account( return config, path +def load_ledger( + brokername: str, + acctid: str, + +) -> tuple[dict, Path]: + + ldir: Path = _config_dir / 'accounting' / 'ledgers' + if not ldir.is_dir(): + ldir.mkdir() + + fname = f'trades_{brokername}_{acctid}.toml' + fpath: Path = ldir / fname + + if not fpath.is_file(): + log.info( + f'Creating new local trades ledger: {fpath}' + ) + fpath.touch() + + with fpath.open(mode='rb') as cf: + start = time.time() + ledger_dict = tomli.load(cf) + log.debug(f'Ledger load took {time.time() - start}s') + + return ledger_dict, fpath + def write( config: dict, # toml config as dict From 5f79434b234f44432dad2efc80e7c18c39584e17 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 12 May 2023 13:02:29 -0400 Subject: [PATCH 213/294] Use new `.config` helpers for `accounting._pos/._ledger` file loading --- piker/accounting/__init__.py | 8 ++++---- piker/accounting/_ledger.py | 25 +++---------------------- piker/accounting/_pos.py | 11 ++++------- piker/accounting/cli.py | 4 ++-- 4 files changed, 13 insertions(+), 35 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index d8d1fec9..28dd88b4 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -61,7 +61,7 @@ def get_likely_pair( dst: str, bs_mktid: str, -) -> str: +) -> str | None: ''' Attempt to get the likely trading pair matching a given destination asset `dst: str`. @@ -76,9 +76,9 @@ def get_likely_pair( # positions where the src fiat was used to # buy some other dst which was furhter used # to buy another dst..) - log.warning( - f'No src fiat {src} found in {bs_mktid}?' - ) + # log.warning( + # f'No src fiat {src} found in {bs_mktid}?' + # ) return likely_dst = bs_mktid[:src_name_start] diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 8025ec3d..564ba9fe 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -34,7 +34,6 @@ from pendulum import ( datetime, parse, ) -import tomli import toml from .. import config @@ -227,30 +226,12 @@ def open_trade_ledger( name as defined in the user's ``brokers.toml`` config. 
''' - ldir: Path = config._config_dir / 'ledgers' - if not ldir.is_dir(): - ldir.mkdir() - - fname = f'trades_{broker}_{account}.toml' - tradesfile: Path = ldir / fname - - if not tradesfile.is_file(): - log.info( - f'Creating new local trades ledger: {tradesfile}' - ) - tradesfile.touch() - - with tradesfile.open(mode='rb') as cf: - start = time.time() - ledger_dict = tomli.load(cf) - log.info(f'Ledger load took {time.time() - start}s') - cpy = ledger_dict.copy() - + ledger_dict, fpath = config.load_ledger(broker, account) + cpy = ledger_dict.copy() ledger = TransactionLedger( ledger_dict=cpy, - file_path=tradesfile, + file_path=fpath, ) - try: yield ledger finally: diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 12c2e19f..60ba104c 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -187,7 +187,7 @@ class Position(Struct): inline_table = toml.TomlDecoder().get_empty_inline_table() # serialize datetime to parsable `str` - dtstr = inline_table['dt'] = str(data['dt']) + dtstr = inline_table['dt'] = data['dt'].isoformat('T') assert 'Datetime' not in dtstr # insert optional clear fields in column order @@ -670,8 +670,7 @@ class PpTable(Struct): pos: Position for bs_mktid, pos in active.items(): - - # keep the minimal amount of clears that make up this + # NOTE: we only store the minimal amount of clears that make up this # position since the last net-zero state. pos.minimize_clears() pos.ensure_state() @@ -679,7 +678,7 @@ class PpTable(Struct): # serialize to pre-toml form fqme, asdict = pos.to_pretoml() - # assert 'Datetime' not in asdict['dt'] + assert 'Datetime' not in asdict['clears'][0]['dt'] log.info(f'Updating active pp: {fqme}') # XXX: ugh, it's cuz we push the section under @@ -807,9 +806,7 @@ def open_pps( ''' conf: dict conf_path: Path - conf, conf_path = config.load( - f'pps.{brokername}.{acctid}', - ) + conf, conf_path = config.load_account(brokername, acctid) if brokername in conf: log.warning( diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 7e68ce6f..5cdd4a58 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -79,7 +79,7 @@ def broker_init( # enabled.append('piker.data.feed') # non-blocking setup of brokerd service nursery - from ..brokers import _setup_persistent_brokerd + from ..brokers._daemon import _setup_persistent_brokerd return ( start_actor_kwargs, # to `ActorNursery.start_actor()` @@ -217,4 +217,4 @@ def sync( if __name__ == "__main__": - ledger() + ledger() # this is called from ``>> ledger `` From 2865f0efe932348f7819a58a6ec32b049cf64ffb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 12 May 2023 16:05:45 -0400 Subject: [PATCH 214/294] `piker.config`: use `tomlkit` for accounting files We still need to get some patches landed in order to resolve: - https://github.com/sdispater/tomlkit/issues/288 - https://github.com/sdispater/tomlkit/issues/289 - https://github.com/sdispater/tomlkit/issues/290 But, this does work for style preservation and the inline-table style we were previously hacking into the `toml` lib in `.accounting._toml`, which we can pretty much just drop now B) Relates to #496 (pretty much solves it near-term i think?) 
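
For ref, the style-preserving round-trip we're now relying on boils
down to just (a minimal sketch; the toml content is made up):

    import tomlkit

    doc: str = (
        'clears = [\n'
        "    { dt = '2023-05-12T16:05:45', price = 10.0, size = 1.0 },\n"
        ']\n'
    )
    # parse and re-render: `tomlkit` keeps the inline-table styling
    # (and any comments) fully intact B)
    assert tomlkit.dumps(tomlkit.parse(doc)) == doc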
---
 piker/config.py | 52 ++++++++++++++++++++++++++++++++++---------------
 setup.py        |  7 ++++---
 2 files changed, 40 insertions(+), 19 deletions(-)

diff --git a/piker/config.py b/piker/config.py
index 568def08..1e3ce0e6 100644
--- a/piker/config.py
+++ b/piker/config.py
@@ -23,13 +23,19 @@ import sys
 import os
 import shutil
 import time
-from typing import Optional
+from typing import (
+    Callable,
+    MutableMapping,
+)
 from pathlib import Path

 from bidict import bidict
-import toml
-import tomli
-# import tomlkit # TODO!
+import tomlkit
+try:
+    import tomllib
+except ModuleNotFoundError:
+    import tomli as tomllib
+

 from .log import get_logger

@@ -220,6 +226,11 @@ def load(
     conf_name: str = 'brokers',
     path: Path | None = None,

+    decode: Callable[
+        [str | bytes,],
+        MutableMapping,
+    ] = tomllib.loads,
+
     **tomlkws,

 ) -> tuple[dict, Path]:
@@ -250,11 +261,7 @@ def load(
         pass

     with path.open(mode='r') as fp:
-        # TODO: move to tomlkit:
-        # - needs to be fixed to support bidict?
-        # - we need to use our fork's fix to do multiline array
-        #   indenting.
-        config: dict = toml.loads(
+        config: dict = decode(
             fp.read(),
             **tomlkws,
         )
@@ -277,7 +284,10 @@ def load_account(
     fn: str = f'account.{brokername}.{acctid}.toml'

     dirpath: Path = _config_dir / 'accounting'
-    config, path = load(path=dirpath / fn)
+    config, path = load(
+        path=dirpath / fn,
+        decode=tomlkit.parse,
+    )

     if not config:
         legacypath = dirpath / legacy_fn
@@ -287,7 +297,16 @@ def load_account(
             f'Rewriting contents to new name -> {path}\n'
             'Please delete the old file!\n'
         )
-        legacy_config, _ = load(path=legacypath)
+        legacy_config, _ = load(
+            path=legacypath,
+
+            # TODO: move to tomlkit:
+            # - needs to be fixed to support bidict?
+            #   https://github.com/sdispater/tomlkit/issues/289
+            # - we need to use our fork's fix to do multiline array
+            #   indenting.
+ decode=tomlkit.parse, + ) config.update(legacy_config) # XXX: override the presumably previously non-existant @@ -295,6 +314,7 @@ def load_account( write( config, path=path, + fail_empty=False, ) return config, path @@ -321,7 +341,7 @@ def load_ledger( with fpath.open(mode='rb') as cf: start = time.time() - ledger_dict = tomli.load(cf) + ledger_dict = tomlkit.parse(cf.read()) log.debug(f'Ledger load took {time.time() - start}s') return ledger_dict, fpath @@ -362,10 +382,10 @@ def write( f"Writing config `{name}` file to:\n" f"{path}" ) - with path.open(mode='w') as cf: - return toml.dump( + with path.open(mode='w') as fp: + return tomlkit.dump( # preserve style on write B) config, - cf, + fp, **toml_kwargs, ) @@ -373,7 +393,7 @@ def write( def load_accounts( providers: list[str] | None = None -) -> bidict[str, Optional[str]]: +) -> bidict[str, str | None]: conf, path = load() accounts = bidict() diff --git a/setup.py b/setup.py index 59690acd..a3e60cd6 100755 --- a/setup.py +++ b/setup.py @@ -44,15 +44,16 @@ setup( ] }, install_requires=[ - 'toml', - 'tomli', # fastest pure py reader - 'click', + 'tomlkit', # fork & fix for now: + 'tomli', # for pre-3.11 'colorlog', 'attrs', 'pygments', 'colorama', # numba traceback coloring 'msgspec', # performant IPC messaging and structs 'protobuf', + 'typer', + 'rich', # async 'trio', From a62283bae2310f025a70d7c1bb2591e345734564 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 12 May 2023 16:15:12 -0400 Subject: [PATCH 215/294] Drop final use of `toml` 3rd party lib We moved to `tomlkit` as per #496 and this lets us drop the mess that was the inline-table encoder in `.accounting._toml` XD Relates to #496 --- piker/accounting/_pos.py | 28 +++---- piker/accounting/_toml.py | 156 -------------------------------------- 2 files changed, 10 insertions(+), 174 deletions(-) delete mode 100644 piker/accounting/_toml.py diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 60ba104c..f94722d0 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -36,11 +36,8 @@ from typing import ( import pendulum from pendulum import datetime, now +import tomlkit -from ._toml import ( - toml, - PpsEncoder, -) from ._ledger import ( Transaction, iter_by_dt, @@ -180,11 +177,16 @@ class Position(Struct): elif expiry: d['expiry'] = str(expiry) - toml_clears_list: list[dict[str, Any]] = [] + clears_table: tomlkit.Array = tomlkit.array() + clears_table.multiline( + multiline=True, + indent='', + ) # reverse sort so latest clears are at top of section? for tid, data in iter_by_dt(clears): - inline_table = toml.TomlDecoder().get_empty_inline_table() + + inline_table = tomlkit.inline_table() # serialize datetime to parsable `str` dtstr = inline_table['dt'] = data['dt'].isoformat('T') @@ -201,9 +203,9 @@ class Position(Struct): inline_table[k] = data[k] inline_table['tid'] = tid - toml_clears_list.append(inline_table) + clears_table.append(inline_table) - d['clears'] = toml_clears_list + d['clears'] = clears_table return fqme, d @@ -732,19 +734,9 @@ class PpTable(Struct): for entry in list(self.conf): del self.conf[entry] - # TODO: why tf haven't they already done this for inline - # tables smh.. 
- enc = PpsEncoder(preserve=True) - # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table()) - enc.dump_funcs[ - toml.decoder.InlineTableDict - ] = enc.dump_inline_table - config.write( config=self.conf, path=self.conf_path, - encoder=enc, - fail_empty=False ) diff --git a/piker/accounting/_toml.py b/piker/accounting/_toml.py deleted file mode 100644 index 7ac91b06..00000000 --- a/piker/accounting/_toml.py +++ /dev/null @@ -1,156 +0,0 @@ -# piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for pikers) - -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. - -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -''' -TOML codec hacks to make position tables look decent. - -(looking at you "`toml`-lib"..) - -''' -import re - -import toml - - -# TODO: instead see if we can hack tomli and tomli-w to do the same: -# - https://github.com/hukkin/tomli -# - https://github.com/hukkin/tomli-w -class PpsEncoder(toml.TomlEncoder): - ''' - Special "styled" encoder that makes a ``pps.toml`` redable and - compact by putting `.clears` tables inline and everything else - flat-ish. - - ''' - separator = ',' - - def dump_list(self, v): - ''' - Dump an inline list with a newline after every element and - with consideration for denoted inline table types. - - ''' - retval = "[\n" - for u in v: - if isinstance(u, toml.decoder.InlineTableDict): - out = self.dump_inline_table(u) - else: - out = str(self.dump_value(u)) - - retval += " " + out + "," + "\n" - retval += "]" - return retval - - def dump_inline_table(self, section): - """Preserve inline table in its compact syntax instead of expanding - into subsection. - https://github.com/toml-lang/toml#user-content-inline-table - """ - val_list = [] - for k, v in section.items(): - # if isinstance(v, toml.decoder.InlineTableDict): - if isinstance(v, dict): - val = self.dump_inline_table(v) - else: - val = str(self.dump_value(v)) - - val_list.append(k + " = " + val) - - retval = "{ " + ", ".join(val_list) + " }" - return retval - - def dump_sections(self, o, sup): - retstr = "" - if sup != "" and sup[-1] != ".": - sup += '.' - retdict = self._dict() - arraystr = "" - for section in o: - qsection = str(section) - value = o[section] - - if not re.match(r'^[A-Za-z0-9_-]+$', section): - qsection = toml.encoder._dump_str(section) - - # arrayoftables = False - if ( - self.preserve - and isinstance(value, toml.decoder.InlineTableDict) - ): - retstr += ( - qsection - + - " = " - + - self.dump_inline_table(o[section]) - + - '\n' # only on the final terminating left brace - ) - - # XXX: this code i'm pretty sure is just blatantly bad - # and/or wrong.. 
- # if isinstance(o[section], list): - # for a in o[section]: - # if isinstance(a, dict): - # arrayoftables = True - # if arrayoftables: - # for a in o[section]: - # arraytabstr = "\n" - # arraystr += "[[" + sup + qsection + "]]\n" - # s, d = self.dump_sections(a, sup + qsection) - # if s: - # if s[0] == "[": - # arraytabstr += s - # else: - # arraystr += s - # while d: - # newd = self._dict() - # for dsec in d: - # s1, d1 = self.dump_sections(d[dsec], sup + - # qsection + "." + - # dsec) - # if s1: - # arraytabstr += ("[" + sup + qsection + - # "." + dsec + "]\n") - # arraytabstr += s1 - # for s1 in d1: - # newd[dsec + "." + s1] = d1[s1] - # d = newd - # arraystr += arraytabstr - - elif isinstance(value, dict): - retdict[qsection] = o[section] - - elif o[section] is not None: - retstr += ( - qsection - + - " = " - + - str(self.dump_value(o[section])) - ) - - # if not isinstance(value, dict): - if not isinstance(value, toml.decoder.InlineTableDict): - # inline tables should not contain newlines: - # https://toml.io/en/v1.0.0#inline-table - retstr += '\n' - - else: - raise ValueError(value) - - retstr += arraystr - return (retstr, retdict) From df9615505742589fb9f602e1d49a340803915d11 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 13 May 2023 14:06:27 -0400 Subject: [PATCH 216/294] Always allow overruns in sampler context Requires https://github.com/goodboy/tractor/pull/357. Avoid overruns when doing concurrent live feed init over multiple brokers. --- piker/data/_sampling.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index c124c95c..61ec72cb 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -351,7 +351,9 @@ async def register_with_sampler( if open_index_stream: try: - async with ctx.open_stream() as stream: + async with ctx.open_stream( + allow_overruns=True, + ) as stream: if sub_for_broadcasts: subs.add(stream) From 50a4c425d3f06433d3e8f3ad2b7f98e5941b461a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 13 May 2023 16:05:23 -0400 Subject: [PATCH 217/294] Add `touch_if_dne: bool` to `config.load()` So that we aren't creating blank files for legacy configs (as we do name changes or wtv). Further change `.get_conf_path()` to validate against new `account.` prefix and a god `conf.toml` file. --- piker/config.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/piker/config.py b/piker/config.py index 1e3ce0e6..be77709f 100644 --- a/piker/config.py +++ b/piker/config.py @@ -152,11 +152,9 @@ if _parent_user: _conf_names: set[str] = { - 'piker', # god config + 'conf', # god config 'brokers', # sec backend deatz - # 'trades', # - 'watchlists', - # 'paper_trades' + 'watchlists', # (user defined) market lists } # TODO: probably drop all this super legacy, questrade specific, @@ -207,7 +205,7 @@ def get_conf_path( - strats.toml ''' - if 'pps.' not in conf_name: + if 'account.' not in conf_name: assert str(conf_name) in _conf_names fn = _conf_fn_w_ext(conf_name) @@ -223,13 +221,14 @@ def repodir() -> Path: def load( - conf_name: str = 'brokers', + conf_name: str = 'brokers', # appended with .toml suffix path: Path | None = None, decode: Callable[ [str | bytes,], MutableMapping, ] = tomllib.loads, + touch_if_dne: bool = False, **tomlkws, @@ -237,9 +236,13 @@ def load( ''' Load config file by name. + If desired config is not in the top level piker-user config path then + pass the ``path: Path`` explicitly. 
+
+    '''
     path: Path = path or get_conf_path(conf_name)

+    # create the $HOME/.config/piker dir if dne
     if not _config_dir.is_dir():
         _config_dir.mkdir(
             parents=True,
@@ -255,8 +258,9 @@ def load(
         template: Path = repodir() / 'config' / fn
         if template.is_file():
             shutil.copyfile(template, path)
-        else:
-            # create empty file
+
+        # touch an empty file
+        elif touch_if_dne:
             with path.open(mode='x'):
                 pass

From 779c0b73c9bb24b7696ef0bfba3119bbdcee9f85 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 16:07:17 -0400
Subject: [PATCH 218/294] Make `.accounting._ledger` use `tomlkit`

So that styling is preserved on write but requires that we pop `None`
values (in this case any unset `.expiry` transactions) due to `tomlkit`
having no support for writing them as values?
---
 piker/accounting/_ledger.py | 30 ++++++++++++++++++++----------
 1 file changed, 20 insertions(+), 10 deletions(-)

diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index 564ba9fe..b46b46ba 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -34,7 +34,7 @@ from pendulum import (
     datetime,
     parse,
 )
-import toml
+import tomlkit

 from .. import config
 from ..data.types import Struct
@@ -122,17 +122,27 @@ class TransactionLedger(UserDict):

     def write_config(self) -> None:
         '''
-        Render the self.data ledger dict to it's TML file form.
+        Render the self.data ledger dict to its TOML file form.

         '''
+        towrite: dict[str, Any] = self.data.copy()
+
+        for tid, txdict in self.data.items():
+
+            # drop key for non-expiring assets
+            if (
+                'expiry' in txdict
+                and txdict['expiry'] is None
+            ):
+                txdict.pop('expiry')
+
+            # re-write old acro-key
+            fqme = txdict.get('fqsn')
+            if fqme:
+                txdict['fqme'] = fqme
+
         with self.file_path.open(mode='w') as fp:
-
-            # rewrite the key name to fqme if needed
-            fqsn: str = self.data.get('fqsn')
-            if fqsn:
-                self.data['fqme'] = fqsn
-
-            toml.dump(self.data, fp)
+            tomlkit.dump(towrite, fp)

     def update_from_t(
         self,
@@ -239,7 +249,7 @@
         # TODO: show diff output?
         # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
-        log.info(f'Updating ledger for {tradesfile}:\n')
+        log.info(f'Updating ledger for {fpath}:\n')
         ledger.write_config()

From c349d50f2f09b94ad8c9b86905f48bc6fbe48d1c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 16:12:18 -0400
Subject: [PATCH 219/294] Allow creation of empty account files
---
 piker/accounting/_pos.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index f94722d0..90391142 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -737,6 +737,7 @@ class PpTable(Struct):
         config.write(
             config=self.conf,
             path=self.conf_path,
+            fail_empty=False,
         )


From b288d7051a7d2a5b23ecd54475eb0572fcc301ed Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 16:44:28 -0400
Subject: [PATCH 220/294] ib.broker: load account name map as a `bidict` (no `tomlkit` support)
---
 piker/brokers/ib/broker.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index 8379ce5e..476886d3 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -563,7 +563,7 @@ async def trades_dialogue(
     tables: dict[str, PpTable] = {}
     order_msgs: list[Status] = []
     conf = get_config()
-    accounts_def_inv = conf['accounts'].inverse
+    accounts_def_inv: bidict[str, str] = bidict(conf['accounts']).inverse

     with (
         ExitStack() as lstack,

From b572cd1b77681359c472ab8c6b3cf16e27687e35 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 16:45:05 -0400
Subject: [PATCH 221/294] kucoin: store fqme -> mktids table

Instead of pre-converting and mapping piker style fqmes to
`KucoinMktPair`s, make `Client._pairs` keyed by the kucoin native
market ids and also create a `._fqmes2mktids: bidict[str, str]` for
looking up the native pair from the fqme.

Also, adjust any remaining `fqsn` naming to fqme.
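
For ref, the lookup scheme this gives us is roughly the following
(sketch only, pair values made up):

    from bidict import bidict

    fqmes2mktids: bidict[str, str] = bidict()

    # piker style fqme key -> kucoin native mkt id
    fqmes2mktids['btcusdt'] = 'BTC-USDT'

    # native ids now key `._pairs` directly while the inverse
    # table maps back to the fqme:
    assert fqmes2mktids.inverse['BTC-USDT'] == 'btcusdt'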
---
 piker/brokers/kucoin.py | 54 ++++++++++++++++++++++++++---------------
 1 file changed, 34 insertions(+), 20 deletions(-)

diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py
index 015b248c..9cbb5a97 100755
--- a/piker/brokers/kucoin.py
+++ b/piker/brokers/kucoin.py
@@ -41,13 +41,14 @@ from typing import (
 import wsproto
 from uuid import uuid4

+from fuzzywuzzy import process as fuzzy
+from trio_typing import TaskStatus
 import asks
+from bidict import bidict
+import numpy as np
+import pendulum
 import tractor
 import trio
-from trio_typing import TaskStatus
-from fuzzywuzzy import process as fuzzy
-import pendulum
-import numpy as np

 from piker.accounting._mktinfo import (
     Asset,
@@ -228,6 +229,7 @@ class Client:
     def __init__(self) -> None:
         self._config: BrokerConfig | None = get_config()
         self._pairs: dict[str, KucoinMktPair] = {}
+        self._fqmes2mktids: bidict[str, str] = bidict()
        self._bars: list[list[float]] = []
         self._currencies: dict[str, Currency] = {}

@@ -374,15 +376,22 @@ class Client:

     async def _get_pairs(
         self,
-    ) -> dict[str, KucoinMktPair]:
+    ) -> tuple[
+        dict[str, KucoinMktPair],
+        bidict[str, str],
+    ]:
         entries = await self._request('GET', 'symbols')
-        syms = {
-            item['name'].lower().replace('-', ''): KucoinMktPair(**item)
-            for item in entries
-        }
+        log.info(f' {len(entries)} Kucoin market pairs fetched')

-        log.info(f' {len(syms)} Kucoin market pairs fetched')
-        return syms
+        pairs: dict[str, KucoinMktPair] = {}
+        fqmes2mktids: bidict[str, str] = bidict()
+        for item in entries:
+            pair = pairs[item['name']] = KucoinMktPair(**item)
+            fqmes2mktids[
+                item['name'].lower().replace('-', '')
+            ] = pair.name
+
+        return pairs, fqmes2mktids

     async def cache_pairs(
         self,

     ) -> dict[str, KucoinMktPair]:
         '''
-        Get cached pairs and convert keyed symbols into fqsns if ya want
+        Request all market pairs and store them in a local cache.
+
+        Also create a table of piker style fqme -> kucoin symbols.
         '''
         if (
             not self._pairs
             or update
         ):
-            self._pairs.update(await self._get_pairs())
+            pairs, fqmes = await self._get_pairs()
+            self._pairs.update(pairs)
+            self._fqmes2mktids.update(fqmes)

         return self._pairs

@@ -430,7 +443,7 @@ class Client:

     async def _get_bars(
         self,
-        fqsn: str,
+        fqme: str,

         start_dt: datetime | None = None,
         end_dt: datetime | None = None,
@@ -479,7 +492,7 @@ class Client:
         start_dt = int(start_dt.timestamp())
         end_dt = int(end_dt.timestamp())

-        kucoin_sym = fqsn_to_kucoin_sym(fqsn, self._pairs)
+        kucoin_sym = self._fqmes2mktids[fqme]

         url = (
             f'market/candles?type={type}'
@@ -537,12 +550,12 @@ class Client:
         return array


-def fqsn_to_kucoin_sym(
-    fqsn: str,
+def fqme_to_kucoin_sym(
+    fqme: str,
     pairs: dict[str, KucoinMktPair],

 ) -> str:
-    pair_data = pairs[fqsn]
+    pair_data = pairs[fqme]
     return pair_data.baseCurrency + '-' + pair_data.quoteCurrency

@@ -612,8 +625,9 @@ async def get_mkt_info(
     bs_fqme, _, broker = fqme.partition('.')

     pairs: dict[str, KucoinMktPair] = await client.cache_pairs()
-    pair: KucoinMktPair = pairs[bs_fqme]
-    bs_mktid: str = pair.symbol
+    bs_mktid: str = client._fqmes2mktids[bs_fqme]
+    pair: KucoinMktPair = pairs[bs_mktid]
+    assert bs_mktid == pair.symbol

     # pair: KucoinMktPair = await client.pair_info(pair_str)

     assets: dict[str, Currency] = client._currencies

From 2e8268b53e8a1b3c9630139783d0123918a23bbc Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 16:51:11 -0400
Subject: [PATCH 222/294] Allow passing `allow_overruns: bool` to `Services.start_service_task()`
---
 piker/service/_mngr.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/piker/service/_mngr.py b/piker/service/_mngr.py
index 69712c07..e37bb7ec 100644
--- a/piker/service/_mngr.py
+++ b/piker/service/_mngr.py
@@ -56,6 +56,7 @@ class Services:
         name: str,
         portal: tractor.Portal,
         target: Callable,
+        allow_overruns: bool = False,
         **ctx_kwargs,

     ) -> (trio.CancelScope, tractor.Context):
@@ -81,6 +82,7 @@ class Services:
             with trio.CancelScope() as cs:
                 async with portal.open_context(
                     target,
+                    allow_overruns=allow_overruns,
                     **ctx_kwargs,

                 ) as (ctx, first):

From 1f0db3103dbd3e0db29525f15da52357570921e0 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 17:27:45 -0400
Subject: [PATCH 223/294] ib.broker: always cast `asset_type` to `str`
---
 piker/brokers/ib/broker.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index 476886d3..71175b07 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -1227,7 +1227,7 @@ def norm_trade_records(
         pair = MktPair.from_fqme(
             fqme=f'{symbol}.{suffix}.ib',
             bs_mktid=str(conid),
-            _atype=asset_type,
+            _atype=str(asset_type),  # XXX: can't serialize `tomlkit.String`
             price_tick=price_tick,
             # NOTE: for "legacy" assets, volume is normally discreet, not

From cfb125beefd6f59585014c4abbc786f3183f92ff Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 13 May 2023 17:35:46 -0400
Subject: [PATCH 224/294] `.data.feed`: finally solve startup overruns issue

We need to allow overruns during the async multi-broker context
spawning init bc some backends might take longer than others to setup
(eg. binance vs. kucoin) and result in some context (stream) being
overrun by the time we get to the `.open_stream()` phase. Ideally, we
can maybe adjust the concurrent setup to be more of a task-per-provider
style to avoid this in the future - which would also in theory result
in more-immediate per-provider setup in terms of showing ready feeds
asap.
Also, does a bunch of renaming from fqsn -> fqme and drops the lower
casing of input symbols, instead expecting the caller to know what the
data backend it's requesting can handle in terms of symbology.
---
 piker/data/feed.py | 69 +++++++++++++++++++++++++++-------------------
 1 file changed, 41 insertions(+), 28 deletions(-)

diff --git a/piker/data/feed.py b/piker/data/feed.py
index ac6b188a..047bd40d 100644
--- a/piker/data/feed.py
+++ b/piker/data/feed.py
@@ -267,8 +267,6 @@ async def allocate_persistent_feed(
     # TODO: probably make a struct msg type for this as well
     # since eventually we do want to have more efficient IPC..
     first_quote: dict[str, Any]
-
-    symstr = symstr.lower()
     (
         init_msgs,
         first_quote,
@@ -465,9 +463,6 @@ async def open_feed_bus(

     for symbol in symbols:

-        # we always use lower case keys internally
-        symbol = symbol.lower()
-
         # if no cached feed for this symbol has been created for this
         # brokerd yet, start persistent stream and shm writer task in
         # service nursery
@@ -653,7 +648,12 @@ class Feed(Struct):
         brokers: Sequence[str] | None = None,

     ) -> trio.abc.ReceiveChannel:
+        '''
+        Open streams to multiple data providers (``brokers``) and
+        multiplex their msgs onto a common mem chan for
+        only-requires-a-single-thread style consumption.

+        '''
         if brokers is None:
             mods = self.mods
             brokers = list(self.mods)
@@ -739,7 +739,7 @@ async def install_brokerd_search(

 @acm
 async def maybe_open_feed(
-    fqsns: list[str],
+    fqmes: list[str],

     loglevel: Optional[str] = None,

     **kwargs,
@@ -754,12 +754,12 @@ async def maybe_open_feed(
     in a tractor broadcast receiver.

     '''
-    fqsn = fqsns[0]
+    fqme = fqmes[0]

     async with maybe_open_context(
         acm_func=open_feed,
         kwargs={
-            'fqsns': fqsns,
+            'fqmes': fqmes,
             'loglevel': loglevel,
             'tick_throttle': kwargs.get('tick_throttle'),

@@ -767,12 +767,12 @@ async def maybe_open_feed(
             'allow_overruns': kwargs.get('allow_overruns', True),
             'start_stream': kwargs.get('start_stream', True),
         },
-        key=fqsn,
+        key=fqme,

     ) as (cache_hit, feed):
         if cache_hit:
-            log.info(f'Using cached feed for {fqsn}')
+            log.info(f'Using cached feed for {fqme}')

             # add a new broadcast subscription for the quote stream
             # if this feed is likely already in use
@@ -793,7 +793,7 @@ async def maybe_open_feed(

 @acm
 async def open_feed(
-    fqsns: list[str],
+    fqmes: list[str],

     loglevel: str | None = None,

     allow_overruns: bool = True,
@@ -808,9 +808,9 @@ async def open_feed(
     providers: dict[ModuleType, list[str]] = {}
     feed = Feed()

-    for fqsn in fqsns:
-        brokername, *_ = unpack_fqme(fqsn)
-        bfqsn = fqsn.replace('.' + brokername, '')
+    for fqme in fqmes:
+        brokername, *_ = unpack_fqme(fqme)
+        bfqme = fqme.replace('.' + brokername, '')

         try:
             mod = get_brokermod(brokername)
@@ -818,13 +818,13 @@ async def open_feed(
             mod = get_ingestormod(brokername)

         # built a per-provider map to instrument names
-        providers.setdefault(mod, []).append(bfqsn)
+        providers.setdefault(mod, []).append(bfqme)
         feed.mods[mod.name] = mod

     # one actor per brokerd for now
     brokerd_ctxs = []

-    for brokermod, bfqsns in providers.items():
+    for brokermod, bfqmes in providers.items():

         # if no `brokerd` for this backend exists yet we spawn
         # a daemon actor for it.
@@ -843,7 +843,7 @@
     bus_ctxs: list[AsyncContextManager] = []
     for (
         portal,
-        (brokermod, bfqsns),
+        (brokermod, bfqmes),
     ) in zip(portals, providers.items()):

         feed.portals[brokermod] = portal
@@ -868,10 +868,20 @@
             portal.open_context(
                 open_feed_bus,
                 brokername=brokermod.name,
-                symbols=bfqsns,
+                symbols=bfqmes,
                 loglevel=loglevel,
                 start_stream=start_stream,
                 tick_throttle=tick_throttle,
+
+                # XXX: super important to avoid
+                # the brokerd from some other
+                # backend overrunning the task here
+                # bc some other brokerd took longer
+                # to startup before we hit the `.open_stream()`
+                # loop below XD .. really we should try to do each
+                # of these stream open sequences sequentially per
+                # backend? .. need some thot!
+                allow_overruns=True,
             )
         )

@@ -880,16 +890,16 @@
     async with (
         gather_contexts(bus_ctxs) as ctxs,
    ):
-        stream_ctxs = []
+        stream_ctxs: list[tractor.MsgStream] = []
         for (
             (ctx, flumes_msg_dict),
-            (brokermod, bfqsns),
+            (brokermod, bfqmes),
         ) in zip(ctxs, providers.items()):

-            for fqsn, flume_msg in flumes_msg_dict.items():
+            for fqme, flume_msg in flumes_msg_dict.items():
                 flume = Flume.from_msg(flume_msg)
-                assert flume.symbol.fqsn == fqsn
-                feed.flumes[fqsn] = flume
+                assert flume.symbol.fqme == fqme
+                feed.flumes[fqme] = flume

                 # TODO: do we need this?
                 flume.feed = feed
@@ -915,21 +925,24 @@
                 )
             )

+        stream: tractor.MsgStream
+        brokermod: ModuleType
+        fqmes: list[str]
         async with (
             gather_contexts(stream_ctxs) as streams,
         ):
             for (
                 stream,
-                (brokermod, bfqsns),
+                (brokermod, bfqmes),
             ) in zip(streams, providers.items()):

                 assert stream
                 feed.streams[brokermod.name] = stream

-                # apply `brokerd`-common steam to each flume
-                # tracking a symbol from that provider.
-                for fqsn, flume in feed.flumes.items():
-                    if brokermod.name == flume.symbol.broker:
+                # apply `brokerd`-common stream to each flume
+                # tracking a live market feed from that provider.
+                for fqme, flume in feed.flumes.items():
+                    if brokermod.name == flume.mkt.broker:
                         flume.stream = stream

         assert len(feed.mods) == len(feed.portals) == len(feed.streams)

From ebe351e2eec9abcf5a682089036e493b7651e01d Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sun, 14 May 2023 15:13:14 -0400
Subject: [PATCH 225/294] kucoin: raise `DataUnavailable` if we get empty time array at some point?
---
 piker/brokers/kucoin.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py
index 9cbb5a97..2f66d4eb 100755
--- a/piker/brokers/kucoin.py
+++ b/piker/brokers/kucoin.py
@@ -884,6 +884,11 @@ async def open_history_client(

         times = array['time']

+        if not len(times):
+            raise DataUnavailable(
+                f'No more history before {start_dt}?'
+            )
+
         if end_dt is None:
             inow = round(time.time())

From 98c043815a5c69381df50a9a793305f8b364db5f Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sun, 14 May 2023 20:24:19 -0400
Subject: [PATCH 226/294] Woops, implement `Symbol.fqme` same as `MktPair`..
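
Meaning `.fqme` now dot-joins whichever of the `key`, `venue`,
`suffix` and `broker` fields are set; presuming `maybe_cons_tokens()`
drops empty tokens, something like (hypothetical field values):

    # stand-in sketch of the assumed join behaviour, not the real
    # helper from `piker.accounting._mktinfo`:
    def maybe_cons_tokens(tokens: list[str]) -> str:
        return '.'.join(filter(bool, tokens))

    assert maybe_cons_tokens(
        ['mnq', 'cme', '20230616', 'ib']
    ) == 'mnq.cme.20230616.ib'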
--- piker/accounting/_mktinfo.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 27996ae0..8e974ec9 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -552,6 +552,9 @@ class Symbol(Struct): ''' key: str + broker: str = '' + venue: str = '' + # precision descriptors for price and vlm tick_size: Decimal = Decimal('0.01') lot_tick_size: Decimal = Decimal('0.0') @@ -571,9 +574,11 @@ class Symbol(Struct): lot_size = info.get('lot_tick_size', 0.0) return Symbol( + broker=broker, key=mktep, tick_size=tick_size, lot_tick_size=lot_size, + venue=venue, suffix=suffix, broker_info={broker: info}, ) @@ -603,17 +608,13 @@ class Symbol(Struct): return list(self.broker_info.keys())[0] @property - def fqsn(self) -> str: - broker = self.broker - key = self.key - if self.suffix: - tokens = (key, self.suffix, broker) - else: - tokens = (key, broker) - - return '.'.join(tokens).lower() - - fqme = fqsn + def fqme(self) -> str: + return maybe_cons_tokens([ + self.key, # final "pair name" (eg. qqq[/usd], btcusdt) + self.venue, + self.suffix, # includes expiry and other con info + self.broker, + ]) def quantize( self, From 1e1e64f7f983d603196ecd2668b554aec33b0ae5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 15 May 2023 13:30:34 -0400 Subject: [PATCH 227/294] ib: fix op error when `end_dt` is `None`: the first query --- piker/brokers/ib/feed.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 61288a3a..a9d75d55 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -212,7 +212,7 @@ _pacing: str = ( async def wait_on_data_reset( proxy: MethodProxy, reset_type: str = 'data', - timeout: float = float('inf'), + timeout: float = 16, # float('inf'), task_status: TaskStatus[ tuple[ @@ -351,7 +351,10 @@ async def get_bars( bars, bars_array, dt_duration = out - if not bars: + if ( + not bars + and end_dt + ): log.warning( f'History is blank for {dt_duration} from {end_dt}' ) From 12638350346988f7a4b59430988c4731e89609de Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 15 May 2023 15:35:30 -0400 Subject: [PATCH 228/294] ib.api: make `get_sym_details()` and `get_quote()` mutex methods --- piker/brokers/ib/api.py | 41 +++++++++++++++++++---------------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 0ad9683e..f761515b 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -862,7 +862,6 @@ class Client: ) -> tuple[ Contract, - Ticker, ContractDetails, ]: ''' @@ -872,26 +871,23 @@ class Client: ''' contract = (await self.find_contracts(symbol))[0] + details_fute = self.ib.reqContractDetailsAsync(contract) + details = (await details_fute)[0] + return contract, details + + async def get_quote( + self, + contract: Contract, + + ) -> Ticker: + ''' + Return a single (snap) quote for symbol. + + ''' ticker: Ticker = self.ib.reqMktData( contract, snapshot=True, ) - details_fute = self.ib.reqContractDetailsAsync(contract) - details = (await details_fute)[0] - - return contract, ticker, details - - async def get_quote( - self, - symbol: str, - - ) -> tuple[Contract, Ticker, ContractDetails]: - ''' - Return a single quote for symbol. 
- - ''' - contract, ticker, details = await self.get_sym_details(symbol) - ready = ticker.updateEvent # ensure a last price gets filled in before we deliver quote @@ -908,21 +904,22 @@ class Client: else: if not warnset: log.warning( - f'Quote for {symbol} timed out: market is closed?' + f'Quote for {contract} timed out: market is closed?' ) warnset = True else: - log.info(f'Got first quote for {symbol}') + log.info(f'Got first quote for {contract}') break else: if not warnset: log.warning( - f'Symbol {symbol} is not returning a quote ' - 'it may be outside trading hours?') + f'Contract {contract} is not returning a quote ' + 'it may be outside trading hours?' + ) warnset = True - return contract, ticker, details + return ticker # async to be consistent for the client proxy, and cuz why not. def submit_limit( From f20e2d6ee25de46e0c8d5701690813542893fd47 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 15 May 2023 15:35:57 -0400 Subject: [PATCH 229/294] ib.feed: start drafting out `get_mkt_info()` endpoint --- piker/brokers/ib/feed.py | 97 +++++++++++++++++++++++----------------- 1 file changed, 55 insertions(+), 42 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index a9d75d55..eb9fc7aa 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -19,7 +19,10 @@ Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``. """ from __future__ import annotations import asyncio -from contextlib import asynccontextmanager as acm +from contextlib import ( + asynccontextmanager as acm, + nullcontext, +) from decimal import Decimal from dataclasses import asdict from datetime import datetime @@ -59,6 +62,9 @@ from .api import ( Contract, ) from ._util import data_reset_hack +from piker._cacheables import ( + async_lifo_cache, +) # https://interactivebrokers.github.io/tws-api/tick_types.html @@ -733,41 +739,55 @@ def normalize( return data -# TODO! -# async def get_mkt_info( -# fqme: str, +@async_lifo_cache() +async def get_mkt_info( + fqme: str, -# _cache: dict[str, MktPair] = {} + proxy: MethodProxy | None = None, -# ) -> tuple[MktPair, Pair]: +) -> tuple[MktPair, Pair]: -# both = _cache.get(fqme) -# if both: -# return both + # we don't need to split off any fqme broker part? 
+ # bs_fqme, _, broker = fqme.partition('.') -# proxy: MethodProxy -# async with open_data_client() as proxy: + proxy: MethodProxy + if proxy is not None: + client_ctx = nullcontext(proxy) + else: + client_ctx = open_data_client -# pair: Pair = await client.exch_info(fqme.upper()) -# mkt = MktPair( -# dst=Asset( -# name=pair.baseAsset, -# atype='crypto', -# tx_tick=digits_to_dec(pair.baseAssetPrecision), -# ), -# src=Asset( -# name=pair.quoteAsset, -# atype='crypto', -# tx_tick=digits_to_dec(pair.quoteAssetPrecision), -# ), -# price_tick=pair.price_tick, -# size_tick=pair.size_tick, -# bs_mktid=pair.symbol, -# broker='binance', -# ) -# both = mkt, pair -# _cache[fqme] = both -# return both + async with client_ctx as proxy: + + try: + ( + con, # Contract + details, # ContractDetails + ) = await proxy.get_sym_details(symbol=fqme) + except ConnectionError: + log.exception(f'Proxy is ded {proxy._aio_ns}') + raise + + # pair: Pair = await client.exch_info(fqme.upper()) + + # mkt = MktPair( + # dst=Asset( + # name=pair.baseAsset, + # atype='crypto', + # tx_tick=digits_to_dec(pair.baseAssetPrecision), + # ), + # src=Asset( + # name=pair.quoteAsset, + # atype='crypto', + # tx_tick=digits_to_dec(pair.quoteAssetPrecision), + # ), + # price_tick=pair.price_tick, + # size_tick=pair.size_tick, + # bs_mktid=pair.symbol, + # broker='binance', + # ) + + # return both + return con, details async def stream_quotes( @@ -794,18 +814,11 @@ async def stream_quotes( proxy: MethodProxy async with open_data_client() as proxy: - try: - ( - con, # Contract - first_ticker, # Ticker - details, # ContractDetails - ) = await proxy.get_sym_details(symbol=sym) - except ConnectionError: - log.exception(f'Proxy is ded {proxy._aio_ns}') - raise + con, details = await get_mkt_info(sym, proxy=proxy) + first_ticker = await proxy.get_quote(contract=con) first_quote = normalize(first_ticker) - # print(f'first quote: {first_quote}') + log.runtime(f'FIRST QUOTE: {first_quote}') def mk_init_msgs() -> dict[str, dict]: ''' @@ -879,7 +892,7 @@ async def stream_quotes( # TODO: we should instead spawn a task that waits on a feed to start # and let it wait indefinitely..instead of this hard coded stuff. with trio.move_on_after(1): - contract, first_ticker, details = await proxy.get_quote(symbol=sym) + first_ticker = await proxy.get_quote(contract=con) # it might be outside regular trading hours so see if we can at # least grab history. From b096ee3b7a5ef15b3c9067c06af1b30902a9f58c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 16 May 2023 16:30:30 -0400 Subject: [PATCH 230/294] Make `FeedInit.shm_write_opts` an empty dict by default --- piker/data/validate.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/piker/data/validate.py b/piker/data/validate.py index 2b07a094..c295c179 100644 --- a/piker/data/validate.py +++ b/piker/data/validate.py @@ -49,7 +49,11 @@ class FeedInit(Struct, frozen=True): ''' mkt_info: MktPair - shm_write_opts: dict[str, Any] | None = None + + # NOTE: only field we use rn in ``.data.feed`` + # TODO: maybe make a SamplerConfig(Struct)? 
+ shm_write_opts: dict[str, Any] = {} + # 'sum_tick_vlm': True def validate_backend( From 147e1baee967da07017cc541964653ecf6b93716 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 16 May 2023 17:00:15 -0400 Subject: [PATCH 231/294] Remove typo-ed `sum_tick_vlm` config from all crypto backends --- piker/brokers/binance.py | 5 +---- piker/brokers/kraken/feed.py | 7 +------ piker/brokers/kucoin.py | 7 +------ 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py index 778f3aac..69b55dc1 100644 --- a/piker/brokers/binance.py +++ b/piker/brokers/binance.py @@ -569,10 +569,7 @@ async def stream_quotes( # build out init msgs according to latest spec init_msgs.append( - FeedInit( - mkt_info=mkt, - shm_write_opts={'sum_tick_vml': False}, - ) + FeedInit(mkt_info=mkt) ) iter_subids = itertools.count() diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 4d496376..0cc24464 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -334,12 +334,7 @@ async def stream_quotes( for sym_str in symbols: mkt, pair = await get_mkt_info(sym_str) init_msgs.append( - FeedInit( - mkt_info=mkt, - shm_write_opts={ - 'sum_tick_vml': False, - }, - ) + FeedInit(mkt_info=mkt) ) ws_pairs.append(pair.wsname) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index 2f66d4eb..6da6496a 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -685,12 +685,7 @@ async def stream_quotes( for sym_str in symbols: mkt, pair = await get_mkt_info(sym_str) init_msgs.append( - FeedInit( - mkt_info=mkt, - shm_write_opts={ - 'sum_tick_vml': False, - }, - ) + FeedInit(mkt_info=mkt) ) ws: NoBsWs From 07b7d1d2297f7a6d4a4f4eeed0a096654a372f37 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 16 May 2023 17:29:07 -0400 Subject: [PATCH 232/294] ib: implement `FeedInit` style quote stream setup As per the new market info packing schema this patch almost gets it completely compatible and useful via implementing the `get_mkt_info()` backend module endpoint B) There's still some questions around `MktPair.src` since all the contract search machinery in the ib api isn't expecting a fiat currency in the symbol key: for ex. `mnq/usd.cme.20230616.ib` has no handling for the `[/]usd` part. For now i'm just excluding the `.src` since it requires extra parsing on quotes-feed requests even though this is also currently breaking forex pairs (idealpro or wtv). I think ideally we do move to a `dst/src..` style but it's going to require adjustments to all the existing crypto backends.. This also allows dropping the old `mk_init_msgs()` closure. --- piker/brokers/ib/broker.py | 6 ++ piker/brokers/ib/feed.py | 210 +++++++++++++++++++------------------ 2 files changed, 112 insertions(+), 104 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 71175b07..2f4cdb78 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -1164,6 +1164,12 @@ def norm_trade_records( exch = record['exchange'] lexch = record.get('listingExchange') + # NOTE: remove null values since `tomlkit` can't serialize + # them to file. 
+ dnc = record.pop('deltaNeutralContract', False) + if dnc is not None: + record['deltaNeutralContract'] = dnc + suffix = lexch or exch symbol = record['symbol'] diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index eb9fc7aa..eacfca7b 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -58,13 +58,18 @@ from .api import ( open_client_proxies, get_preferred_data_client, Ticker, - RequestError, Contract, + RequestError, ) from ._util import data_reset_hack from piker._cacheables import ( async_lifo_cache, ) +from piker.accounting import ( + Asset, + MktPair, +) +from piker.data.validate import FeedInit # https://interactivebrokers.github.io/tws-api/tick_types.html @@ -115,7 +120,9 @@ async def open_data_client() -> MethodProxy: @acm async def open_history_client( - fqsn: str, + fqme: str, + + # mkt: MktPair | None = None, ) -> tuple[Callable, int]: ''' @@ -134,6 +141,11 @@ async def open_history_client( async with open_data_client() as proxy: + # TODO: maybe strip the `MktPair.src: Asset` key here? + # see the comment below.. + # if mkt is not None: + # fqme: str = mkt.fqme.remove(mkt.src.name) + max_timeout: float = 2. mean: float = 0 count: int = 0 @@ -141,10 +153,10 @@ async def open_history_client( head_dt: None | datetime = None if ( # fx cons seem to not provide this endpoint? - 'idealpro' not in fqsn + 'idealpro' not in fqme ): try: - head_dt = await proxy.get_head_time(fqsn=fqsn) + head_dt = await proxy.get_head_time(fqsn=fqme) except RequestError: head_dt = None @@ -159,7 +171,7 @@ async def open_history_client( query_start = time.time() out, timedout = await get_bars( proxy, - fqsn, + fqme, timeframe, end_dt=end_dt, ) @@ -517,7 +529,9 @@ async def get_bars( return result, data_cs is not None -asset_type_map = { +# re-mapping to piker asset type names +# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113 +_asset_type_map = { 'STK': 'stock', 'OPT': 'option', 'FUT': 'future', @@ -558,7 +572,7 @@ async def _setup_quote_stream( '294', # Trade rate / minute '295', # Vlm rate / minute ), - contract: Optional[Contract] = None, + contract: Contract | None = None, ) -> trio.abc.ReceiveChannel: ''' @@ -745,19 +759,19 @@ async def get_mkt_info( proxy: MethodProxy | None = None, -) -> tuple[MktPair, Pair]: +) -> tuple[MktPair, ibis.ContractDetails]: - # we don't need to split off any fqme broker part? + # XXX: we don't need to split off any fqme broker part? 
# bs_fqme, _, broker = fqme.partition('.') proxy: MethodProxy + get_details: bool = False if proxy is not None: client_ctx = nullcontext(proxy) else: client_ctx = open_data_client async with client_ctx as proxy: - try: ( con, # Contract @@ -767,27 +781,61 @@ async def get_mkt_info( log.exception(f'Proxy is ded {proxy._aio_ns}') raise - # pair: Pair = await client.exch_info(fqme.upper()) + # TODO: more consistent field translation + init_info: dict = {} + atype = _asset_type_map[con.secType] - # mkt = MktPair( - # dst=Asset( - # name=pair.baseAsset, - # atype='crypto', - # tx_tick=digits_to_dec(pair.baseAssetPrecision), - # ), - # src=Asset( - # name=pair.quoteAsset, - # atype='crypto', - # tx_tick=digits_to_dec(pair.quoteAssetPrecision), - # ), - # price_tick=pair.price_tick, - # size_tick=pair.size_tick, - # bs_mktid=pair.symbol, - # broker='binance', - # ) + venue = con.primaryExchange or con.exchange + price_tick: Decimal = Decimal(str(details.minTick)) - # return both - return con, details + if atype == 'stock': + # XXX: GRRRR they don't support fractional share sizes for + # stocks from the API?! + # if con.secType == 'STK': + size_tick = Decimal('1') + else: + size_tick: Decimal = Decimal(str(details.minSize).rstrip('0')) + # |-> TODO: there is also the Contract.sizeIncrement, bt wtf is it? + + # NOTE: this is duplicate from the .broker.norm_trade_records() + # routine, we should factor all this parsing somewhere.. + expiry_str = str(con.lastTradeDateOrContractMonth) + # if expiry: + # expiry_str: str = str(pendulum.parse( + # str(expiry).strip(' ') + # )) + + mkt = MktPair( + dst=Asset( + name=con.symbol.lower(), + atype=atype, + tx_tick=size_tick, + ), + + # TODO: currently we can't pass the fiat src asset because + # then we'll get a `MNQUSD` request for history data.. + # we need to figure out how we're going to handle this (later?) + # but likely we want all backends to eventually handle + # ``dst/src.venue.`` style? + # src=Asset( + # name=str(con.currency), + # atype='fiat', + # tx_tick=Decimal('0.01'), # right? + # ), + + price_tick=price_tick, + size_tick=size_tick, + + bs_mktid=str(con.conId), + venue=str(venue), + expiry=expiry_str, + broker='ib', + + # TODO: options contract info as str? + # contract_info= + ) + + return mkt, details async def stream_quotes( @@ -812,83 +860,36 @@ async def stream_quotes( sym = symbols[0] log.info(f'request for real-time quotes: {sym}') + init_msgs: list[FeedInit] = [] + proxy: MethodProxy + mkt: MktPair + details: ibis.ContractDetails async with open_data_client() as proxy: - con, details = await get_mkt_info(sym, proxy=proxy) + mkt, details = await get_mkt_info( + sym, + proxy=proxy, # passed to avoid implicit client load + ) - first_ticker = await proxy.get_quote(contract=con) - first_quote = normalize(first_ticker) + init_msg = FeedInit(mkt_info=mkt) + + has_vlm: bool = True + if mkt.dst.atype in { + 'forex', + 'index', + 'commodity', + }: + has_vlm = False + # tell sampler config that it shouldn't do vlm summing. + init_msg.shm_write_opts['sum_tick_vlm'] = False + + init_msgs.append(init_msg) + + con: Contract = details.contract + first_ticker: Ticker = await proxy.get_quote(contract=con) + first_quote: dict = normalize(first_ticker) log.runtime(f'FIRST QUOTE: {first_quote}') - def mk_init_msgs() -> dict[str, dict]: - ''' - Collect a bunch of meta-data useful for feed startup and - pack in a `dict`-msg. - - ''' - # pass back some symbol info like min_tick, trading_hours, etc. 
- con: Contract = details.contract - syminfo = asdict(details) - syminfo.update(syminfo['contract']) - - # nested dataclass we probably don't need and that won't IPC - # serialize - syminfo.pop('secIdList') - - # TODO: more consistent field translation - atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']] - - if atype in { - 'forex', - 'index', - 'commodity', - }: - syminfo['no_vlm'] = True - - # XXX: pretty sure we don't need this any more right? - # for stocks it seems TWS reports too small a tick size - # such that you can't submit orders with that granularity? - # min_price_tick = Decimal('0.01') if atype == 'stock' else 0 - # price_tick = max(price_tick, min_tick) - - price_tick: Decimal = Decimal(str(syminfo['minTick'])) - size_tick: Decimal = Decimal(str(syminfo['minSize']).rstrip('0')) - - # XXX: GRRRR they don't support fractional share sizes for - # stocks from the API?! - if con.secType == 'STK': - size_tick = Decimal('1') - - syminfo['price_tick_size'] = price_tick - # NOTE: as you'd expect for "legacy" assets, the "volume - # precision" is normally discreet. - syminfo['lot_tick_size'] = size_tick - - # should be at top level right? - syminfo['bs_mktid'] = con.conId - - # ibclient = proxy._aio_ns.ib.client - # host, port = ibclient.host, ibclient.port - fqsn = first_quote['fqsn'] - - # TODO: for loop through all symbols passed in - init_msgs: dict[str, dict] = { - # pass back token, and bool, signalling if we're the writer - # and that history has been written - sym: { - 'symbol_info': syminfo, - 'fqsn': fqsn, - 'bs_mktid': con.conId, - }, - # 'status': { - # 'data_ep': f'{host}:{port}', - # }, - - } - return init_msgs, syminfo - - init_msgs, syminfo = mk_init_msgs() - # TODO: we should instead spawn a task that waits on a feed to start # and let it wait indefinitely..instead of this hard coded stuff. with trio.move_on_after(1): @@ -954,13 +955,14 @@ async def stream_quotes( nurse.start_soon(reset_on_feed) async with aclosing(stream): - if syminfo.get('no_vlm', False): + # if syminfo.get('no_vlm', False): + if not has_vlm: # generally speaking these feeds don't # include vlm data. - atype = syminfo['asset_type'] + atype = mkt.dst.atype log.info( - f'No-vlm {sym}@{atype}, skipping quote poll' + f'No-vlm {mkt.fqme}@{atype}, skipping quote poll' ) else: From 5c8a45c64a5b2c1656dca6bd0acf55ca7d7bdbaa Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 09:45:00 -0400 Subject: [PATCH 233/294] Fix `MktPair.bs_fqme` to properly strip broker suffix --- piker/accounting/_mktinfo.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 8e974ec9..b01d72fc 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -444,7 +444,8 @@ class MktPair(Struct, frozen=True): FQME sin broker part XD ''' - return self.fqme.rstrip(f'.{self.broker}') + head, _, broker = self.fqme.rpartition('.') + return head @property def fqsn(self) -> str: From ae049eb84f980bee945181d73d00965a43f18121 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 10:19:14 -0400 Subject: [PATCH 234/294] Pass and use `MktPair` throughout history routines Previously we were passing the `fqme: str` which isn't as extensive nor were we able to pass `MktPair` direct to backend history manager-loading routines (which should be able to rely on always receiving it since currently `stream_quotes()` is always called first for setup). 
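As a rough sketch (the helper below is hypothetical; only `MktPair` and
its `fqme`/`bs_fqme` accessors are actual names from this series), the
history layer can now derive whatever keys it needs from the one market
descriptor instead of having multiple strings threaded through it:

    from piker.accounting import MktPair

    def history_keys(mkt: MktPair) -> tuple[str, str]:
        # key used when querying the backend (broker) for frames:
        backend_key: str = mkt.bs_fqme
        # key used for the local shm buffer / tsdb table:
        storage_key: str = mkt.fqme
        return backend_key, storage_key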
This also starts a slight bit of configuration oriented tsdb info loading (via a new `conf.toml`) such that a user can decide to host their (marketstore) db on a remote host and our container spawning and client code will do the right startup automatically based on the config. |-> Related to this I've added some comments about doing storage backend module loading which should get actually written out as part of patches coming in #486 (or something related). Don't allow overruns again in history context since it seems it was never a problem? --- piker/data/feed.py | 11 +++-- piker/data/history.py | 94 ++++++++++++++++++++++++++++--------------- 2 files changed, 70 insertions(+), 35 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 047bd40d..52316e99 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -333,7 +333,7 @@ async def allocate_persistent_feed( manage_history, mod, bus, - fqme, + mkt, some_data_ready, feed_is_live, ) @@ -378,7 +378,12 @@ async def allocate_persistent_feed( # NOTE: if not configured otherwise, we always sum tick volume # values in the OHLCV sampler. - sum_tick_vlm: bool = (init.shm_write_opts or {}).get('sum_tick_vlm', True) + sum_tick_vlm: bool = True + if init.shm_write_opts: + sum_tick_vlm: bool = init.shm_write_opts.get( + 'sum_tick_vlm', + True, + ) # NOTE: if no high-freq sampled data has (yet) been loaded, # seed the buffer with a history datum - this is most handy @@ -525,7 +530,7 @@ async def open_feed_bus( # NOTE we allow this since it's common to have the live # quote feed actor's sampling task push faster then the # the local UI-graphics code during startup. - allow_overruns=True, + # allow_overruns=True, ) as stream, ): diff --git a/piker/data/history.py b/piker/data/history.py index 3e0a3a62..00cc019e 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -38,6 +38,11 @@ import tractor import pendulum import numpy as np +from .. import config +from ..accounting._mktinfo import ( + MktPair, + unpack_fqme, +) from ._util import ( log, ) @@ -84,7 +89,7 @@ def diff_history( async def start_backfill( mod: ModuleType, - bfqsn: str, + mkt: MktPair, shm: ShmArray, timeframe: float, sampler_stream: tractor.MsgStream, @@ -104,7 +109,11 @@ async def start_backfill( tuple[np.ndarray, str] ] config: dict[str, int] - async with mod.open_history_client(bfqsn) as (hist, config): + + bs_fqme: str = mkt.bs_fqme + async with mod.open_history_client( + bs_fqme, + ) as (hist, config): # get latest query's worth of history all the way # back to what is recorded in the tsdb @@ -134,7 +143,7 @@ async def start_backfill( surr = array[-6:] diff_in_mins = round(diff/60., ndigits=2) log.warning( - f'STEP ERROR `{bfqsn}` for period {step_size_s}s:\n' + f'STEP ERROR `{bs_fqme}` for period {step_size_s}s:\n' f'Off by `{diff}` seconds (or `{diff_in_mins}` mins)\n' 'Surrounding 6 time stamps:\n' f'{list(surr["time"])}\n' @@ -161,7 +170,7 @@ async def start_backfill( shm.push(to_push, prepend=True) # TODO: *** THIS IS A BUG *** - # we need to only broadcast to subscribers for this fqsn.. + # we need to only broadcast to subscribers for this fqme.. # otherwise all fsps get reset on every chart.. 
await sampler_stream.send('broadcast_all') @@ -248,7 +257,7 @@ async def start_backfill( ): start_dt = min(starts) log.warning( - f"{bfqsn}: skipping duplicate frame @ {next_start_dt}" + f"{bs_fqme}: skipping duplicate frame @ {next_start_dt}" ) starts[start_dt] += 1 continue @@ -321,7 +330,7 @@ async def start_backfill( f'{start_dt} -> {end_dt}' ) await storage.write_ohlcv( - f'{bfqsn}.{mod.name}', # lul.. + f'{mkt.fqme}', to_push, timeframe, ) @@ -342,7 +351,7 @@ async def start_backfill( async def basic_backfill( bus: _FeedsBus, mod: ModuleType, - bfqsn: str, + mkt: MktPair, shms: dict[int, ShmArray], sampler_stream: tractor.MsgStream, feed_is_live: trio.Event, @@ -361,7 +370,7 @@ async def basic_backfill( partial( start_backfill, mod, - bfqsn, + mkt, shm, timeframe, sampler_stream, @@ -378,8 +387,7 @@ async def tsdb_backfill( marketstore: ModuleType, bus: _FeedsBus, storage: Storage, - fqsn: str, - bfqsn: str, + mkt: MktPair, shms: dict[int, ShmArray], sampler_stream: tractor.MsgStream, feed_is_live: trio.Event, @@ -393,17 +401,17 @@ async def tsdb_backfill( # TODO: this should be used verbatim for the pure # shm backfiller approach below. dts_per_tf: dict[int, datetime] = {} + fqme: str = mkt.fqme # start history anal and load missing new data via backend. for timeframe, shm in shms.items(): # loads a (large) frame of data from the tsdb depending # on the db's query size limit. tsdb_history, first_tsdb_dt, last_tsdb_dt = await storage.load( - fqsn, + fqme, timeframe=timeframe, ) - broker, *_ = unpack_fqme(fqsn) try: ( latest_start_dt, @@ -413,7 +421,7 @@ async def tsdb_backfill( partial( start_backfill, mod, - bfqsn, + mkt, shm, timeframe, sampler_stream, @@ -541,7 +549,7 @@ async def tsdb_backfill( while shm._first.value > 0: tsdb_history = await storage.read_ohlcv( - fqsn, + fqme, timeframe=timeframe, end=tsdb_last_frame_start, ) @@ -599,7 +607,7 @@ async def tsdb_backfill( async def manage_history( mod: ModuleType, bus: _FeedsBus, - fqsn: str, + mkt: MktPair, some_data_ready: trio.Event, feed_is_live: trio.Event, timeframe: float = 60, # in seconds @@ -628,11 +636,12 @@ async def manage_history( name, uuid = uid service = name.rstrip(f'.{mod.name}') + fqme: str = mkt.fqme + # (maybe) allocate shm array for this broker/symbol which will # be used for fast near-term history capture and processing. hist_shm, opened = maybe_open_shm_array( - # key=f'{fqsn}_hist_p{port}', - key=f'piker.{service}[{uuid[:16]}.{fqsn}.hist', + key=f'piker.{service}[{uuid[:16]}.{fqme}.hist', # use any broker defined ohlc dtype: dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), @@ -649,9 +658,7 @@ async def manage_history( ) rt_shm, opened = maybe_open_shm_array( - # key=f'{fqsn}_rt_p{port}', - # key=f'piker.{service}.{fqsn}_rt.{uuid}', - key=f'piker.{service}[{uuid[:16]}.{fqsn}.rt', + key=f'piker.{service}[{uuid[:16]}.{fqme}.rt', # use any broker defined ohlc dtype: dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), @@ -691,23 +698,47 @@ async def manage_history( ) as sample_stream: - log.info('Scanning for existing `marketstored`') - tsdb_is_up = await check_for_service('marketstored') - - bfqsn = fqsn.replace('.' + mod.name, '') - open_history_client = getattr(mod, 'open_history_client', None) + open_history_client = getattr( + mod, + 'open_history_client', + None, + ) assert open_history_client + conf, path = config.load('conf') + tsdbconf = conf['network'].get('tsdb') + + # lookup backend tsdb module by name and load any user service + # settings for connecting to the tsdb service. 
+ tsdb_backend: str = tsdbconf.pop('backend')
+ tsdb_host: str = tsdbconf['host']
+
+ # TODO: import and load storagemod by name
+ # mod = get_storagemod(tsdb_backend)
+ from ..service import marketstore
+
+ tsdb_is_up: bool = False
+ try_remote_tsdb: bool = False
+ if tsdb_host == 'localhost':
+ log.info(f'Scanning for existing `{tsdb_backend}`')
+ tsdb_is_up: bool = await check_for_service(f'{tsdb_backend}d')
+ else:
+ try_remote_tsdb: bool = True
+
 if (
 tsdb_is_up
- and opened
- and open_history_client
+ or try_remote_tsdb
+ and (
+ opened
+ and open_history_client
+ )
 ):
 log.info('Found existing `marketstored`')
- from ..service import marketstore
 async with (
- marketstore.open_storage_client(fqsn)as storage,
+ marketstore.open_storage_client(
+ **tsdbconf
+ ) as storage,
 ):
 # TODO: drop returning the output that we pass in?
 await bus.nursery.start(
@@ -716,8 +747,7 @@
 marketstore,
 bus,
 storage,
- fqsn,
- bfqsn,
+ mkt,
 {
 1: rt_shm,
 60: hist_shm,
@@ -752,7 +782,7 @@
 await basic_backfill(
 bus,
 mod,
- bfqsn,
+ mkt,
 {
 1: rt_shm,
 60: hist_shm,

From 3294defee1a411dfed6032aab807e3cd76a646a1 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 17 May 2023 10:46:32 -0400
Subject: [PATCH 235/294] `fqme` adjustments to marketstore module

Mostly renaming from the old acronym. This also contains necessary
conf.toml loading in order to call `open_storage_client()` which now
does not have default network contact info.
---
 piker/service/marketstore.py | 64 +++++++++++++++++++++---------------
 1 file changed, 37 insertions(+), 27 deletions(-)

diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py
index f2174ad2..4ca496b5 100644
--- a/piker/service/marketstore.py
+++ b/piker/service/marketstore.py
@@ -59,6 +59,7 @@ from ._util import (
 )
 from ..data.feed import maybe_open_feed
 from .._profile import Profiler
+from .. import config


 # ahabd-supervisor and container level config
@@ -332,8 +333,8 @@ def quote_to_marketstore_structarray(

 @acm
 async def get_client(
- host: str = 'localhost',
- port: int = _config['grpc_listen_port'],
+ host: str | None,
+ port: int | None,

 ) -> MarketstoreClient:
 '''
@@ -342,8 +343,8 @@ async def get_client(

 '''
 async with open_marketstore_client(
- host,
- port
+ host or 'localhost',
+ port or _config['grpc_listen_port'],
 ) as client:
 yield client

@@ -407,7 +408,7 @@ class Storage:

 async def load(
 self,
- fqsn: str,
+ fqme: str,
 timeframe: int,

 ) -> tuple[
@@ -418,7 +419,7 @@ class Storage:
 first_tsdb_dt, last_tsdb_dt = None, None
 hist = await self.read_ohlcv(
- fqsn,
+ fqme,
 # on first load we don't need to pull the max
 # history per request size worth.
 limit=3000,
@@ -441,7 +442,7 @@ class Storage:

 async def read_ohlcv(
 self,
- fqsn: str,
+ fqme: str,
 timeframe: int | str,
 end: int | None = None,
 limit: int = int(800e3),
@@ -451,14 +452,14 @@ class Storage:
 client = self.client
 syms = await client.list_symbols()

- if fqsn not in syms:
+ if fqme not in syms:
 return {}

 # use the provided timeframe or 1s by default
 tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])

 params = Params(
- symbols=fqsn,
+ symbols=fqme,
 timeframe=tfstr,
 attrgroup='OHLCV',
 end=end,
@@ -488,7 +489,7 @@ class Storage:
 # TODO: it turns out column access on recarrays is actually slower:
 # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
 # it might make sense to make these structured arrays?
- data_set = result.by_symbols()[fqsn] + data_set = result.by_symbols()[fqme] array = data_set.array # XXX: ensure sample rate is as expected @@ -503,11 +504,11 @@ class Storage: 'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG' f'WIPING HISTORY FOR {ts}s' ) - await self.delete_ts(fqsn, timeframe) + await self.delete_ts(fqme, timeframe) # try reading again.. return await self.read_ohlcv( - fqsn, + fqme, timeframe, end, limit, @@ -537,7 +538,7 @@ class Storage: async def write_ohlcv( self, - fqsn: str, + fqme: str, ohlcv: np.ndarray, timeframe: int, append_and_duplicate: bool = True, @@ -570,7 +571,7 @@ class Storage: # write to db resp = await self.client.write( to_push, - tbk=f'{fqsn}/{tfkey}/OHLCV', + tbk=f'{fqme}/{tfkey}/OHLCV', # NOTE: will will append duplicates # for the same timestamp-index. @@ -593,7 +594,7 @@ class Storage: # write to db resp = await self.client.write( to_push, - tbk=f'{fqsn}/{tfkey}/OHLCV', + tbk=f'{fqme}/{tfkey}/OHLCV', # NOTE: will will append duplicates # for the same timestamp-index. @@ -625,8 +626,8 @@ class Storage: @acm async def open_storage_client( - fqsn: str, - period: Union[int, str | None] = None, # in seconds + host: str, + grpc_port: int, ) -> tuple[Storage, dict[str, np.ndarray]]: ''' @@ -635,7 +636,10 @@ async def open_storage_client( ''' async with ( # eventually a storage backend endpoint - get_client() as client, + get_client( + host=host, + port=grpc_port, + ) as client, ): # slap on our wrapper api yield Storage(client) @@ -643,7 +647,7 @@ async def open_storage_client( @acm async def open_tsdb_client( - fqsn: str, + fqme: str, ) -> Storage: # TODO: real-time dedicated task for ensuring @@ -677,25 +681,31 @@ async def open_tsdb_client( delayed=False, ) + # load any user service settings for connecting to tsdb + conf, path = config.load('conf') + tsdbconf = conf['network'].get('tsdb') + backend = tsdbconf.pop('backend') async with ( - open_storage_client(fqsn) as storage, + open_storage_client( + **tsdbconf, + ) as storage, maybe_open_feed( - [fqsn], + [fqme], start_stream=False, ) as feed, ): - profiler(f'opened feed for {fqsn}') + profiler(f'opened feed for {fqme}') # to_append = feed.hist_shm.array # to_prepend = None - if fqsn: - flume = feed.flumes[fqsn] + if fqme: + flume = feed.flumes[fqme] symbol = flume.symbol if symbol: - fqsn = symbol.fqsn + fqme = symbol.fqme # diff db history with shm and only write the missing portions # ohlcv = flume.hist_shm.array @@ -703,7 +713,7 @@ async def open_tsdb_client( # TODO: use pg profiler # for secs in (1, 60): # tsdb_array = await storage.read_ohlcv( - # fqsn, + # fqme, # timeframe=timeframe, # ) # # hist diffing: @@ -726,7 +736,7 @@ async def open_tsdb_client( # log.info( # f'Writing datums {array.size} -> to tsdb from shm\n' # ) - # await storage.write_ohlcv(fqsn, array) + # await storage.write_ohlcv(fqme, array) # profiler('Finished db writes') From d0ba9a0a587ff9815307179e288525304063b226 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 10:58:12 -0400 Subject: [PATCH 236/294] Start draft `conf.toml` "root" config with tsdb contact info --- config/conf.toml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 config/conf.toml diff --git a/config/conf.toml b/config/conf.toml new file mode 100644 index 00000000..6dde7ee6 --- /dev/null +++ b/config/conf.toml @@ -0,0 +1,4 @@ +[network] +tsdb.backend = 'marketstore' +tsdb.host = 'localhost' +tsdb.grpc_port = 5995 From a44e926c2f70d220da43f713c645f61ef085a022 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 
May 2023 11:59:19 -0400 Subject: [PATCH 237/294] kucoin: handle ws welcome, subs-ack and pong msgs Previously the subscription response handling was a bit sloppy what with ignoring the welcome msg; this now correctly expects the correct startup sequence. Also this avoids warn logging on pong messages by expecting them in the msg loop and further drops the `KucoinMsg` struct and instead changes the msg loop to expect `dict`s and only cast to structs on live feed msgs that we actually process/relay. --- piker/brokers/kucoin.py | 49 ++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index 6da6496a..1e6d2cd0 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -176,17 +176,6 @@ class KucoinL2(Struct, frozen=True): timestamp: float -class KucoinMsg(Struct, frozen=True): - ''' - Generic outer-wrapper for any Kucoin ws msg - - ''' - type: str - topic: str - subject: str - data: list[KucoinTrade | KucoinL2] - - class Currency(Struct, frozen=True): ''' Currency (asset) info: @@ -743,12 +732,14 @@ async def subscribe( 'id': connect_id, 'type': 'subscribe', 'topic': ep, - # 'topic': f'/spotMarket/level2Depth5:{bs_mktid}', 'privateChannel': False, 'response': True, } ) + welcome_msg = await ws.recv_msg() + log.info(f'WS welcome: {welcome_msg}') + for _ in topics: ack_msg = await ws.recv_msg() log.info(f'Sub ACK: {ack_msg}') @@ -782,19 +773,16 @@ async def stream_messages( ''' last_trade_ts: float = 0 + dict_msg: dict[str, Any] async for dict_msg in ws: - if 'subject' not in dict_msg: - log.warn(f'Unhandled message: {dict_msg}') - continue + match dict_msg: + case { + 'subject': 'trade.ticker', + 'data': trade_data_dict, + }: + trade_data = KucoinTrade(**trade_data_dict) - msg = KucoinMsg(**dict_msg) - match msg: - case KucoinMsg( - subject='trade.ticker', - ): - trade_data = KucoinTrade(**msg.data) - - # XXX: Filter for duplicate messages as ws feed will + # XXX: Filter out duplicate messages as ws feed will # send duplicate market state # https://docs.kucoin.com/#level2-5-best-ask-bid-orders if trade_data.time == last_trade_ts: @@ -816,10 +804,11 @@ async def stream_messages( ], } - case KucoinMsg( - subject='level2', - ): - l2_data = KucoinL2(**msg.data) + case { + 'subject': 'level2', + 'data': trade_data_dict, + }: + l2_data = KucoinL2(**trade_data_dict) first_ask = l2_data.asks[0] first_bid = l2_data.bids[0] yield 'l1', { @@ -848,8 +837,12 @@ async def stream_messages( ], } + case {'type': 'pong'}: + # resp to ping task req + continue + case _: - log.warn(f'Unhandled message: {msg}') + log.warn(f'Unhandled message: {dict_msg}') @acm From 12bfabf05668a06a1a2feafc5bc172bb6e008bc6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 16:43:31 -0400 Subject: [PATCH 238/294] Expose `.accounting.unpack_fqme()` --- piker/accounting/__init__.py | 1 + piker/clearing/_client.py | 2 +- piker/clearing/_paper_engine.py | 2 +- piker/data/feed.py | 2 +- piker/ui/_app.py | 2 +- tests/test_ems.py | 2 +- tests/test_feeds.py | 2 +- 7 files changed, 7 insertions(+), 6 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index 28dd88b4..94779319 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -37,6 +37,7 @@ from ._mktinfo import ( dec_digits, digits_to_dec, MktPair, + unpack_fqme, ) log = get_logger(__name__) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index a9f0fb23..7c3a9882 100644 --- 
a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -30,7 +30,7 @@ from tractor.trionics import broadcast_receiver from ._util import ( log, # sub-sys logger ) -from ..accounting._mktinfo import unpack_fqme +from ..accounting import unpack_fqme from ..data.types import Struct from ..service import maybe_open_emsd from ._messages import ( diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 56e04577..373f5bcf 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -52,7 +52,7 @@ from ..accounting import ( open_pps, ) from ..data._normalize import iterticks -from ..accounting._mktinfo import unpack_fqme +from ..accounting import unpack_fqme from ._util import ( log, # sub-sys logger ) diff --git a/piker/data/feed.py b/piker/data/feed.py index 52316e99..32ad49d0 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -70,7 +70,7 @@ from .history import ( ) from .ingest import get_ingestormod from .types import Struct -from ..accounting._mktinfo import ( +from ..accounting import ( MktPair, unpack_fqme, ) diff --git a/piker/ui/_app.py b/piker/ui/_app.py index a1e31a6e..13f19f40 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -28,7 +28,7 @@ from ..service import maybe_spawn_brokerd from . import _event from ._exec import run_qtractor from ..data.feed import install_brokerd_search -from ..accounting._mktinfo import unpack_fqme +from ..accounting import unpack_fqme from . import _search from ._chart import GodWidget from ..log import get_logger diff --git a/tests/test_ems.py b/tests/test_ems.py index ec9aefb6..414bc906 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -39,7 +39,7 @@ from piker.clearing import ( open_ems, OrderClient, ) -from piker.accounting._mktinfo import ( +from piker.accounting import ( unpack_fqme, ) from piker.accounting import ( diff --git a/tests/test_feeds.py b/tests/test_feeds.py index df854a79..371a6e89 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -14,7 +14,7 @@ from piker.data import ( open_feed, ) from piker.data.flows import Flume -from piker.accounting._mktinfo import ( +from piker.accounting import ( unpack_fqme, ) From 89e8a834bf29a7aec1c56669313c60287d2de129 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 16:47:15 -0400 Subject: [PATCH 239/294] Support fqme rendering *without* the src key Since most (legacy) stock brokers design their symbology without including the target exchange's source asset name - normally a fiat currency like USD - this adds an option for rendering market endpoints without that token for simpler use in backends for such brokers. As an example IB doesn't expect a `mnq/usd.cme.ib` symbol and instead presumes that since the CME lists all assets in USD then the source asset is implied. Impl details: - add `MktPair.pair: str` which replaces `.key` as a better name. - offer a `without_src: bool` to a new `.get_fqme()` getter method which will render everything the same minus the src token. - expose the new flag through both the new `.get_fqme()` and `.get_bs_fqme()` methods and wrap those both under the original property names `.bs_fqme` and `.fqme`. 
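For example, a standalone sketch of the intended token assembly (not
the real `MktPair` impl, which also folds in the `.suffix`
expiry/contract-info token via `maybe_cons_tokens()`):

    def render_fqme(
        dst: str,
        src: str,
        venue: str,
        broker: str,
        without_src: bool = False,
    ) -> str:
        # drop the src (usually fiat) token entirely when requested:
        pair: str = dst if without_src else f'{dst}{src}'
        return '.'.join(tok for tok in (pair, venue, broker) if tok)

    assert render_fqme('mnq', 'usd', 'cme', 'ib') == 'mnqusd.cme.ib'
    # legacy-broker style, src fiat implied:
    assert render_fqme('mnq', 'usd', 'cme', 'ib', without_src=True) == 'mnq.cme.ib'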
---
 piker/accounting/_mktinfo.py | 46 ++++++++++++++++++++++++++----------
 1 file changed, 34 insertions(+), 12 deletions(-)

diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index b01d72fc..73f2f49a 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -356,6 +356,13 @@ class MktPair(Struct, frozen=True):
 '''
 The "endpoint key" for this market.

+ '''
+ return self.pair
+
+ @property
+ def pair(self) -> str:
+ '''
+ The "endpoint asset pair key" for this market.
 Eg. mnq/usd or btc/usdt or xmr/btc

 In most other tina platforms this is referred to as the
@@ -390,13 +397,16 @@ class MktPair(Struct, frozen=True):

 return maybe_cons_tokens(field_strs)

- # NOTE: the main idea behind an fqme is to map a "market address"
- # to some endpoint from a transaction provider (eg. a broker) such
- # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker
- # market address" maps 1-to-1 to some broker trading endpoint.
- # @cached_property
- @property
- def fqme(self) -> str:
+ def get_fqme(
+ self,
+
+ # NOTE: allow dropping the source asset from the
+ # market endpoint's pair key. Eg. to change
+ # mnq/usd.<> -> mnq.<> which is useful when
+ # searching (legacy) stock exchanges.
+ without_src: bool = False,
+
+ ) -> str:
 '''
 Return the fully qualified market endpoint-address for the pair of
 transacting assets.
@@ -431,21 +441,33 @@ class MktPair(Struct, frozen=True):
 https://github.com/pikers/piker/issues/467

 '''
+ key: str = self.pair if not without_src else str(self.dst)
 return maybe_cons_tokens([
- self.key, # final "pair name" (eg. qqq[/usd], btcusdt)
+ key, # final "pair name" (eg. qqq[/usd], btcusdt)
 self.venue,
 self.suffix, # includes expiry and other con info
 self.broker,
 ])

- @property
- def bs_fqme(self) -> str:
+ # NOTE: the main idea behind an fqme is to map a "market address"
+ # to some endpoint from a transaction provider (eg. a broker) such
+ # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker
+ # market address" maps 1-to-1 to some broker trading endpoint.
+ # @cached_property
+ fqme = property(get_fqme)
+
+ def get_bs_fqme(
+ self,
+ **kwargs,
+ ) -> str:
 '''
 FQME sin broker part XD

 '''
- head, _, broker = self.fqme.rpartition('.')
- return head
+ sin_broker, *_ = self.get_fqme(**kwargs).rpartition('.')
+ return sin_broker
+
+ bs_fqme = property(get_bs_fqme)

 @property
 def fqsn(self) -> str:

From 907eaa68cb992c3a57f341983135377e2220b430 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 17 May 2023 16:52:15 -0400
Subject: [PATCH 240/294] Pass `mkt: MktPair` to `.open_history_client()`

Since porting all backends to the new `FeedInit` + `MktPair` + `Asset`
style init, we can now just directly pass a `MktPair` instance to the
history endpoint(s) since it's always called *after* the live feed
`.stream_quotes()` ep B)

This has a lot of benefits, including allowing brokerd backends to have
more flexible, pre-processed market endpoint meta-data that piker has
already validated; it also makes handling special cases, such as forex
pairs from legacy brokers, much more straightforward XD

This first pass changes all crypto backends to expect the new input; ib
will come next after handling said special cases..
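A minimal sketch of a new-style backend endpoint (the body is
illustrative only, but the signature and the `mkt.bs_fqme` lookup
mirror the crypto backends changed below):

    from contextlib import asynccontextmanager as acm
    from typing import AsyncGenerator, Callable

    from piker.accounting import MktPair

    @acm
    async def open_history_client(
        mkt: MktPair,  # was `symbol: str` prior to this patch

    ) -> AsyncGenerator[Callable, None]:
        # each backend derives its own provider-side query key
        # from the (pre-validated) market descriptor:
        symbol: str = mkt.bs_fqme

        async def get_ohlc(timeframe: float, end_dt=None, start_dt=None):
            ...  # query the provider for a frame of bars for `symbol`

        yield get_ohlc, {}  # (history getter, queue/rate config)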
---
 piker/brokers/binance.py      |  4 +++-
 piker/brokers/deribit/feed.py |  3 ++-
 piker/brokers/kraken/feed.py  |  9 ++++++---
 piker/brokers/kucoin.py       |  6 +++++-
 piker/data/history.py         | 12 ++++--------
 5 files changed, 20 insertions(+), 14 deletions(-)

diff --git a/piker/brokers/binance.py b/piker/brokers/binance.py
index 69b55dc1..48b28d6f 100644
--- a/piker/brokers/binance.py
+++ b/piker/brokers/binance.py
@@ -476,10 +476,12 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:

 @acm
 async def open_history_client(
- symbol: str,
+ mkt: MktPair,

 ) -> tuple[Callable, int]:

+ symbol: str = mkt.bs_fqme
+
 # TODO implement history getter for the new storage layer.
 async with open_cached_client('binance') as client:

diff --git a/piker/brokers/deribit/feed.py b/piker/brokers/deribit/feed.py
index deb0422f..a9420402 100644
--- a/piker/brokers/deribit/feed.py
+++ b/piker/brokers/deribit/feed.py
@@ -62,9 +62,10 @@ log = get_logger(__name__)

 @acm
 async def open_history_client(
- instrument: str,
+ mkt: MktPair,
 ) -> tuple[Callable, int]:

+ instrument: str = mkt.bs_fqme
 # TODO implement history getter for the new storage layer.
 async with open_cached_client('deribit') as client:

diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py
index 0cc24464..526590fe 100644
--- a/piker/brokers/kraken/feed.py
+++ b/piker/brokers/kraken/feed.py
@@ -25,8 +25,9 @@ from contextlib import (
 from datetime import datetime
 from typing import (
 Any,
- Optional,
+ AsyncGenerator,
 Callable,
+ Optional,
 )
 import time
@@ -214,9 +215,11 @@ def normalize(

 @acm
 async def open_history_client(
- symbol: str,
+ mkt: MktPair,

-) -> tuple[Callable, int]:
+) -> AsyncGenerator[Callable, None]:
+
+ symbol: str = mkt.bs_fqme

 # TODO implement history getter for the new storage layer.
 async with open_cached_client('kraken') as client:
diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py
index 1e6d2cd0..3f8b71d0 100755
--- a/piker/brokers/kucoin.py
+++ b/piker/brokers/kucoin.py
@@ -847,8 +847,12 @@ async def stream_messages(

 @acm
 async def open_history_client(
- symbol: str,
+ mkt: MktPair,
+
 ) -> AsyncGenerator[Callable, None]:
+
+ symbol: str = mkt.bs_fqme
+
 async with open_cached_client('kucoin') as client:
 log.info('Attempting to open kucoin history client')

diff --git a/piker/data/history.py b/piker/data/history.py
index 00cc019e..182408f3 100644
--- a/piker/data/history.py
+++ b/piker/data/history.py
@@ -39,7 +39,7 @@ import pendulum
 import numpy as np

 from ..
import config -from ..accounting._mktinfo import ( +from ..accounting import ( MktPair, unpack_fqme, ) @@ -54,9 +54,6 @@ from ._sharedmem import ( ShmArray, _secs_in_day, ) -from ..accounting._mktinfo import ( - unpack_fqme, -) from ._source import base_iohlc_dtype from ._sampling import ( open_sample_stream, @@ -110,9 +107,8 @@ async def start_backfill( ] config: dict[str, int] - bs_fqme: str = mkt.bs_fqme async with mod.open_history_client( - bs_fqme, + mkt, ) as (hist, config): # get latest query's worth of history all the way @@ -143,7 +139,7 @@ async def start_backfill( surr = array[-6:] diff_in_mins = round(diff/60., ndigits=2) log.warning( - f'STEP ERROR `{bs_fqme}` for period {step_size_s}s:\n' + f'STEP ERROR `{mkt.fqme}` for period {step_size_s}s:\n' f'Off by `{diff}` seconds (or `{diff_in_mins}` mins)\n' 'Surrounding 6 time stamps:\n' f'{list(surr["time"])}\n' @@ -257,7 +253,7 @@ async def start_backfill( ): start_dt = min(starts) log.warning( - f"{bs_fqme}: skipping duplicate frame @ {next_start_dt}" + f"{mkt.fqme}: skipping duplicate frame @ {next_start_dt}" ) starts[start_dt] += 1 continue From b2bf0b06f256952001da0eb8e9f146b2694ca866 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 17 May 2023 16:56:04 -0400 Subject: [PATCH 241/294] ib.api: wholesale fqsn -> fqme renames --- piker/brokers/ib/api.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index f761515b..e9a9dc49 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -388,7 +388,7 @@ class Client: async def bars( self, - fqsn: str, + fqme: str, # EST in ISO 8601 format is required... below is EPOCH start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00", @@ -405,7 +405,7 @@ class Client: ) -> tuple[BarDataList, np.ndarray, pendulum.Duration]: ''' - Retreive OHLCV bars for a fqsn over a range to the present. + Retreive OHLCV bars for a fqme over a range to the present. ''' # See API docs here: @@ -425,7 +425,7 @@ class Client: _enters += 1 - contract = (await self.find_contracts(fqsn))[0] + contract = (await self.find_contracts(fqme))[0] bars_kwargs.update(getattr(contract, 'bars_kwargs', {})) bars = await self.ib.reqHistoricalDataAsync( @@ -676,10 +676,10 @@ class Client: currency = '' - # fqsn parsing stage + # fqme parsing stage # ------------------ if '.ib' in pattern: - from ..accounting._mktinfo import unpack_fqme + from piker.accounting import unpack_fqme _, symbol, venue, expiry = unpack_fqme(pattern) else: @@ -841,14 +841,14 @@ class Client: async def get_head_time( self, - fqsn: str, + fqme: str, ) -> datetime: ''' Return the first datetime stamp for ``contract``. ''' - contract = (await self.find_contracts(fqsn))[0] + contract = (await self.find_contracts(fqme))[0] return await self.ib.reqHeadTimeStampAsync( contract, whatToShow='TRADES', @@ -1081,7 +1081,7 @@ def con2fqsn( ) -> tuple[str, bool]: ''' - Convert contracts to fqsn-style strings to be used both in symbol-search + Convert contracts to fqme-style strings to be used both in symbol-search matching and as feed tokens passed to the front end data deed layer. Previously seen contracts are cached by id. 
From 97b2b25256c9866d9c65951a6fe530be3cc82746 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 18 May 2023 01:25:04 -0400 Subject: [PATCH 242/294] Avoid import cycle in clearing client --- piker/clearing/_client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 7c3a9882..a0218023 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -30,7 +30,6 @@ from tractor.trionics import broadcast_receiver from ._util import ( log, # sub-sys logger ) -from ..accounting import unpack_fqme from ..data.types import Struct from ..service import maybe_open_emsd from ._messages import ( @@ -238,6 +237,8 @@ async def open_ems( broker control client-API. ''' + # TODO: prolly hand in the `MktPair` instance directly here as well! + from piker.accounting import unpack_fqme broker, mktep, venue, suffix = unpack_fqme(fqme) async with maybe_open_emsd( From a1a10676cd5c1bacba0ac314deb0dc7bea3ac2e5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 18 May 2023 11:27:31 -0400 Subject: [PATCH 243/294] Go back to `tomllib` for ledger loading, it's wayy faster --- piker/accounting/_ledger.py | 3 +++ piker/config.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index b46b46ba..9c3f80a4 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -35,6 +35,7 @@ from pendulum import ( parse, ) import tomlkit +import tomli from .. import config from ..data.types import Struct @@ -141,8 +142,10 @@ class TransactionLedger(UserDict): if fqme: txdict['fqme'] = fqme + print(f'WRITING LEDGER {self.file_path}') with self.file_path.open(mode='w') as fp: tomlkit.dump(towrite, fp) + print(f'FINISHED WRITING LEDGER {self.file_path}') def update_from_t( self, diff --git a/piker/config.py b/piker/config.py index be77709f..b708e252 100644 --- a/piker/config.py +++ b/piker/config.py @@ -345,7 +345,7 @@ def load_ledger( with fpath.open(mode='rb') as cf: start = time.time() - ledger_dict = tomlkit.parse(cf.read()) + ledger_dict = tomllib.load(cf) log.debug(f'Ledger load took {time.time() - start}s') return ledger_dict, fpath From 8d7a9fa19ec5f3551d8f5a7468d0361546923928 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 18 May 2023 12:01:30 -0400 Subject: [PATCH 244/294] Make `MktPair.pair()` a meth, allow passing in a delim character --- piker/accounting/_mktinfo.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 73f2f49a..313fa338 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -359,8 +359,10 @@ class MktPair(Struct, frozen=True): ''' return self.pair - @property - def pair(self) -> str: + def pair( + self, + delim_char: str | None = None, + ) -> str: ''' The "endpoint asset pair key" for this market. Eg. mnq/usd or btc/usdt or xmr/btc @@ -372,7 +374,7 @@ class MktPair(Struct, frozen=True): return maybe_cons_tokens( [str(self.dst), str(self.src)], - delim_char='', + delim_char=delim_char or '', ) @property @@ -405,6 +407,7 @@ class MktPair(Struct, frozen=True): # mnq/usd.<> -> mnq.<> which is useful when # searching (legacy) stock exchanges. 
 without_src: bool = False,
+ delim_char: str | None = None,

 ) -> str:
 '''
@@ -441,7 +444,12 @@ class MktPair(Struct, frozen=True):
 https://github.com/pikers/piker/issues/467

 '''
- key: str = self.pair if not without_src else str(self.dst)
+ key: str = (
+ self.pair(delim_char=delim_char)
+ if not without_src
+ else str(self.dst)
+ )
+
 return maybe_cons_tokens([
 key, # final "pair name" (eg. qqq[/usd], btcusdt)
 self.venue,

From 89d24cfe339774cc348f543f6eba8aed40e99607 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 18 May 2023 12:52:34 -0400
Subject: [PATCH 245/294] Oof, fix closed position popping by fqme..

---
 piker/accounting/_pos.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 90391142..9a07614c 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -724,8 +724,9 @@ class PpTable(Struct):
 # drop any entries that are computed as net-zero
 # we don't care about storing in the pps file.
 if closed:
- for fqme in closed:
- self.conf.pop(fqme, None)
+ bs_mktid: str
+ for bs_mktid, pos in closed.items():
+ self.conf.pop(pos.symbol.fqme)

 # if there are no active position entries according
 # to the toml dump output above, then clear the config
@@ -879,7 +880,7 @@ def open_pps(
 trans: list[Transaction] = []
 for clears_table in toml_clears_list:
- tid = clears_table.pop('tid')
+ tid = clears_table.get('tid')
 dtstr = clears_table['dt']
 dt = pendulum.parse(dtstr)
 clears_table['dt'] = dt

From c6da09f3c65627324a08e513b626ddc581f1b68c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 18 May 2023 18:07:12 -0400
Subject: [PATCH 246/294] Add fast(er), time-sorted ledger records

Turns out that reading **and** writing with `tomlkit` is just wayy too
slow for large documents like ledger files so move to using the `tomli`
sibling pkg `tomli-w` which seems to improve the latency a lot, though
obviously in the longer run we're likely going to want:
- a better algorithm for only back loading records using as little
  history as possible
- a different serialization format for production maybe something like
  apache parquet?

The only issue with using a non-style-preserving writer is that we
don't necessarily get TOML conf ordering for free (without first
ordering it ourselves), and thus this patch also adds much more general
date-time sorting machinery which is now **required** when using
`open_trade_ledger()` via a `tx_sort: Callable`. By default we now
provide `.accounting._ledger.iter_by_dt()` (exposed in the subpkg mod)
which conducts dynamic "datetime key detection" based parsing of
records driven by a `parsers: dict[str, Callable]` input table. The
default should handle most use cases including all currently supported
live backends (kraken, ib) as well as our paper engine ledger-records
format.

Granulars:
- adjust `Position.iter_clears()` to use new `iter_by_dt(key=lambda ..)`
  signature.
- add `tomli-w` to setup and our `tomlkit` fork to requirements file.
- move `.write_config()` to bottom of class defn.
- fix closed pos popping to not error if pp was already popped..
---
 piker/accounting/__init__.py |   1 +
 piker/accounting/_ledger.py  | 146 +++++++++++++++++++++--------------
 piker/accounting/_pos.py     |  18 ++++-
 requirements.txt             |   5 ++
 setup.py                     |   6 +-
 5 files changed, 113 insertions(+), 63 deletions(-)

diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py
index 94779319..4c4a0ca1 100644
--- a/piker/accounting/__init__.py
+++ b/piker/accounting/__init__.py
@@ -22,6 +22,7 @@ for tendiez.
from ..log import get_logger from ._ledger import ( + iter_by_dt, Transaction, TransactionLedger, open_trade_ledger, diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 9c3f80a4..4073b3a6 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -32,10 +32,11 @@ from typing import ( from pendulum import ( datetime, + DateTime, + from_timestamp, parse, ) -import tomlkit -import tomli +import tomli_w # for fast ledger writing from .. import config from ..data.types import Struct @@ -116,37 +117,13 @@ class TransactionLedger(UserDict): self, ledger_dict: dict, file_path: Path, + tx_sort: Callable, ) -> None: self.file_path = file_path + self.tx_sort = tx_sort super().__init__(ledger_dict) - def write_config(self) -> None: - ''' - Render the self.data ledger dict to it's TOML file form. - - ''' - towrite: dict[str, Any] = self.data.copy() - - for tid, txdict in self.data.items(): - - # drop key for non-expiring assets - if ( - 'expiry' in txdict - and txdict['expiry'] is None - ): - txdict.pop('expiry') - - # re-write old acro-key - fqme = txdict.get('fqsn') - if fqme: - txdict['fqme'] = fqme - - print(f'WRITING LEDGER {self.file_path}') - with self.file_path.open(mode='w') as fp: - tomlkit.dump(towrite, fp) - print(f'FINISHED WRITING LEDGER {self.file_path}') - def update_from_t( self, t: Transaction, @@ -182,6 +159,7 @@ class TransactionLedger(UserDict): # and instead call it for each entry incrementally: # normer = mod.norm_trade_record(txdict) + # TODO: use tx_sort here yah? for tid, txdict in self.data.items(): # special field handling for datetimes # to ensure pendulum is used! @@ -195,22 +173,20 @@ class TransactionLedger(UserDict): # the ``.sys: MktPair`` info, so skip. continue - yield ( - tid, - Transaction( - fqsn=fqme, - tid=txdict['tid'], - dt=dt, - price=txdict['price'], - size=txdict['size'], - cost=txdict.get('cost', 0), - bs_mktid=txdict['bs_mktid'], + tx = Transaction( + fqsn=fqme, + tid=txdict['tid'], + dt=dt, + price=txdict['price'], + size=txdict['size'], + cost=txdict.get('cost', 0), + bs_mktid=txdict['bs_mktid'], - # TODO: change to .sys! - sym=mkt, - expiry=parse(expiry) if expiry else None, - ) + # TODO: change to .sys! + sym=mkt, + expiry=parse(expiry) if expiry else None, ) + yield tid, tx def to_trans( self, @@ -223,12 +199,81 @@ class TransactionLedger(UserDict): ''' return dict(self.iter_trans(**kwargs)) + def write_config( + self, + + ) -> None: + ''' + Render the self.data ledger dict to it's TOML file form. + + ''' + cpy = self.data.copy() + towrite: dict[str, Any] = {} + for tid, trans in cpy.items(): + + # drop key for non-expiring assets + txdict = towrite[tid] = self.data[tid] + if ( + 'expiry' in txdict + and txdict['expiry'] is None + ): + txdict.pop('expiry') + + # re-write old acro-key + fqme = txdict.get('fqsn') + if fqme: + txdict['fqme'] = fqme + + with self.file_path.open(mode='wb') as fp: + tomli_w.dump(towrite, fp) + + +def iter_by_dt( + records: dict[str, Any], + + # NOTE: parsers are looked up in the insert order + # so if you know that the record stats show some field + # is more common then others, stick it at the top B) + parsers: dict[tuple[str], Callable] = { + 'dt': None, # parity case + 'datetime': parse, # datetime-str + 'time': from_timestamp, # float epoch + }, + key: Callable | None = None, + +) -> Iterator[tuple[str, dict]]: + ''' + Iterate entries of a ``records: dict`` table sorted by entry recorded + datetime presumably set at the ``'dt'`` field in each entry. 
+ + ''' + txs = records.items() + + def dyn_parse_to_dt( + pair: tuple[str, dict], + ) -> DateTime: + _, txdict = pair + k, v, parser = next( + (k, txdict[k], parsers[k]) for k in parsers if k in txdict + ) + + return parser(v) if parser else v + + for tid, data in sorted( + records.items(), + key=key or dyn_parse_to_dt, + ): + yield tid, data + @cm def open_trade_ledger( broker: str, account: str, + # default is to sort by detected datetime-ish field + tx_sort: Callable = iter_by_dt, + ) -> Generator[dict, None, None]: ''' Indempotently create and read in a trade log file from the @@ -244,6 +289,7 @@ def open_trade_ledger( ledger = TransactionLedger( ledger_dict=cpy, file_path=fpath, + tx_sort=tx_sort, ) try: yield ledger @@ -254,19 +300,3 @@ def open_trade_ledger( # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries log.info(f'Updating ledger for {fpath}:\n') ledger.write_config() - - -def iter_by_dt( - clears: dict[str, Any], - -) -> Iterator[tuple[str, dict]]: - ''' - Iterate entries of a ``clears: dict`` table sorted by entry recorded - datetime presumably set at the ``'dt'`` field in each entry. - - ''' - for tid, data in sorted( - list(clears.items()), - key=lambda item: item[1]['dt'], - ): - yield tid, data diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 9a07614c..2d3700bc 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -307,10 +307,16 @@ class Position(Struct): datetime-stamped order. ''' - return iter_by_dt(self.clears) + # sort on the already existing datetime that should have + # been generated for the entry's table + return iter_by_dt( + self.clears, + key=lambda entry: entry[1]['dt'] + ) def calc_ppu( self, + # include transaction cost in breakeven price # and presume the worst case of the same cost # to exit this transaction (even though in reality @@ -726,7 +732,15 @@ class PpTable(Struct): if closed: bs_mktid: str for bs_mktid, pos in closed.items(): - self.conf.pop(pos.symbol.fqme) + fqme: str = pos.symbol.fqme + if fqme in self.conf: + self.conf.pop(fqme) + else: + # TODO: we reallly need a diff set of + # loglevels/colors per subsys. + log.warning( + f'Recent position for {fqme} was closed!' + ) # if there are no active position entries according # to the toml dump output above, then clear the config diff --git a/requirements.txt b/requirements.txt index 25951629..ba4dc620 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,3 +13,8 @@ # ``asyncvnc`` for sending interactions to ib-gw inside docker -e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc + + +# ``tomlkit`` for account files and configs; we've +# added some new features that need to get upstreamed: +-e git+https://github.com/pikers/tomlkit.git@writing_docs_tweaks#egg=tomlkit diff --git a/setup.py b/setup.py index a3e60cd6..c63622b2 100755 --- a/setup.py +++ b/setup.py @@ -44,8 +44,9 @@ setup( ] }, install_requires=[ - 'tomlkit', # fork & fix for now: + # 'tomlkit', # fork & fix for now.. 'tomli', # for pre-3.11 + 'tomli-w', # for fast ledger writing 'colorlog', 'attrs', 'pygments', @@ -65,8 +66,7 @@ setup( # normally pinned to particular git hashes.. 
# 'tractor', # 'asyncvnc', - # 'pyqtgraph', - # anyio-marketstore # mkts tsdb client + # 'anyio-marketstore', # mkts tsdb client # brokers 'asks', # for non-ws rest apis From 53003618cbc1b7e9a59f990e5bc2489c1a59fb9a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 00:16:58 -0400 Subject: [PATCH 247/294] Add longer timeout on brokerd ctx cancel; seems to work? --- piker/accounting/cli.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 5cdd4a58..76cbc1ab 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -209,7 +209,14 @@ def sync( ) console.print(summary) - await brokerd_ctx.cancel() + + # exit via ctx cancellation. + await brokerd_ctx.cancel(timeout=1) + # TODO: once ported to newer tractor branch we should + # be able to do a loop like this: + # while brokerd_ctx.cancel_called_remote is None: + # await trio.sleep(0.01) + # await brokerd_ctx.cancel() await portal.cancel_actor() From 60a6f3269c81221f7bc730d0d58215c0c4dd6a84 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 09:41:44 -0400 Subject: [PATCH 248/294] ib: use flex report datetime sort Since `open_trade_ledger()` now requires a sort we pass in a combo of the std `pendulum.parse()` for API records and a custom flex parser for flex entries pulled offline. Add special handling for `MktPair.src` such that when it's a fiat (like it should always be for most legacy assets) we try to get the fqme without that `.src` token (i.e. not mnqusd) to avoid breaking roundtripping of live feed requests (due to new symbology) as well as the current tsdb table key set.. Do a wholesale renaming of fqsn -> fqme in most of the rest of the backend modules. --- piker/brokers/ib/_flex_reports.py | 4 +++ piker/brokers/ib/_util.py | 4 +++ piker/brokers/ib/api.py | 15 +++++++--- piker/brokers/ib/broker.py | 16 ++++++++--- piker/brokers/ib/feed.py | 47 ++++++++++++++++++------------- 5 files changed, 58 insertions(+), 28 deletions(-) diff --git a/piker/brokers/ib/_flex_reports.py b/piker/brokers/ib/_flex_reports.py index d26e0e3f..2f34d037 100644 --- a/piker/brokers/ib/_flex_reports.py +++ b/piker/brokers/ib/_flex_reports.py @@ -35,6 +35,10 @@ from piker.accounting import ( def parse_flex_dt( record: str, ) -> pendulum.datetime: + ''' + Parse stupid flex record datetime stamps for the `dateTime` field.. + + ''' date, ts = record.split(';') dt = pendulum.parse(date) ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}' diff --git a/piker/brokers/ib/_util.py b/piker/brokers/ib/_util.py index 4c3bbb34..585ea18d 100644 --- a/piker/brokers/ib/_util.py +++ b/piker/brokers/ib/_util.py @@ -170,6 +170,10 @@ async def vnc_click_hack( def i3ipc_xdotool_manual_click_hack() -> None: i3 = i3ipc.Connection() + + # TODO: might be worth offering some kinda api for grabbing + # the window id from the pid? + # https://stackoverflow.com/a/2250879 t = i3.get_tree() orig_win_id = t.find_focused().window diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index e9a9dc49..e64f085f 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -14,11 +14,10 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" -``ib`` core API client machinery; mostly sane wrapping around -``ib_insync``. +''' +Core API client machinery; mostly sane/useful wrapping around `ib_insync`.. 
-""" +''' from __future__ import annotations from contextlib import ( asynccontextmanager as acm, @@ -1450,6 +1449,14 @@ class MethodProxy: while not chan.closed(): # send through method + ``kwargs: dict`` as pair msg = await chan.receive() + + # TODO: implement reconnect functionality like + # in our `.data._web_bs.NoBsWs` + # try: + # msg = await chan.receive() + # except ConnectionError: + # self.reset() + # print(f'NEXT MSG: {msg}') # TODO: py3.10 ``match:`` syntax B) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 2f4cdb78..25a82ef3 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -60,6 +60,7 @@ from piker.accounting import ( Position, Transaction, open_trade_ledger, + iter_by_dt, open_pps, PpTable, ) @@ -434,9 +435,9 @@ async def update_and_audit_msgs( if validate and p.size: # raise ValueError( log.error( - f'UNEXPECTED POSITION says IB:\n' - 'Maybe they LIQUIDATED YOU or are missing ledger txs?\n' - f'PIKER:\n{pikerfmtmsg}\n\n' + f'UNEXPECTED POSITION says IB => {msg.symbol}\n' + 'Maybe they LIQUIDATED YOU or are missing ledger entries?\n' + f'{pikerfmtmsg}\n\n' ) msgs.append(msg) @@ -581,6 +582,13 @@ async def trades_dialogue( open_trade_ledger( 'ib', acctid, + tx_sort=partial( + iter_by_dt, + parsers={ + 'dateTime': parse_flex_dt, + 'datetime': pendulum.parse, + }, + ), ) ) @@ -654,7 +662,7 @@ async def trades_dialogue( # update position table with latest ledger from all # gathered transactions: ledger file + api records. - trans = norm_trade_records(ledger) + trans: dict[str, Transaction] = norm_trade_records(ledger) table.update_from_trans(trans) # process pp value reported from ib's system. we only diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index eacfca7b..28db4eee 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -120,9 +120,7 @@ async def open_data_client() -> MethodProxy: @acm async def open_history_client( - fqme: str, - - # mkt: MktPair | None = None, + mkt: MktPair, ) -> tuple[Callable, int]: ''' @@ -130,7 +128,7 @@ async def open_history_client( that takes in ``pendulum.datetime`` and returns ``numpy`` arrays. ''' - # TODO: + # TODO: mostly meta-data processing to drive shm and tsdb storage.. # - add logic to handle tradable hours and only grab # valid bars in the range? # - we want to avoid overrunning the underlying shm array buffer and @@ -139,12 +137,21 @@ async def open_history_client( # the shm size will be driven by user config and available sys # memory. + # IB's internal symbology does not expect the "source asset" in + # the "symbol name", what we call the "market name". This is + # common in most legacy market brokers since it's presumed that + # given a certain stock exchange, listed assets are traded + # "from" a particular source fiat, normally something like USD. + if ( + mkt.src + and mkt.src.atype == 'fiat' + ): + fqme: str = mkt.get_bs_fqme(without_src=True) + else: + fqme = mkt.bs_fqme + async with open_data_client() as proxy: - # TODO: maybe strip the `MktPair.src: Asset` key here? - # see the comment below.. - # if mkt is not None: - # fqme: str = mkt.fqme.remove(mkt.src.name) max_timeout: float = 2. 
mean: float = 0 @@ -156,7 +163,7 @@ async def open_history_client( 'idealpro' not in fqme ): try: - head_dt = await proxy.get_head_time(fqsn=fqme) + head_dt = await proxy.get_head_time(fqme=fqme) except RequestError: head_dt = None @@ -310,7 +317,7 @@ _failed_resets: int = 0 async def get_bars( proxy: MethodProxy, - fqsn: str, + fqme: str, timeframe: int, # blank to start which tells ib to look up the latest datum @@ -354,7 +361,7 @@ async def get_bars( while _failed_resets < max_failed_resets: try: out = await proxy.bars( - fqsn=fqsn, + fqme=fqme, end_dt=end_dt, sample_period_s=timeframe, @@ -380,7 +387,7 @@ async def get_bars( continue if bars_array is None: - raise SymbolNotFound(fqsn) + raise SymbolNotFound(fqme) first_dt = pendulum.from_timestamp( bars[0].date.timestamp()) @@ -411,7 +418,7 @@ async def get_bars( if 'No market data permissions for' in msg: # TODO: signalling for no permissions searches raise NoData( - f'Symbol: {fqsn}', + f'Symbol: {fqme}', ) elif ( @@ -437,7 +444,7 @@ async def get_bars( if nodatas_count >= max_nodatas: raise DataUnavailable( - f'Presuming {fqsn} has no further history ' + f'Presuming {fqme} has no further history ' f'after {max_nodatas} tries..' ) @@ -701,7 +708,7 @@ def normalize( # check for special contract types con = ticker.contract - fqsn, calc_price = con2fqsn(con) + fqme, calc_price = con2fqsn(con) # convert named tuples to dicts so we send usable keys new_ticks = [] @@ -731,9 +738,9 @@ def normalize( # serialize for transport data = asdict(ticker) - # generate fqsn with possible specialized suffix + # generate fqme with possible specialized suffix # for derivatives, note the lowercase. - data['symbol'] = data['fqsn'] = fqsn + data['symbol'] = data['fqme'] = fqme # convert named tuples to dicts for transport tbts = data.get('tickByTicks') @@ -1002,9 +1009,9 @@ async def stream_quotes( # last = time.time() async for ticker in stream: quote = normalize(ticker) - fqsn = quote['fqsn'] - # print(f'sending {fqsn}:\n{quote}') - await send_chan.send({fqsn: quote}) + fqme = quote['fqme'] + # print(f'sending {fqme}:\n{quote}') + await send_chan.send({fqme: quote}) # ugh, clear ticks since we've consumed them ticker.ticks = [] From e4e4cacef3746442c700f5a3249054dff77b8af6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 11:52:36 -0400 Subject: [PATCH 249/294] .data.feed: Less stringency with fqme matching `Flume.mkt.fqme` might not be exactly the same as the local version now since we've had to add some hacks to certain backends (cough ib) to handle `MktPair.src` not being set as an `Asset` (yet). --- piker/data/feed.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/piker/data/feed.py b/piker/data/feed.py index 32ad49d0..0a8fd0f6 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -815,7 +815,7 @@ async def open_feed( for fqme in fqmes: brokername, *_ = unpack_fqme(fqme) - bfqme = fqme.replace('.' + brokername, '') + bs_fqme = fqme.replace('.' 
+ brokername, '') try: mod = get_brokermod(brokername) @@ -823,7 +823,7 @@ async def open_feed( mod = get_ingestormod(brokername) # built a per-provider map to instrument names - providers.setdefault(mod, []).append(bfqme) + providers.setdefault(mod, []).append(bs_fqme) feed.mods[mod.name] = mod # one actor per brokerd for now @@ -903,7 +903,7 @@ async def open_feed( for fqme, flume_msg in flumes_msg_dict.items(): flume = Flume.from_msg(flume_msg) - assert flume.symbol.fqme == fqme + # assert flume.symbol.fqme == fqme feed.flumes[fqme] = flume # TODO: do we need this? From 660a94d610019f5790dab03cd6e6c6a1a36c966e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 11:54:36 -0400 Subject: [PATCH 250/294] Don't expect `conf.toml`'s network section For testing this is particularly true until we offer a template with whatever (likely localhost) settings planned to ship. --- piker/data/history.py | 52 +++++++++++++++++++++++++++---------------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/piker/data/history.py b/piker/data/history.py index 182408f3..f8260c86 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -325,8 +325,19 @@ async def start_backfill( f'Writing {ln} frame to storage:\n' f'{start_dt} -> {end_dt}' ) + + if mkt.dst.atype != 'crypto': + # for now, our table key schema is not including + # the dst[/src] source asset token. + col_sym_key: str = mkt.get_fqme( + delim_char='', + without_src=True, + ) + else: + col_sym_key: str = mkt.get_fqme(delim_char='') + await storage.write_ohlcv( - f'{mkt.fqme}', + col_sym_key, to_push, timeframe, ) @@ -632,7 +643,7 @@ async def manage_history( name, uuid = uid service = name.rstrip(f'.{mod.name}') - fqme: str = mkt.fqme + fqme: str = mkt.get_fqme(delim_char='') # (maybe) allocate shm array for this broker/symbol which will # be used for fast near-term history capture and processing. @@ -701,25 +712,28 @@ async def manage_history( ) assert open_history_client - conf, path = config.load('conf') - tsdbconf = conf['network'].get('tsdb') - - # lookup backend tsdb module by name and load any user service - # settings for connecting to the tsdb service. - tsdb_backend: str = tsdbconf.pop('backend') - tsdb_host: str = tsdbconf['host'] - - # TODO: import and load storagemod by name - # mod = get_storagemod(tsdb_backend) - from ..service import marketstore - tsdb_is_up: bool = False try_remote_tsdb: bool = False - if tsdb_host == 'localhost': - log.info('Scanning for existing `{tsbd_backend}`') - tsdb_is_up: bool = await check_for_service(f'{tsdb_backend}d') - else: - try_remote_tsdb: bool = True + + conf, path = config.load('conf', touch_if_dne=True) + net = conf.get('network') + if net: + tsdbconf = net.get('tsdb') + + # lookup backend tsdb module by name and load any user service + # settings for connecting to the tsdb service. 
+ tsdb_backend: str = tsdbconf.pop('backend') + tsdb_host: str = tsdbconf['host'] + + # TODO: import and load storagemod by name + # mod = get_storagemod(tsdb_backend) + from ..service import marketstore + if tsdb_host == 'localhost': + log.info('Scanning for existing `{tsbd_backend}`') + tsdb_is_up: bool = await check_for_service(f'{tsdb_backend}d') + + else: + try_remote_tsdb: bool = True if ( tsdb_is_up From 2f2d612b5ff353139b268a4561f691f95d1c6a79 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 11:57:37 -0400 Subject: [PATCH 251/294] Add todo to switch to `dst/src` delim --- piker/accounting/_mktinfo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 313fa338..341ff2db 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -374,6 +374,7 @@ class MktPair(Struct, frozen=True): return maybe_cons_tokens( [str(self.dst), str(self.src)], + # TODO: make the default '/' delim_char=delim_char or '', ) From 588770d0346baad93d4e4fda634639ae8af9363a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 12:00:13 -0400 Subject: [PATCH 252/294] ib: rename lingering fqsn -> fqme --- piker/brokers/ib/api.py | 16 ++++++++-------- piker/brokers/ib/broker.py | 14 +++++++------- piker/brokers/ib/feed.py | 7 ++++--- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index e64f085f..8636ddd2 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -500,7 +500,7 @@ class Client: # nested dataclass we probably don't need and that won't # IPC serialize.. d.secIdList = '' - key, calc_price = con2fqsn(d.contract) + key, calc_price = con2fqme(d.contract) details[key] = d return details @@ -656,7 +656,7 @@ class Client: self._cons[conid] = con return con - def parse_patt2fqsn( + def parse_patt2fqme( self, pattern: str, @@ -721,7 +721,7 @@ class Client: ) -> Contract: if pattern is not None: - symbol, currency, exch, expiry = self.parse_patt2fqsn( + symbol, currency, exch, expiry = self.parse_patt2fqme( pattern, ) sectype = '' @@ -1074,7 +1074,7 @@ class Client: return self.ib.positions(account=account) -def con2fqsn( +def con2fqme( con: Contract, _cache: dict[int, (str, bool)] = {} @@ -1140,12 +1140,12 @@ def con2fqsn( if expiry: suffix += f'.{expiry}' - fqsn_key = symbol.lower() + fqme_key = symbol.lower() if suffix: - fqsn_key = '.'.join((fqsn_key, suffix)).lower() + fqme_key = '.'.join((fqme_key, suffix)).lower() - _cache[con.conId] = fqsn_key, calc_price - return fqsn_key, calc_price + _cache[con.conId] = fqme_key, calc_price + return fqme_key, calc_price # per-actor API ep caching diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 25a82ef3..800baed7 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -81,7 +81,7 @@ from piker.accounting import ( ) from .api import ( _accounts2clients, - con2fqsn, + con2fqme, log, get_config, open_client_proxies, @@ -100,7 +100,7 @@ def pack_position( ]: con = pos.contract - fqsn, calc_price = con2fqsn(con) + fqme, calc_price = con2fqme(con) # TODO: options contracts into a sane format.. 
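    # NOTE: `con2fqme()` maps an `ib_insync` contract to piker's
    # fully-qualified-market-endpoint key which becomes the
    # position msg's `symbol` below.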
return ( @@ -108,7 +108,7 @@ def pack_position( BrokerdPosition( broker='ib', account=pos.account, - symbol=fqsn, + symbol=fqme, currency=con.currency, size=float(pos.position), avg_price=float(pos.avgCost) / float(con.multiplier or 1.0), @@ -468,11 +468,11 @@ async def aggr_open_orders( # TODO: in the case of the SMART venue (aka ib's # router-clearing sys) we probably should handle - # showing such orders overtop of the fqsn for the + # showing such orders overtop of the fqme for the # primary exchange, how to map this easily is going # to be a bit tricky though? deats = await proxy.con_deats(contracts=[con]) - fqsn = list(deats)[0] + fqme = list(deats)[0] reqid = order.orderId @@ -490,7 +490,7 @@ async def aggr_open_orders( action=action, exec_mode='live', oid=str(reqid), - symbol=fqsn, + symbol=fqme, account=accounts_def.inverse[order.account], price=order.lmtPrice, size=size, @@ -1224,7 +1224,7 @@ def norm_trade_records( elif asset_type == 'STK': asset_type: str = 'stock' - # try to build out piker fqsn from record. + # try to build out piker fqme from record. expiry = ( record.get('lastTradeDateOrContractMonth') or record.get('expiry') diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 28db4eee..b13e963a 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -50,7 +50,7 @@ from .._util import ( ) from .api import ( # _adhoc_futes_set, - con2fqsn, + con2fqme, log, load_aio_clients, ibis, @@ -708,7 +708,7 @@ def normalize( # check for special contract types con = ticker.contract - fqme, calc_price = con2fqsn(con) + fqme, calc_price = con2fqme(con) # convert named tuples to dicts so we send usable keys new_ticks = [] @@ -823,7 +823,8 @@ async def get_mkt_info( # then we'll get a `MNQUSD` request for history data.. # we need to figure out how we're going to handle this (later?) # but likely we want all backends to eventually handle - # ``dst/src.venue.`` style? + # ``dst/src.venue.`` style !? + # src=Asset( # name=str(con.currency), # atype='fiat', From c93d119873863897a07d0fc6d48742d7337d5cde Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 12:00:41 -0400 Subject: [PATCH 253/294] Move tmpdir creation into separate fixture Since `.config.load()` was changed to not touch conf files by default (without explicitly setting `touch_if_dne: bool`), this ensures both the global module value is set and the `brokers.toml` file exists before every test. --- tests/conftest.py | 49 +++++++++++++++++++++++++++++++++------------ tests/test_feeds.py | 5 ++--- 2 files changed, 38 insertions(+), 16 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d5b0d697..364be44a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ import os from pathlib import Path import pytest -import pytest_trio import tractor from piker import ( config, @@ -149,19 +148,49 @@ async def _open_test_pikerd( @pytest.fixture -def open_test_pikerd( - request: pytest.FixtureRequest, +def tmpconfdir( tmp_path: Path, - loglevel: str, -): +) -> Path: + ''' + Ensure the `brokers.toml` file for the test run exists + since we changed it to not touch files by default. + + Here we override the default (in the user dir) and + set the global module var the same as we do inside + the `tmpconfdir` fixture. + + ''' tmpconfdir: Path = tmp_path / '_testing' tmpconfdir.mkdir() - tmpconfdir_str: str = str(tmpconfdir) + # touch the `brokers.toml` file since it won't + # exist in the tmp test dir by default! 
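+    # (`config.load()` was changed to no longer touch conf files
+    # unless `touch_if_dne=True` is explicitly passed.)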
# override config dir in the root actor (aka # this top level testing process). from piker import config - config._config_dir = tmpconfdir + config._config_dir: Path = tmpconfdir + conf, path = config.load( + touch_if_dne=True, + ) + + return tmpconfdir + + # NOTE: the `tmp_dir` fixture will wipe any files older then 3 test + # sessions by default: + # https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory + # BUT, if we wanted to always wipe conf dir and all contained files, + # rmtree(str(tmp_path)) + + +@pytest.fixture +def open_test_pikerd( + request: pytest.FixtureRequest, + tmp_path: Path, + tmpconfdir: Path, + loglevel: str, +): + + tmpconfdir_str: str = str(tmpconfdir) # NOTE: on linux the tmp config dir is generally located at: # /tmp/pytest-of-/pytest-/test_/ @@ -199,12 +228,6 @@ def open_test_pikerd( debug_mode=debug_mode, ) - # NOTE: the `tmp_dir` fixture will wipe any files older then 3 test - # sessions by default: - # https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory - # BUT, if we wanted to always wipe conf dir and all contained files, - # rmtree(str(tmp_path)) - # TODO: teardown checks such as, # - no leaked subprocs or shm buffers # - all requested container service are torn down diff --git a/tests/test_feeds.py b/tests/test_feeds.py index 371a6e89..07d368fa 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -7,8 +7,8 @@ from pprint import pprint from typing import AsyncContextManager import pytest -# import tractor import trio + from piker.data import ( ShmArray, open_feed, @@ -37,7 +37,7 @@ def test_multi_fqsn_feed( open_test_pikerd: AsyncContextManager, fqmes: set[str], loglevel: str, - ci_env: bool + ci_env: bool, ): ''' Start a real-time data feed for provided fqme and pull @@ -103,7 +103,6 @@ def test_multi_fqsn_feed( for fqme, quote in quotes.items(): cntr[fqme] += 1 - # await tractor.breakpoint() flume = feed.flumes[fqme] ohlcv: ShmArray = flume.rt_shm hist_ohlcv: ShmArray = flume.hist_shm From 31a00eca9404aca3004b60f4bad0681a49370ceb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 12:13:00 -0400 Subject: [PATCH 254/294] Rename fqsn -> fqme in ui mods --- piker/fsp/_engine.py | 10 ++-- piker/ui/_app.py | 6 +-- piker/ui/_chart.py | 10 ++-- piker/ui/_dataviz.py | 4 +- piker/ui/_display.py | 116 ++++++++++++++++++++--------------------- piker/ui/_fsp.py | 14 ++--- piker/ui/_position.py | 10 ++-- piker/ui/_search.py | 22 ++++---- piker/ui/order_mode.py | 18 +++---- 9 files changed, 105 insertions(+), 105 deletions(-) diff --git a/piker/fsp/_engine.py b/piker/fsp/_engine.py index 3e500f46..13dcfccb 100644 --- a/piker/fsp/_engine.py +++ b/piker/fsp/_engine.py @@ -104,14 +104,14 @@ async def fsp_compute( disabled=True ) - fqsn = symbol.fqme + fqme = symbol.fqme out_stream = func( # TODO: do we even need this if we do the feed api right? # shouldn't a local stream do this before we get a handle # to the async iterable? it's that or we do some kinda # async itertools style? 
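        # NOTE: `filter_quotes_by_sym()` just drops any quote msgs
        # which aren't keyed by the passed fqme.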
- filter_quotes_by_sym(fqsn, quote_stream), + filter_quotes_by_sym(fqme, quote_stream), # XXX: currently the ``ohlcv`` arg flume.rt_shm, @@ -271,7 +271,7 @@ async def cascade( ctx: tractor.Context, # data feed key - fqsn: str, + fqme: str, src_shm_token: dict, dst_shm_token: tuple[str, np.dtype], @@ -329,7 +329,7 @@ async def cascade( # open a data feed stream with requested broker feed: Feed async with data.feed.maybe_open_feed( - [fqsn], + [fqme], # TODO throttle tick outputs from *this* daemon since # it'll emit tons of ticks due to the throttle only @@ -339,7 +339,7 @@ async def cascade( ) as feed: - flume = feed.flumes[fqsn] + flume = feed.flumes[fqme] symbol = flume.symbol assert src.token == flume.rt_shm.token profiler(f'{func}: feed up') diff --git a/piker/ui/_app.py b/piker/ui/_app.py index 13f19f40..ee4faf57 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -100,8 +100,8 @@ async def _async_main( starting_done = sbar.open_status('starting ze sexy chartz') needed_brokermods: dict[str, ModuleType] = {} - for fqsn in syms: - brokername, *_ = unpack_fqme(fqsn) + for fqme in syms: + brokername, *_ = unpack_fqme(fqme) needed_brokermods[brokername] = brokers[brokername] async with ( @@ -120,7 +120,7 @@ async def _async_main( # this internally starts a ``display_symbol_data()`` task above order_mode_ready = await godwidget.load_symbols( - fqsns=syms, + fqmes=syms, loglevel=loglevel, ) diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index 464fb8b7..13ec23c3 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -152,7 +152,7 @@ class GodWidget(QWidget): def set_chart_symbols( self, - group_key: tuple[str], # of form . + group_key: tuple[str], # of form . all_linked: tuple[LinkedSplits, LinkedSplits], # type: ignore ) -> None: @@ -170,7 +170,7 @@ class GodWidget(QWidget): async def load_symbols( self, - fqsns: list[str], + fqmes: list[str], loglevel: str, reset: bool = False, @@ -183,7 +183,7 @@ class GodWidget(QWidget): ''' # NOTE: for now we use the first symbol in the set as the "key" # for the overlay of feeds on the chart. - group_key: tuple[str] = tuple(fqsns) + group_key: tuple[str] = tuple(fqmes) all_linked = self.get_chart_symbols(group_key) order_mode_started = trio.Event() @@ -217,7 +217,7 @@ class GodWidget(QWidget): self._root_n.start_soon( display_symbol_data, self, - fqsns, + fqmes, loglevel, order_mode_started, ) @@ -548,7 +548,7 @@ class LinkedSplits(QWidget): # be no distinction since we will have multiple symbols per # view as part of "aggregate feeds". self.chart = self.add_plot( - name=symbol.fqsn, + name=symbol.fqme, shm=shm, flume=flume, style=style, diff --git a/piker/ui/_dataviz.py b/piker/ui/_dataviz.py index 7f1ef41e..721483e1 100644 --- a/piker/ui/_dataviz.py +++ b/piker/ui/_dataviz.py @@ -1249,7 +1249,7 @@ class Viz(Struct): i_step = lasts['index'] # last index-specific step. i_step_t = lasts['time'] # last time step. - # fqsn = self.flume.symbol.fqsn + # fqme = self.flume.symbol.fqme # check if "last (is) in view" -> is a real-time update necessary? if self.index_field == 'index': @@ -1304,7 +1304,7 @@ class Viz(Struct): varz['i_last_append'] = i_step # print( - # f'DOING APPEND => {fqsn}\n' + # f'DOING APPEND => {fqme}\n' # f'i_step: {i_step}\n' # f'i_step_t: {i_step_t}\n' # f'glast: {glast}\n' diff --git a/piker/ui/_display.py b/piker/ui/_display.py index d7500544..40ec23b2 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -163,7 +163,7 @@ class DisplayState(Struct): Chart-local real-time graphics state container. 
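    Allocated per-fqme and passed to each graphics update cycle.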
''' - fqsn: str + fqme: str godwidget: GodWidget quotes: dict[str, Any] @@ -223,7 +223,7 @@ async def increment_history_view( async for msg in istream: profiler = Profiler( - msg=f'History chart cycle for: `{ds.fqsn}`', + msg=f'History chart cycle for: `{ds.fqme}`', delayed=True, disabled=not pg_profile_enabled(), ms_threshold=ms_slower_then, @@ -232,7 +232,7 @@ async def increment_history_view( # l3 = ds.viz.shm.array[-3:] # print( - # f'fast step for {ds.flume.symbol.fqsn}:\n' + # f'fast step for {ds.flume.symbol.fqme}:\n' # f'{list(l3["time"])}\n' # f'{l3}\n' # ) @@ -317,17 +317,17 @@ async def graphics_update_loop( dss: dict[str, DisplayState] = {} - for fqsn, flume in feed.flumes.items(): + for fqme, flume in feed.flumes.items(): ohlcv = flume.rt_shm hist_ohlcv = flume.hist_shm symbol = flume.symbol - fqsn = symbol.fqsn + fqme = symbol.fqme # update last price sticky - fast_viz = fast_chart._vizs[fqsn] + fast_viz = fast_chart._vizs[fqme] index_field = fast_viz.index_field fast_pi = fast_viz.plot - last_price_sticky = fast_pi.getAxis('right')._stickies[fqsn] + last_price_sticky = fast_pi.getAxis('right')._stickies[fqme] last_price_sticky.update_from_data( *ohlcv.array[-1][[ index_field, @@ -336,9 +336,9 @@ async def graphics_update_loop( ) last_price_sticky.show() - hist_viz = hist_chart._vizs[fqsn] + hist_viz = hist_chart._vizs[fqme] slow_pi = hist_viz.plot - hist_last_price_sticky = slow_pi.getAxis('right')._stickies[fqsn] + hist_last_price_sticky = slow_pi.getAxis('right')._stickies[fqme] hist_last_price_sticky.update_from_data( *hist_ohlcv.array[-1][[ index_field, @@ -346,7 +346,7 @@ async def graphics_update_loop( ]] ) - vlm_chart = vlm_charts[fqsn] + vlm_chart = vlm_charts[fqme] vlm_viz = vlm_chart._vizs.get('volume') if vlm_chart else None ( @@ -381,8 +381,8 @@ async def graphics_update_loop( fast_chart.show() last_quote_s = time.time() - dss[fqsn] = ds = linked.display_state = DisplayState(**{ - 'fqsn': fqsn, + dss[fqme] = ds = linked.display_state = DisplayState(**{ + 'fqme': fqme, 'godwidget': godwidget, 'quotes': {}, @@ -454,11 +454,11 @@ async def graphics_update_loop( last_quote_s = time.time() - for fqsn, quote in quotes.items(): - ds = dss[fqsn] + for fqme, quote in quotes.items(): + ds = dss[fqme] ds.quotes = quote - rt_pi, hist_pi = pis[fqsn] + rt_pi, hist_pi = pis[fqme] # chart isn't active/shown so skip render cycle and # pause feed(s) @@ -466,14 +466,14 @@ async def graphics_update_loop( fast_chart.linked.isHidden() or not rt_pi.isVisible() ): - print(f'{fqsn} skipping update for HIDDEN CHART') + print(f'{fqme} skipping update for HIDDEN CHART') fast_chart.pause_all_feeds() continue ic = fast_chart.view._in_interact if ic: fast_chart.pause_all_feeds() - print(f'{fqsn} PAUSING DURING INTERACTION') + print(f'{fqme} PAUSING DURING INTERACTION') await ic.wait() fast_chart.resume_all_feeds() @@ -495,7 +495,7 @@ def graphics_update_cycle( ) -> None: profiler = Profiler( - msg=f'Graphics loop cycle for: `{ds.fqsn}`', + msg=f'Graphics loop cycle for: `{ds.fqme}`', disabled=not pg_profile_enabled(), ms_threshold=ms_slower_then, delayed=True, @@ -509,7 +509,7 @@ def graphics_update_cycle( # - use a streaming minmax algo and drop the use of the # state-tracking ``multi_maxmin()`` routine from above? - fqsn = ds.fqsn + fqme = ds.fqme chart = ds.chart vlm_chart = ds.vlm_chart @@ -548,7 +548,7 @@ def graphics_update_cycle( # the true range? This way you can slap in orders outside the # current L1 (only) book range. 
main_vb: ChartView = main_viz.plot.vb - this_viz: Viz = chart._vizs[fqsn] + this_viz: Viz = chart._vizs[fqme] this_vb: ChartView = this_viz.plot.vb this_yr = this_vb._yrange if this_yr: @@ -600,7 +600,7 @@ def graphics_update_cycle( profiler, ) - profiler(f'{fqsn} `multi_maxmin()` call') + profiler(f'{fqme} `multi_maxmin()` call') # iterate frames of ticks-by-type such that we only update graphics # using the last update per type where possible. @@ -828,7 +828,7 @@ def graphics_update_cycle( # update any overlayed fsp flows if ( - curve_name != fqsn + curve_name != fqme ): update_fsp_chart( viz, @@ -939,7 +939,7 @@ def graphics_update_cycle( liv and do_rt_update or do_px_step ) - and curve_name not in {fqsn} + and curve_name not in {fqme} ): update_fsp_chart( viz, @@ -1008,7 +1008,7 @@ async def link_views_with_region( hist_pi.addItem(region, ignoreBounds=True) region.setOpacity(6/16) - viz = rt_chart.get_viz(flume.symbol.fqsn) + viz = rt_chart.get_viz(flume.symbol.fqme) assert viz index_field = viz.index_field @@ -1035,7 +1035,7 @@ async def link_views_with_region( # HFT/real-time chart. rng = mn, mx = viewRange[0] - # hist_viz = hist_chart.get_viz(flume.symbol.fqsn) + # hist_viz = hist_chart.get_viz(flume.symbol.fqme) # hist = hist_viz.shm.array[-3:] # print( # f'mn: {mn}\n' @@ -1153,7 +1153,7 @@ _quote_throttle_rate: int = 60 - 6 async def display_symbol_data( godwidget: GodWidget, - fqsns: list[str], + fqmes: list[str], loglevel: str, order_mode_started: trio.Event, @@ -1176,9 +1176,9 @@ async def display_symbol_data( # group_key=loading_sym_key, # ) - for fqsn in fqsns: + for fqme in fqmes: loading_sym_key = sbar.open_status( - f'loading {fqsn} ->', + f'loading {fqme} ->', group_key=True ) @@ -1197,7 +1197,7 @@ async def display_symbol_data( # TODO: we should be able to increase this if we use some # `mypyc` speedups elsewhere? 22ish seems to be the sweet # spot for single-feed chart. - num_of_feeds = len(fqsns) + num_of_feeds = len(fqmes) mx: int = 22 if num_of_feeds > 1: # there will be more ctx switches with more than 1 feed so we @@ -1213,18 +1213,18 @@ async def display_symbol_data( feed: Feed async with open_feed( - fqsns, + fqmes, loglevel=loglevel, tick_throttle=cycles_per_feed, ) as feed: # use expanded contract symbols passed back from feed layer. - fqsns = list(feed.flumes.keys()) + fqmes = list(feed.flumes.keys()) # step_size_s = 1 # tf_key = tf_in_1s[step_size_s] godwidget.window.setWindowTitle( - f'{fqsns} ' + f'{fqmes} ' # f'tick:{symbol.tick_size} ' # f'step:{tf_key} ' ) @@ -1276,7 +1276,7 @@ async def display_symbol_data( # for the "first"/selected symbol we create new chart widgets # and sub-charts for FSPs - fqsn, flume = fitems[0] + fqme, flume = fitems[0] # TODO NOTE: THIS CONTROLS WHAT SYMBOL IS USED FOR ORDER MODE # SUBMISSIONS, we need to make this switch based on selection. @@ -1287,7 +1287,7 @@ async def display_symbol_data( hist_ohlcv: ShmArray = flume.hist_shm symbol = flume.symbol - fqsn = symbol.fqsn + fqme = symbol.fqme hist_chart = hist_linked.plot_ohlc_main( symbol, @@ -1304,9 +1304,9 @@ async def display_symbol_data( # ensure the last datum graphic is generated # for zoom-interaction purposes. 
- hist_viz = hist_chart.get_viz(fqsn) - hist_viz.draw_last(array_key=fqsn) - pis.setdefault(fqsn, [None, None])[1] = hist_chart.plotItem + hist_viz = hist_chart.get_viz(fqme) + hist_viz.draw_last(array_key=fqme) + pis.setdefault(fqme, [None, None])[1] = hist_chart.plotItem # don't show when not focussed hist_linked.cursor.always_show_xlabel = False @@ -1322,8 +1322,8 @@ async def display_symbol_data( 'last_step_color': 'original', }, ) - rt_viz = rt_chart.get_viz(fqsn) - pis.setdefault(fqsn, [None, None])[0] = rt_chart.plotItem + rt_viz = rt_chart.get_viz(fqme) + pis.setdefault(fqme, [None, None])[0] = rt_chart.plotItem # for pause/resume on mouse interaction rt_chart.feed = feed @@ -1338,7 +1338,7 @@ async def display_symbol_data( has_vlm(ohlcv) and vlm_chart is None ): - vlm_chart = vlm_charts[fqsn] = await ln.start( + vlm_chart = vlm_charts[fqme] = await ln.start( open_vlm_displays, rt_linked, flume, @@ -1372,7 +1372,7 @@ async def display_symbol_data( godwidget.resize_all() await trio.sleep(0) - for fqsn, flume in fitems[1:]: + for fqme, flume in fitems[1:]: # get a new color from the palette bg_chart_color, bg_last_bar_color = next(palette) @@ -1380,18 +1380,18 @@ async def display_symbol_data( hist_ohlcv: ShmArray = flume.hist_shm symbol = flume.symbol - fqsn = symbol.fqsn + fqme = symbol.fqme hist_pi = hist_chart.overlay_plotitem( - name=fqsn, - axis_title=fqsn, + name=fqme, + axis_title=fqme, ) hist_viz = hist_chart.draw_curve( - fqsn, + fqme, hist_ohlcv, flume, - array_key=fqsn, + array_key=fqme, overlay=hist_pi, pi=hist_pi, is_ohlc=True, @@ -1402,26 +1402,26 @@ async def display_symbol_data( # ensure the last datum graphic is generated # for zoom-interaction purposes. - hist_viz.draw_last(array_key=fqsn) + hist_viz.draw_last(array_key=fqme) # TODO: we need a better API to do this.. # specially store ref to shm for lookup in display loop # since only a placeholder of `None` is entered in # ``.draw_curve()``. - hist_viz = hist_chart._vizs[fqsn] + hist_viz = hist_chart._vizs[fqme] assert hist_viz.plot is hist_pi - pis.setdefault(fqsn, [None, None])[1] = hist_pi + pis.setdefault(fqme, [None, None])[1] = hist_pi rt_pi = rt_chart.overlay_plotitem( - name=fqsn, - axis_title=fqsn, + name=fqme, + axis_title=fqme, ) rt_viz = rt_chart.draw_curve( - fqsn, + fqme, ohlcv, flume, - array_key=fqsn, + array_key=fqme, overlay=rt_pi, pi=rt_pi, is_ohlc=True, @@ -1434,9 +1434,9 @@ async def display_symbol_data( # specially store ref to shm for lookup in display loop # since only a placeholder of `None` is entered in # ``.draw_curve()``. - rt_viz = rt_chart._vizs[fqsn] + rt_viz = rt_chart._vizs[fqme] assert rt_viz.plot is rt_pi - pis.setdefault(fqsn, [None, None])[0] = rt_pi + pis.setdefault(fqme, [None, None])[0] = rt_pi rt_chart.setFocus() @@ -1452,7 +1452,7 @@ async def display_symbol_data( # greedily do a view range default and pane resizing # on startup before loading the order-mode machinery. 
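        # (i.e. size each chart's viz to its loaded data via
        # `.default_view()` and re-balance panes with
        # `godwidget.resize_all()` before order mode boots.)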
- for fqsn, flume in feed.flumes.items(): + for fqme, flume in feed.flumes.items(): # size view to data prior to order mode init rt_chart.main_viz.default_view( @@ -1495,7 +1495,7 @@ async def display_symbol_data( ) # boot order-mode - order_ctl_fqme: str = fqsns[0] + order_ctl_fqme: str = fqmes[0] mode: OrderMode async with ( open_order_mode( @@ -1522,7 +1522,7 @@ async def display_symbol_data( hist_chart.main_viz.default_view( do_min_bars=True, ) - hist_viz = hist_chart.get_viz(fqsn) + hist_viz = hist_chart.get_viz(fqme) await trio.sleep(0) godwidget.resize_all() diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index 5c61b07e..06814c28 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -398,7 +398,7 @@ class FspAdmin: portal: tractor.Portal, complete: trio.Event, started: trio.Event, - fqsn: str, + fqme: str, dst_fsp_flume: Flume, conf: dict, target: Fsp, @@ -418,7 +418,7 @@ class FspAdmin: cascade, # data feed key - fqsn=fqsn, + fqme=fqme, # TODO: pass `Flume.to_msg()`s here? # mems @@ -444,7 +444,7 @@ class FspAdmin: # register output data self._registry[ - (fqsn, ns_path) + (fqme, ns_path) ] = ( stream, dst_fsp_flume.rt_shm, @@ -484,11 +484,11 @@ class FspAdmin: ) -> (Flume, trio.Event): - fqsn = self.flume.symbol.fqsn + fqme = self.flume.symbol.get_fqme(delim_char='') # allocate an output shm array key, dst_shm, opened = maybe_mk_fsp_shm( - fqsn, + fqme, target=target, readonly=True, ) @@ -519,7 +519,7 @@ class FspAdmin: # if not opened: # raise RuntimeError( - # f'Already started FSP `{fqsn}:{func_name}`' + # f'Already started FSP `{fqme}:{func_name}`' # ) complete = trio.Event() @@ -529,7 +529,7 @@ class FspAdmin: portal, complete, started, - fqsn, + fqme, dst_fsp_flume, conf, target, diff --git a/piker/ui/_position.py b/piker/ui/_position.py index f08e234a..7ec859da 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -125,7 +125,7 @@ async def update_pnl_from_feed( # watch out for wrong quote msg-data if you muck # with backend feed subs code.. 
- # assert sym == quote['fqsn'] + # assert sym == quote['fqme'] for tick in iterticks(quote, types): # print(f'{1/period} Hz') @@ -417,7 +417,7 @@ class SettingsPane: mode = self.order_mode sym = mode.chart.linked.symbol size = tracker.live_pp.size - flume: Feed = mode.feed.flumes[sym.fqsn] + flume: Feed = mode.feed.flumes[sym.fqme] pnl_value = 0 if size: @@ -430,9 +430,9 @@ class SettingsPane: # maybe start update task global _pnl_tasks - fqsn = sym.fqme - if fqsn not in _pnl_tasks: - _pnl_tasks[fqsn] = True + fqme = sym.fqme + if fqme not in _pnl_tasks: + _pnl_tasks[fqme] = True self.order_mode.nursery.start_soon( update_pnl_from_feed, flume, diff --git a/piker/ui/_search.py b/piker/ui/_search.py index 9627e83d..216a94b6 100644 --- a/piker/ui/_search.py +++ b/piker/ui/_search.py @@ -639,10 +639,10 @@ class SearchWidget(QtWidgets.QWidget): godw = self.godwidget # first entry in the cache is the current symbol(s) - fqsns = set() - for multi_fqsns in list(godw._chart_cache): - for fqsn in set(multi_fqsns): - fqsns.add(fqsn) + fqmes = set() + for multi_fqmes in list(godw._chart_cache): + for fqme in set(multi_fqmes): + fqmes.add(fqme) if keep_current_item_selected: sel = self.view.selectionModel() @@ -650,7 +650,7 @@ class SearchWidget(QtWidgets.QWidget): self.view.set_section_entries( 'cache', - list(fqsns), + list(fqmes), # remove all other completion results except for cache clear_all=only, reverse=True, @@ -722,18 +722,18 @@ class SearchWidget(QtWidgets.QWidget): cidx, provider, symbol = value godw = self.godwidget - fqsn = f'{symbol}.{provider}' - log.info(f'Requesting symbol: {fqsn}') + fqme = f'{symbol}.{provider}' + log.info(f'Requesting symbol: {fqme}') # assert provider in symbol await godw.load_symbols( - fqsns=[fqsn], + fqmes=[fqme], loglevel='info', ) # fully qualified symbol name (SNS i guess is what we're # making?) - fqsn = '.'.join([symbol, provider]).lower() + fqme = '.'.join([symbol, provider]).lower() if clear_to_cache: @@ -743,7 +743,7 @@ class SearchWidget(QtWidgets.QWidget): # LIFO order. this is normally only done internally by # the chart on new symbols being loaded into memory godw.set_chart_symbols( - (fqsn,), ( + (fqme,), ( godw.hist_linked, godw.rt_linked, ) @@ -753,7 +753,7 @@ class SearchWidget(QtWidgets.QWidget): ) self.bar.focus() - return fqsn + return fqme def space_dims(self) -> tuple[float, float]: ''' diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 32b8b039..098bf14f 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -549,7 +549,7 @@ class OrderMode: # XXX: seems to fail on certain types of races? 
# assert len(lines) == 2 if lines: - flume: Flume = self.feed.flumes[chart.linked.symbol.fqsn] + flume: Flume = self.feed.flumes[chart.linked.symbol.fqme] _, _, ratio = flume.get_ds_info() for chart, shm in [ @@ -691,7 +691,7 @@ async def open_order_mode( feed: Feed, godw: GodWidget, - fqsn: str, + fqme: str, started: trio.Event, loglevel: str = 'info' @@ -722,7 +722,7 @@ async def open_order_mode( # spawn EMS actor-service async with ( open_ems( - fqsn, + fqme, loglevel=loglevel, ) as ( client, @@ -734,7 +734,7 @@ async def open_order_mode( trio.open_nursery() as tn, ): - log.info(f'Opening order mode for {fqsn}') + log.info(f'Opening order mode for {fqme}') # annotations editors lines = LineEditor(godw=godw) @@ -1027,7 +1027,7 @@ async def process_trade_msg( dialog: Dialog = mode.dialogs.get(oid) if dialog: - fqsn = dialog.symbol + fqme = dialog.symbol match msg: case Status( @@ -1050,16 +1050,16 @@ async def process_trade_msg( assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}' sym = mode.chart.linked.symbol - fqsn = sym.fqme + fqme = sym.fqme if ( - ((order.symbol + f'.{msg.src}') == fqsn) + ((order.symbol + f'.{msg.src}') == fqme) # a existing dark order for the same symbol or ( - order.symbol == fqsn + order.symbol == fqme and ( msg.src in ('dark', 'paperboi') - or (msg.src in fqsn) + or (msg.src in fqme) ) ) From dd10acbbf9fed566281c41316c9846362215e98c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 12:21:38 -0400 Subject: [PATCH 255/294] Replace `Transaction.fqsn` -> `.fqme` Change over all client (broker) code which constructs transactions and finally wipe required `.fqsn` usage from `.accounting` B) --- piker/accounting/_ledger.py | 11 +++-------- piker/accounting/_pos.py | 2 +- piker/brokers/ib/broker.py | 2 +- piker/brokers/kraken/api.py | 2 +- piker/brokers/kraken/broker.py | 6 +++--- piker/clearing/_paper_engine.py | 6 +++--- 6 files changed, 12 insertions(+), 17 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 4073b3a6..23845fde 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -54,8 +54,8 @@ class Transaction(Struct, frozen=True): # TODO: unify this with the `MktPair`, # once we have that as a required field, - # we don't really need the fqsn any more.. - fqsn: str + # we don't really need the fqme any more.. + fqme: str tid: Union[str, int] # unique transaction id size: float @@ -68,11 +68,6 @@ class Transaction(Struct, frozen=True): # via the `MktPair`? 
expiry: datetime | None = None - # remap for back-compat - @property - def fqme(self) -> str: - return self.fqsn - # TODO: drop the Symbol type, construct using # t.sys (the transaction system) @@ -174,7 +169,7 @@ class TransactionLedger(UserDict): continue tx = Transaction( - fqsn=fqme, + fqme=fqme, tid=txdict['tid'], dt=dt, price=txdict['price'], diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 2d3700bc..bc84f5b7 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -900,7 +900,7 @@ def open_pps( clears_table['dt'] = dt trans.append(Transaction( - fqsn=bs_mktid, + fqme=bs_mktid, sym=mkt, bs_mktid=bs_mktid, tid=tid, diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 800baed7..1667d5f8 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -1266,7 +1266,7 @@ def norm_trade_records( insort( records, Transaction( - fqsn=fqme, + fqme=fqme, sym=pair, tid=tid, size=size, diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index 026dca71..24330638 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -401,7 +401,7 @@ class Client: fqme = asset_key + '.kraken' tx = Transaction( - fqsn=fqme, + fqme=fqme, sym=asset, tid=entry['txid'], dt=pendulum.from_timestamp(entry['time']), diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 994b7158..0a9056af 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -895,7 +895,7 @@ async def handle_order_updates( ids.inverse.get(reqid) is None ): # parse out existing live order - fqsn = pair.replace('/', '').lower() + fqme = pair.replace('/', '').lower() price = float(price) size = float(vol) @@ -922,7 +922,7 @@ async def handle_order_updates( action=action, exec_mode='live', oid=oid, - symbol=fqsn, + symbol=fqme, account=acc_name, price=price, size=size, @@ -1207,7 +1207,7 @@ async def norm_trade_records( mkt: MktPair = (await get_mkt_info(fqme))[0] records[tid] = Transaction( - fqsn=fqme, + fqme=fqme, sym=mkt, tid=tid, size=size, diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 373f5bcf..d6e29245 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -263,7 +263,7 @@ class PaperBoi(Struct): # other then this thing, our fqme address. 
bs_mktid: str = fqme t = Transaction( - fqsn=fqme, + fqme=fqme, sym=self._mkts[fqme], tid=oid, size=size, @@ -577,9 +577,9 @@ async def trades_dialogue( mkt_by_fqme[fqme] = mkt # for each sym in the ledger load it's `MktPair` info - for tid, tdict in ledger.data.items(): + for tid, txdict in ledger.data.items(): # TODO: switch this to fqme - l_fqme = tdict['fqsn'] + l_fqme = txdict.get('fqme', txdict['fqsn']) if ( gmi From 7f246697b4e2ebbe0b5905baf8312d7697fabf77 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 12:24:21 -0400 Subject: [PATCH 256/294] Remove remaining `fqsn` usage from code base minus backward compats --- piker/clearing/_messages.py | 2 +- piker/data/_sampling.py | 8 ++++---- piker/data/cli.py | 14 +++++++------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index 61579787..126cd347 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -191,7 +191,7 @@ class BrokerdOrder(Struct): account: str time_ns: int - symbol: str # fqsn + symbol: str # fqme price: float size: float diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 61ec72cb..20bf9b49 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -618,10 +618,10 @@ async def sample_and_broadcast( ] = bus.get_subs(sub_key) # NOTE: by default the broker backend doesn't append - # it's own "name" into the fqsn schema (but maybe it + # it's own "name" into the fqme schema (but maybe it # should?) so we have to manually generate the correct # key here. - fqsn = f'{broker_symbol}.{brokername}' + fqme = f'{broker_symbol}.{brokername}' lags: int = 0 # TODO: speed up this loop in an AOT compiled lang (like @@ -640,7 +640,7 @@ async def sample_and_broadcast( # pushes to the ``uniform_rate_send()`` below. 
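                            # NOTE: a non-blocking send; if the mem
                            # chan's buffer is full we drop the quote
                            # and count it as an overrun below.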
try: stream.send_nowait( - (fqsn, quote) + (fqme, quote) ) except trio.WouldBlock: overruns[sub_key] += 1 @@ -672,7 +672,7 @@ async def sample_and_broadcast( raise trio.BrokenResourceError else: await stream.send( - {fqsn: quote} + {fqme: quote} ) if cs.cancelled_caught: diff --git a/piker/data/cli.py b/piker/data/cli.py index cee729e5..59db1037 100644 --- a/piker/data/cli.py +++ b/piker/data/cli.py @@ -184,10 +184,10 @@ def storage( symbol = symbols[0] async with open_tsdb_client(symbol) as storage: if delete: - for fqsn in symbols: + for fqme in symbols: syms = await storage.client.list_symbols() - resp60s = await storage.delete_ts(fqsn, 60) + resp60s = await storage.delete_ts(fqme, 60) msgish = resp60s.ListFields()[0][1] if 'error' in str(msgish): @@ -199,15 +199,15 @@ def storage( # well, if we ever can make this work we # probably want to dogsplain the real reason # for the delete errurz..llululu - if fqsn not in syms: - log.error(f'Pair {fqsn} dne in DB') + if fqme not in syms: + log.error(f'Pair {fqme} dne in DB') - log.error(f'Deletion error: {fqsn}\n{msgish}') + log.error(f'Deletion error: {fqme}\n{msgish}') - resp1s = await storage.delete_ts(fqsn, 1) + resp1s = await storage.delete_ts(fqme, 1) msgish = resp1s.ListFields()[0][1] if 'error' in str(msgish): - log.error(f'Deletion error: {fqsn}\n{msgish}') + log.error(f'Deletion error: {fqme}\n{msgish}') trio.run(main) From d3bafb00630f9b8251a2853217dc9c924329284b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 22:52:21 -0400 Subject: [PATCH 257/294] Always prefer a config template if found --- piker/config.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/piker/config.py b/piker/config.py index b708e252..d838bcdd 100644 --- a/piker/config.py +++ b/piker/config.py @@ -228,6 +228,7 @@ def load( [str | bytes,], MutableMapping, ] = tomllib.loads, + touch_if_dne: bool = False, **tomlkws, @@ -249,18 +250,19 @@ def load( exist_ok=True, ) - if not path.is_file(): - if path is None: - fn: str = _conf_fn_w_ext(conf_name) + if ( + not path.is_file() + and touch_if_dne + ): + fn: str = _conf_fn_w_ext(conf_name) - # try to copy in a template config to the user's directory if - # one exists. - template: Path = repodir() / 'config' / fn - if template.is_file(): - shutil.copyfile(template, path) + # try to copy in a template config to the user's directory if + # one exists. 
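+        # (templates live in the repo's `config/` subdir under the
+        # same filename as the target conf file.)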
+ template: Path = repodir() / 'config' / fn + if template.is_file(): + shutil.copyfile(template, path) - # touch an empty file - elif touch_if_dne: + else: # just touch an empty file with same name with path.open(mode='x'): pass From 56b23e1fcc0640ba89ca167b7125386e28de8d75 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 22 May 2023 23:34:42 -0400 Subject: [PATCH 258/294] Add docker and elasticsearch to test deps --- requirements-test.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements-test.txt b/requirements-test.txt index e079f8a6..ad27fc5d 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1 +1,3 @@ pytest +docker +elasticsearch From 611d1ee3fcc13df729fd40af6279c2b160a15360 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 10:05:45 -0400 Subject: [PATCH 259/294] Drop db flags from pikerd startup --- piker/service/_actor_runtime.py | 57 --------------------------------- 1 file changed, 57 deletions(-) diff --git a/piker/service/_actor_runtime.py b/piker/service/_actor_runtime.py index ec14dbf9..843ef76b 100644 --- a/piker/service/_actor_runtime.py +++ b/piker/service/_actor_runtime.py @@ -152,11 +152,6 @@ async def open_pikerd( debug_mode: bool = False, registry_addr: None | tuple[str, int] = None, - # db init flags - tsdb: bool = False, - es: bool = False, - drop_root_perms_for_ahab: bool = True, - **kwargs, ) -> Services: @@ -196,50 +191,6 @@ async def open_pikerd( Services.service_n = service_nursery Services.debug_mode = debug_mode - if tsdb: - from ._ahab import start_ahab - from .marketstore import start_marketstore - - log.info('Spawning `marketstore` supervisor') - ctn_ready, config, (cid, pid) = await service_nursery.start( - partial( - start_ahab, - 'marketstored', - start_marketstore, - loglevel=loglevel, - drop_root_perms=drop_root_perms_for_ahab, - ) - - ) - log.info( - f'`marketstored` up!\n' - f'pid: {pid}\n' - f'container id: {cid[:12]}\n' - f'config: {pformat(config)}' - ) - - if es: - from ._ahab import start_ahab - from .elastic import start_elasticsearch - - log.info('Spawning `elasticsearch` supervisor') - ctn_ready, config, (cid, pid) = await service_nursery.start( - partial( - start_ahab, - 'elasticsearch', - start_elasticsearch, - loglevel=loglevel, - drop_root_perms=drop_root_perms_for_ahab, - ) - ) - - log.info( - f'`elasticsearch` up!\n' - f'pid: {pid}\n' - f'container id: {cid[:12]}\n' - f'config: {pformat(config)}' - ) - try: yield Services @@ -279,9 +230,6 @@ async def open_pikerd( async def maybe_open_pikerd( loglevel: Optional[str] = None, registry_addr: None | tuple = None, - tsdb: bool = False, - es: bool = False, - drop_root_perms_for_ahab: bool = True, **kwargs, @@ -335,11 +283,6 @@ async def maybe_open_pikerd( loglevel=loglevel, registry_addr=registry_addr, - # ahabd (docker super) specific controls - tsdb=tsdb, - es=es, - drop_root_perms_for_ahab=drop_root_perms_for_ahab, - # passthrough to ``tractor`` init **kwargs, From bd919f9d66eb27ca69d9715c985f9723ed9906b1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 11:31:58 -0400 Subject: [PATCH 260/294] _ahab: use `Services` api to spawn docker tasks Allows for using the `Services.cancel_service()` api for explicit cancellation in tests and eventually for remote teardown. Change `.start_ahab()` to an `@acm` `start_ahab_service()` and just yield back the same values we were returning prior. 
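
Usage now looks roughly like this sketch (mirroring the
marketstore/elastic call sites added in the following patches; the
endpoint and service name shown are just examples):

    async with start_ahab_service(
        Services,
        'marketstored',
        endpoint=start_marketstore,
        ep_kwargs={'user_config': conf},
        loglevel='cancel',
    ) as (ctn_ready, config, (cid, pid)):
        ...
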
Also fix the logging (level) to actually reflect what's passed in - we weren't using the correct name / instance from the `.sevice` subpkg.. --- piker/service/_ahab.py | 60 +++++++++++++++++++++++++++++------------- piker/service/_mngr.py | 8 ++++-- 2 files changed, 47 insertions(+), 21 deletions(-) diff --git a/piker/service/_ahab.py b/piker/service/_ahab.py index 0629aeda..49d72de6 100644 --- a/piker/service/_ahab.py +++ b/piker/service/_ahab.py @@ -19,6 +19,7 @@ Supervisor for ``docker`` with included async and SC wrapping to ensure a cancellable container lifetime system. ''' +from __future__ import annotations from collections import ChainMap from functools import partial import os @@ -48,6 +49,7 @@ from requests.exceptions import ( ReadTimeout, ) +from ._mngr import Services from ._util import ( log, # sub-sys logger get_console_log, @@ -187,7 +189,11 @@ class Container: and entry not in seen_so_far ): seen_so_far.add(entry) - getattr(log, level.lower(), log.error)(f'{msg}') + getattr( + log, + level.lower(), + log.error + )(f'{msg}') if level == 'fatal': raise ApplicationLogError(msg) @@ -263,8 +269,10 @@ class Container: start = time.time() for _ in range(6): - with trio.move_on_after(0.5) as cs: - log.cancel('polling for CNTR logs...') + with trio.move_on_after(1) as cs: + log.cancel( + 'polling for CNTR logs for {stop_predicate}..' + ) try: await self.process_logs_until( @@ -328,16 +336,13 @@ class Container: async def open_ahabd( ctx: tractor.Context, endpoint: str, # ns-pointer str-msg-type - loglevel: str | None = 'cancel', + loglevel: str | None = None, - **kwargs, + **ep_kwargs, ) -> None: - log = get_console_log( - loglevel, - name=__name__, - ) + log = get_console_log(loglevel or 'cancel') async with open_docker() as client: @@ -350,7 +355,7 @@ async def open_ahabd( cntr_config, start_pred, stop_pred, - ) = ep_func(client) + ) = ep_func(client, **ep_kwargs) cntr = Container(dcntr) conf: ChainMap[str, Any] = ChainMap( @@ -446,10 +451,17 @@ async def open_ahabd( ) -async def start_ahab( +@acm +async def start_ahab_service( + services: Services, service_name: str, + + # endpoint config passed as **kwargs endpoint: Callable[docker.DockerClient, DockerContainer], + ep_kwargs: dict, loglevel: str | None = 'cancel', + + # supervisor config drop_root_perms: bool = True, task_status: TaskStatus[ @@ -470,6 +482,9 @@ async def start_ahab( is started. 
''' + # global log + log = get_console_log(loglevel or 'cancel') + cn_ready = trio.Event() try: async with tractor.open_nursery() as an: @@ -498,21 +513,28 @@ async def start_ahab( )[2] # named user's uid ) - async with portal.open_context( - open_ahabd, + cs, first = await services.start_service_task( + name=service_name, + portal=portal, + + # rest: endpoint inputs + target=open_ahabd, endpoint=str(NamespacePath.from_ref(endpoint)), loglevel='cancel', - ) as (ctx, first): + **ep_kwargs, + ) - cid, pid, cntr_config = first + cid, pid, cntr_config = first - task_status.started(( + try: + yield ( cn_ready, cntr_config, (cid, pid), - )) - - await trio.sleep_forever() + ) + finally: + log.info(f'Cancelling ahab service `{service_name}`') + await services.cancel_service(service_name) # since we demoted root perms in this parent # we'll get a perms error on proc cleanup in diff --git a/piker/service/_mngr.py b/piker/service/_mngr.py index e37bb7ec..70771593 100644 --- a/piker/service/_mngr.py +++ b/piker/service/_mngr.py @@ -33,8 +33,11 @@ from ._util import ( ) -# TODO: factor this into a ``tractor.highlevel`` extension -# pack for the library. +# TODO: we need remote wrapping and a general soln: +# - factor this into a ``tractor.highlevel`` extension # pack for the +# library. +# - wrap a "remote api" wherein you can get a method proxy +# to the pikerd actor for starting services remotely! class Services: actor_n: tractor._supervise.ActorNursery @@ -80,6 +83,7 @@ class Services: ) -> Any: with trio.CancelScope() as cs: + async with portal.open_context( target, allow_overruns=allow_overruns, From e7a172b6565c9f65b42d545869aa7a087f6cf90f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 11:39:18 -0400 Subject: [PATCH 261/294] Reimplement marketstore and elasticsearch daemons Using the new `._ahab.start_ahab_service()` mngr of course, and now support user config overrides (such that our defaults can be modified by a keen user, say using a config file, or for testing). This is where the functionality moved out of the `pikerd` init has been moved - instead of being triggered by bool flag inputs to that factory. For marketstore actually support overriding the entire yaml config via runtime `_yaml_config_str: str` formatting with any passed user dict, primarily focussing on supporting override of the sockaddrs for testing. --- piker/service/elastic.py | 45 +++++++++++++++++++ piker/service/marketstore.py | 87 ++++++++++++++++++++++++++++++------ 2 files changed, 119 insertions(+), 13 deletions(-) diff --git a/piker/service/elastic.py b/piker/service/elastic.py index 6714a9ec..902f4fde 100644 --- a/piker/service/elastic.py +++ b/piker/service/elastic.py @@ -15,6 +15,7 @@ # along with this program. If not, see . from __future__ import annotations +from contextlib import asynccontextmanager as acm from typing import ( Any, TYPE_CHECKING, @@ -122,3 +123,47 @@ def start_elasticsearch( health_query, chk_for_closed_msg, ) + + +@acm +async def start_ahab_daemon( + service_mngr: Services, + user_config: dict | None = None, + loglevel: str | None = None, + +) -> tuple[str, dict]: + ''' + Task entrypoint to start the estasticsearch docker container using + the service manager. 
+ + ''' + from ._ahab import start_ahab_service + + # dict-merge any user settings + conf: dict = _config.copy() + if user_config: + conf = conf | user_config + + dname: str = 'esd' + log.info(f'Spawning `{dname}` supervisor') + async with start_ahab_service( + service_mngr, + dname, + + # NOTE: docker-py client is passed at runtime + start_elasticsearch, + ep_kwargs={'user_config': conf}, + loglevel=loglevel, + + ) as ( + ctn_ready, + config, + (cid, pid), + ): + log.info( + f'`{dname}` up!\n' + f'pid: {pid}\n' + f'container id: {cid[:12]}\n' + f'config: {pformat(config)}' + ) + yield dname, conf diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py index 4ca496b5..930c44da 100644 --- a/piker/service/marketstore.py +++ b/piker/service/marketstore.py @@ -26,6 +26,8 @@ from __future__ import annotations from contextlib import asynccontextmanager as acm from datetime import datetime +from functools import partial +from pprint import pformat from typing import ( Any, Union, @@ -70,7 +72,7 @@ _config = { 'startup_timeout': 2, } -_yaml_config = ''' +_yaml_config_str: str = ''' # piker's ``marketstore`` config. # mount this config using: @@ -112,18 +114,18 @@ triggers: # config: # filter: "nasdaq" -'''.format(**_config) +''' def start_marketstore( client: docker.DockerClient, - + user_config: dict, **kwargs, ) -> tuple[DockerContainer, dict[str, Any]]: ''' - Start and supervise a marketstore instance with its config bind-mounted - in from the piker config directory on the system. + Start and supervise a marketstore instance with its config + bind-mounted in from the piker config directory on the system. The equivalent cli cmd to this code is: @@ -147,14 +149,16 @@ def start_marketstore( os.mkdir(mktsdir) yml_file = os.path.join(mktsdir, 'mkts.yml') + yaml_config = _yaml_config_str.format(**user_config) + if not os.path.isfile(yml_file): log.warning( f'No `marketstore` config exists?: {yml_file}\n' 'Generating new file from template:\n' - f'{_yaml_config}\n' + f'{yaml_config}\n' ) with open(yml_file, 'w') as yf: - yf.write(_yaml_config) + yf.write(yaml_config) # create a mount from user's local piker config dir into container config_dir_mnt = docker.types.Mount( @@ -177,6 +181,9 @@ def start_marketstore( type='bind', ) + grpc_listen_port = int(user_config['grpc_listen_port']) + ws_listen_port = int(user_config['ws_listen_port']) + dcntr: DockerContainer = client.containers.run( 'alpacamarkets/marketstore:latest', # do we need this for cmds? @@ -184,8 +191,8 @@ def start_marketstore( # '-p 5993:5993', ports={ - '5993/tcp': 5993, # jsonrpc / ws? - '5995/tcp': 5995, # grpc + f'{ws_listen_port}/tcp': ws_listen_port, + f'{grpc_listen_port}/tcp': grpc_listen_port, }, mounts=[ config_dir_mnt, @@ -205,7 +212,13 @@ def start_marketstore( return "launching tcp listener for all services..." in msg async def stop_matcher(msg: str): - return "exiting..." in msg + return ( + # not sure when this happens, some kinda stop condition + "exiting..." in msg + + # after we send SIGINT.. + or "initiating graceful shutdown due to 'interrupt' request" in msg + ) return ( dcntr, @@ -217,6 +230,49 @@ def start_marketstore( ) +@acm +async def start_ahab_daemon( + service_mngr: Services, + user_config: dict | None = None, + loglevel: str | None = None, + +) -> tuple[str, dict]: + ''' + Task entrypoint to start the marketstore docker container using the + service manager. 
+ + ''' + from ._ahab import start_ahab_service + + # dict-merge any user settings + conf: dict = _config.copy() + if user_config: + conf: dict = conf | user_config + + dname: str = 'marketstored' + log.info(f'Spawning `{dname}` supervisor') + async with start_ahab_service( + service_mngr, + dname, + + # NOTE: docker-py client is passed at runtime + start_marketstore, + ep_kwargs={'user_config': conf}, + loglevel=loglevel, + ) as ( + ctn_ready, + config, + (cid, pid), + ): + log.info( + f'`{dname}` up!\n' + f'pid: {pid}\n' + f'container id: {cid[:12]}\n' + f'config: {pformat(config)}' + ) + yield dname, conf + + _tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK') _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids) @@ -681,9 +737,12 @@ async def open_tsdb_client( delayed=False, ) - # load any user service settings for connecting to tsdb - conf, path = config.load('conf') - tsdbconf = conf['network'].get('tsdb') + # load any user service settings for connecting to + rootconf, path = config.load( + 'conf', + touch_if_dne=True, + ) + tsdbconf = rootconf['network'].get('tsdb') backend = tsdbconf.pop('backend') async with ( open_storage_client( @@ -903,3 +962,5 @@ async def stream_quotes( if quotes: yield quotes + + From d094625bd632029146196c21e300275e81fe8c4b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 12:12:09 -0400 Subject: [PATCH 262/294] Activate docker daemons via flags using exit stack --- piker/cli/__init__.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/piker/cli/__init__.py b/piker/cli/__init__.py index 63b8321a..70610135 100644 --- a/piker/cli/__init__.py +++ b/piker/cli/__init__.py @@ -19,6 +19,7 @@ CLI commons. ''' import os +from contextlib import AsyncExitStack import click import trio @@ -69,8 +70,8 @@ def pikerd( Spawn the piker broker-daemon. ''' + from .. import service - from ..service import open_pikerd log = get_console_log(loglevel) if pdb: @@ -90,17 +91,36 @@ def pikerd( ) async def main(): + service_mngr: service.Services + async with ( - open_pikerd( - tsdb=tsdb, - es=es, + service.open_pikerd( loglevel=loglevel, debug_mode=pdb, registry_addr=reg_addr, - ), # normally delivers a ``Services`` handle + ) as service_mngr, # normally delivers a ``Services`` handle trio.open_nursery() as n, + + AsyncExitStack() as stack, ): + if tsdb: + dname, conf = await stack.enter_async_context( + service.marketstore.start_ahab_daemon( + service_mngr, + loglevel=loglevel, + ) + ) + log.info(f'TSDB `{dname}` up with conf:\n{conf}') + + if es: + dname, conf = await stack.enter_async_context( + service.elastic.start_ahab_daemon( + service_mngr, + loglevel=loglevel, + ) + ) + log.info(f'DB `{dname}` up with conf:\n{conf}') await trio.sleep_forever() From cd55d027c4dc37a6bc84a4a3f5ae55013902f2ab Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 12:17:55 -0400 Subject: [PATCH 263/294] Re-implement db tests using new ahab daemons Avoids the really sloppy flag passing to `open_pikerd()` and allows for separation of the individual docker daemon starts. Also add a new `root_conf() -> Path` fixture which will open and load the `dict` for the new root `conf.toml` file. 
--- tests/conftest.py | 13 +++++++---- tests/test_databases.py | 52 +++++++++++++++++++++++++++++++++++++---- 2 files changed, 56 insertions(+), 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 364be44a..f797ea15 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -120,12 +120,7 @@ async def _open_test_pikerd( 'piker_test_dir': tmpconfdir, }, - # tests may need to spawn containers dynamically - # or just in sequence per test, so we keep root. - drop_root_perms_for_ahab=False, - debug_mode=debug_mode, - **kwargs, ) as service_manager, @@ -182,6 +177,14 @@ def tmpconfdir( # rmtree(str(tmp_path)) +@pytest.fixture +def root_conf(tmpconfdir) -> dict: + return config.load( + 'conf', + touch_if_dne=True, + ) + + @pytest.fixture def open_test_pikerd( request: pytest.FixtureRequest, diff --git a/tests/test_databases.py b/tests/test_databases.py index 554b0990..13485cb7 100644 --- a/tests/test_databases.py +++ b/tests/test_databases.py @@ -14,6 +14,7 @@ from piker.service import elastic def test_marketstore_startup_and_version( open_test_pikerd: AsyncContextManager, loglevel: str, + root_conf: dict, ): ''' Verify marketstore tsdb starts up and we can @@ -21,18 +22,39 @@ def test_marketstore_startup_and_version( ''' async def main(): + user_conf: dict = { + 'grpc_listen_port': 5995 + 6, + 'ws_listen_port': 5993 + 6, + } + + dname: str # service name + config: dict # service name async with ( open_test_pikerd( loglevel=loglevel, - tsdb=True + # tsdb=True ) as ( _, # host _, # port pikerd_portal, services, ), + + marketstore.start_ahab_daemon( + services, + user_conf, + loglevel=loglevel, + + ) as (dname, config) ): + # ensure user config was applied + for k, v in user_conf.items(): + assert config[k] == v + + # netconf: dict = root_conf['network'] + # tsdbconf = netconf['tsdb'] + # TODO: we should probably make this connection poll # loop part of the `get_client()` implementation no? 
@@ -45,7 +67,12 @@ def test_marketstore_startup_and_version( for _ in range(3): # NOTE: default sockaddr is embedded within - async with marketstore.get_client() as client: + async with marketstore.get_client( + host='localhost', + port=user_conf['grpc_listen_port'], + + ) as client: + print(f'Client is up @ {user_conf}!') with trio.move_on_after(1) as cs: syms = await client.list_symbols() @@ -64,7 +91,13 @@ def test_marketstore_startup_and_version( ) print('VERSION CHECKED') + break # get out of retry-connect loop + else: + raise RuntimeError('Failed to connect to {conf}!') + + # gracefully teardown docker-daemon-service + print(f'Cancelling docker service {dname}') trio.run(main) @@ -80,18 +113,29 @@ def test_elasticsearch_startup_and_version( ''' async def main(): - port = 19200 + port: int = 19200 + user_conf: dict = { + 'port': port, + } + + dname: str # service name + config: dict # service name async with ( open_test_pikerd( loglevel=loglevel, - es=True ) as ( _, # host _, # port pikerd_portal, services, ), + elastic.start_ahab_daemon( + services, + user_conf, + loglevel=loglevel, + + ) as (dname, config) ): # TODO: much like the above connect loop for mkts, we should # probably make this sync start part of the From 6ad8c603d599b4a0d5db26d34df6915fa493d3f8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 23 May 2023 16:45:58 -0400 Subject: [PATCH 264/294] More detailed `Position.events` todo --- piker/accounting/_pos.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index bc84f5b7..453ee324 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -51,7 +51,10 @@ from ._mktinfo import ( ) from .. import config from ..brokers import get_brokermod -from ..clearing._messages import BrokerdPosition, Status +from ..clearing._messages import ( + BrokerdPosition, + Status, +) from ..data.types import Struct from ..log import get_logger @@ -882,11 +885,11 @@ def open_pps( # TODO: RE: general "events" instead of just "clears": # - make this an `events` field and support more event types - # such as 'split', 'name_change', 'mkt_info', etc.. + # such as 'split', 'name_change', 'mkt_info', etc.. # - should be make a ``Struct`` for clear/event entries? convert - # "clear events table" from the toml config (list of a dicts) - # and load it into object form for use in position processing of - # new clear events. + # "clear events table" from the toml config (list of a dicts) + # and load it into object form for use in position processing of + # new clear events. # convert clears sub-tables (only in this form # for toml re-presentation) back into a master table. 
From ff74d47fd583971b930f75850f74f5ac9e3dab30 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 23 May 2023 16:46:21 -0400
Subject: [PATCH 265/294] kucoin: fix fqme or search result key lookups

---
 piker/brokers/kucoin.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py
index 3f8b71d0..17376e8d 100755
--- a/piker/brokers/kucoin.py
+++ b/piker/brokers/kucoin.py
@@ -614,11 +614,21 @@ async def get_mkt_info(
     bs_fqme, _, broker = fqme.partition('.')

     pairs: dict[str, KucoinMktPair] = await client.cache_pairs()
-    bs_mktid: str = client._fqmes2mktids[bs_fqme]
-    pair: KucoinMktPair = pairs[bs_mktid]
+
+    try:
+        # likely a search result key which is already in native mkt symbol form
+        pair: KucoinMktPair = pairs[bs_fqme]
+        bs_mktid: str = bs_fqme
+
+    except KeyError:
+
+        # likely a piker-style fqme from an API request or the CLI
+        bs_mktid: str = client._fqmes2mktids[bs_fqme]
+        pair: KucoinMktPair = pairs[bs_mktid]
+
+    # symbology sanity
     assert bs_mktid == pair.symbol
-    # pair: KucoinMktPair = await client.pair_info(pair_str)

     assets: dict[str, Currency] = client._currencies

     # TODO: maybe just do this processing in

From b44b0915ca81ca5c37c99126c3a715ed2e88c5c6 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 23 May 2023 19:11:40 -0400
Subject: [PATCH 266/294] ib: i guess only discard `MktPair.src: Asset` on
 non-forex XD

---
 piker/brokers/ib/feed.py | 39 ++++++++++++++++++++++++++-------------
 1 file changed, 26 insertions(+), 13 deletions(-)

diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py
index b13e963a..3a7728d2 100644
--- a/piker/brokers/ib/feed.py
+++ b/piker/brokers/ib/feed.py
@@ -146,7 +146,18 @@ async def open_history_client(
             mkt.src
             and mkt.src.atype == 'fiat'
         ):
-            fqme: str = mkt.get_bs_fqme(without_src=True)
+            fqme_kwargs: dict[str, Any] = {}
+
+            if mkt.dst.atype == 'forex':
+
+                # XXX: for now we do need the src token kept in since
+                # a fiat pair is otherwise ambiguous.
+                fqme_kwargs = {
+                    'without_src': False,  # default is True
+                    'delim_char': '',  # bc they would normally use a frickin `.` smh
+                }
+
+            fqme: str = mkt.get_bs_fqme(**fqme_kwargs)
+
         else:
             fqme = mkt.bs_fqme
@@ -812,24 +823,26 @@ async def get_mkt_info(
         #     str(expiry).strip(' ')
         # ))

+    # TODO: currently we can't pass the fiat src asset because
+    # then we'll get a `MNQUSD` request for history data..
+    # we need to figure out how we're going to handle this (later?)
+    # but likely we want all backends to eventually handle
+    # ``dst/src.venue.`` style !?
+    src: str | Asset = ''
+    if atype == 'forex':
+        src = Asset(
+            name=str(con.currency),
+            atype='fiat',
+            tx_tick=Decimal('0.01'),  # right?
+        )
+
     mkt = MktPair(
         dst=Asset(
             name=con.symbol.lower(),
             atype=atype,
             tx_tick=size_tick,
         ),
-
-        # TODO: currently we can't pass the fiat src asset because
-        # then we'll get a `MNQUSD` request for history data..
-        # we need to figure out how we're going to handle this (later?)
-        # but likely we want all backends to eventually handle
-        # ``dst/src.venue.`` style !?
-
-        # src=Asset(
-        #     name=str(con.currency),
-        #     atype='fiat',
-        #     tx_tick=Decimal('0.01'),  # right?
-        # ),
+        src=src,

         price_tick=price_tick,
         size_tick=size_tick,

From e82f7f9012729e3b9acb5519e119d923867e4f44 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Tue, 23 May 2023 22:39:38 -0400
Subject: [PATCH 267/294] Skip elasticsearch test for now, container build
 seems borked?
---
 tests/test_databases.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/tests/test_databases.py b/tests/test_databases.py
index 13485cb7..66438e33 100644
--- a/tests/test_databases.py
+++ b/tests/test_databases.py
@@ -1,11 +1,12 @@
 from typing import AsyncContextManager
 import logging

-import trio
+import pytest
 from elasticsearch import (
     Elasticsearch,
     ConnectionError,
 )
+import trio

 from piker.service import marketstore
 from piker.service import elastic
@@ -102,6 +103,7 @@ def test_marketstore_startup_and_version(
     trio.run(main)


+@pytest.mark.skip
 def test_elasticsearch_startup_and_version(
     open_test_pikerd: AsyncContextManager,
     loglevel: str,

From 8e97814c1f002333a96f7e7f9d75f9a234bfaffb Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 08:25:14 -0400
Subject: [PATCH 268/294] Add "no vlm" indication to `FeedInit`

Stash it for now in the (now mutable by default) `.shm_write_opts` and
have the new `Flume._has_vlm: bool` (only set to false internally by
feed layer) which can be read via new public `.has_vlm()` predicate.

Move out the old `.ui/_fsp` helper logic to this flume method.
---
 piker/data/feed.py     |  6 +++++-
 piker/data/flows.py    | 20 ++++++++++++++++++++
 piker/data/validate.py |  9 +++++++--
 piker/ui/_display.py   |  3 +--
 piker/ui/_fsp.py       |  8 --------
 5 files changed, 33 insertions(+), 13 deletions(-)

diff --git a/piker/data/feed.py b/piker/data/feed.py
index 0a8fd0f6..88d2a386 100644
--- a/piker/data/feed.py
+++ b/piker/data/feed.py
@@ -309,7 +309,7 @@ async def allocate_persistent_feed(
     init: FeedInit = validate_backend(
         mod,
         [symstr],
-        init_msgs,
+        init_msgs,  # NOTE: only 1 should be delivered for now..
     )
     mkt: MktPair = init.mkt_info
     fqme: str = mkt.fqme
@@ -355,6 +355,10 @@ async def allocate_persistent_feed(
         _hist_shm_token=hist_shm.token,
         izero_hist=izero_hist,
         izero_rt=izero_rt,
+
+        # NOTE: some instruments don't have this provided,
+        # eg. commodities and forex from ib.
+        _has_vlm=init.shm_write_opts['has_vlm'],
     )

     # for ambiguous names we simply register the
diff --git a/piker/data/flows.py b/piker/data/flows.py
index ecb727e8..07ca304e 100644
--- a/piker/data/flows.py
+++ b/piker/data/flows.py
@@ -240,3 +240,23 @@ class Flume(Struct):
         )
         imx = times.shape[0] - 1
         return min(first, imx)
+
+    # only set by external msg or creator, never
+    # manually!
+    _has_vlm: bool = True
+
+    def has_vlm(self) -> bool:
+
+        if not self._has_vlm:
+            return False
+
+        # make sure that the instrument supports volume history
+        # (sometimes this is not the case for some commodities and
+        # derivatives)
+        vlm: np.ndarray = self.rt_shm.array['volume']
+        return not bool(
+            np.all(np.isin(vlm, -1))
+            or np.all(np.isnan(vlm))
+        )
+
+
diff --git a/piker/data/validate.py b/piker/data/validate.py
index c295c179..8f6c1d5a 100644
--- a/piker/data/validate.py
+++ b/piker/data/validate.py
@@ -25,6 +25,8 @@ from typing import (
     Any,
 )

+from msgspec import field
+
 from .types import Struct
 from ..accounting import (
     Asset,
@@ -52,8 +54,11 @@ class FeedInit(Struct, frozen=True):

     # NOTE: only field we use rn in ``.data.feed``
     # TODO: maybe make a SamplerConfig(Struct)?
-    shm_write_opts: dict[str, Any] = {}
-    # 'sum_tick_vlm': True
+    shm_write_opts: dict[str, Any] = field(
+        default_factory=lambda: {
+            'has_vlm': True,
+            'sum_tick_vlm': True,
+        })


 def validate_backend(
diff --git a/piker/ui/_display.py b/piker/ui/_display.py
index 40ec23b2..eb8e330b 100644
--- a/piker/ui/_display.py
+++ b/piker/ui/_display.py
@@ -62,7 +62,6 @@ from ._style import hcolor
 from ._fsp import (
     update_fsp_chart,
     start_fsp_displays,
-    has_vlm,
     open_vlm_displays,
 )
 from ._forms import (
@@ -1335,7 +1334,7 @@ async def display_symbol_data(
             None | ChartPlotWidget
         ] = {}.fromkeys(feed.flumes)
         if (
-            has_vlm(ohlcv)
+            flume.has_vlm()
             and vlm_chart is None
         ):
             vlm_chart = vlm_charts[fqme] = await ln.start(
diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py
index 06814c28..6435e970 100644
--- a/piker/ui/_fsp.py
+++ b/piker/ui/_fsp.py
@@ -72,14 +72,6 @@ from .._profile import Profiler
 log = get_logger(__name__)


-def has_vlm(ohlcv: ShmArray) -> bool:
-    # make sure that the instrument supports volume history
-    # (sometimes this is not the case for some commodities and
-    # derivatives)
-    vlm = ohlcv.array['volume']
-    return not bool(np.all(np.isin(vlm, -1)) or np.all(np.isnan(vlm)))
-
-
 def update_fsp_chart(
     viz,
     graphics_name: str,

From e8787d89c6592d86c1f079757c3b6ff3ccb3ff8e Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 08:28:16 -0400
Subject: [PATCH 269/294] ib: unset vlm via new `FeedInit.shm_write_opts` field

---
 piker/brokers/ib/feed.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py
index 3a7728d2..fb896624 100644
--- a/piker/brokers/ib/feed.py
+++ b/piker/brokers/ib/feed.py
@@ -894,15 +894,14 @@ async def stream_quotes(

     init_msg = FeedInit(mkt_info=mkt)

-    has_vlm: bool = True
     if mkt.dst.atype in {
         'forex',
         'index',
         'commodity',
     }:
-        has_vlm = False
         # tell sampler config that it shouldn't do vlm summing.
         init_msg.shm_write_opts['sum_tick_vlm'] = False
+        init_msg.shm_write_opts['has_vlm'] = False

     init_msgs.append(init_msg)
@@ -977,7 +976,7 @@ async def stream_quotes(
         async with aclosing(stream):
             # if syminfo.get('no_vlm', False):
-            if not has_vlm:
+            if not init_msg.shm_write_opts['has_vlm']:

                 # generally speaking these feeds don't
                 # include vlm data.

From 9a063ccb11620226bc7b8b237fe143675a5bb52a Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 09:11:24 -0400
Subject: [PATCH 270/294] ib: Solve lingering bugs for non-vlm contracts

Contract matching in live setup was borked; switch to
`MktPair.dst.atype` matching, don't override the `cmdty` "venue" (a
weird special case) in `get_mkt_info()` otherwise lookup will fail..
---
 piker/brokers/ib/feed.py | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py
index fb896624..b4edae17 100755
--- a/piker/brokers/ib/feed.py
+++ b/piker/brokers/ib/feed.py
@@ -803,7 +803,11 @@ async def get_mkt_info(
     init_info: dict = {}
     atype = _asset_type_map[con.secType]
-    venue = con.primaryExchange or con.exchange
+    if atype == 'commodity':
+        venue: str = 'cmdty'
+    else:
+        venue = con.primaryExchange or con.exchange
+
     price_tick: Decimal = Decimal(str(details.minTick))

     if atype == 'stock':
@@ -918,12 +922,12 @@ async def stream_quotes(

     # it might be outside regular trading hours so see if we can at
     # least grab history.
     if (
-        isnan(first_ticker.last)
-        and type(first_ticker.contract) not in (
-            ibis.Commodity,
-            ibis.Forex,
-            ibis.Crypto,
-        )
+        isnan(first_ticker.last)  # last quote price value is nan
+        and mkt.dst.atype not in {
+            'commodity',
+            'forex',
+            'crypto',
+        }
     ):
         task_status.started((init_msgs, first_quote))

From bd8e4760d599210f3b48c62c69f0de74a59fd2d3 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 12:16:17 -0400
Subject: [PATCH 271/294] Port everything strictly to `Position.mkt` and
 `Flume.mkt`

---
 piker/accounting/_allocate.py   |  2 +-
 piker/accounting/_mktinfo.py    |  5 +++
 piker/accounting/_pos.py        | 61 +++++++++++----------------------
 piker/brokers/ib/broker.py      |  3 +-
 piker/clearing/_ems.py          |  2 +-
 piker/clearing/_paper_engine.py |  2 +-
 piker/data/flows.py             | 33 +++----------------
 piker/data/history.py           |  4 +--
 piker/fsp/_api.py               | 15 +++------
 piker/fsp/_engine.py            | 10 +++---
 piker/service/marketstore.py    |  2 +-
 piker/ui/_display.py            | 18 +++++-----
 piker/ui/_fsp.py                | 30 +++++++++++-----
 piker/ui/_position.py           |  4 +--
 piker/ui/order_mode.py          |  2 +-
 15 files changed, 80 insertions(+), 113 deletions(-)

diff --git a/piker/accounting/_allocate.py b/piker/accounting/_allocate.py
index 4bafc2f6..18900c9f 100644
--- a/piker/accounting/_allocate.py
+++ b/piker/accounting/_allocate.py
@@ -203,7 +203,7 @@ class Allocator(Struct):
         # compute a fractional slots size to display
         slots_used = self.slots_used(
             Position(
-                symbol=sym,
+                mkt=sym,
                 size=order_size,
                 ppu=price,
                 bs_mktid=sym,
diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index 341ff2db..653c8d04 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -295,6 +295,11 @@ class MktPair(Struct, frozen=True):
             dst=dst,
             src=src,
             **msg,
+            # XXX NOTE: ``msgspec`` can encode `Decimal`
+            # but it doesn't decode to it by default since
+            # we aren't spec-cing these msgs as structs, SO
+            # we have to ensure we do a struct type cast (which `.copy()`
+            # does) to ensure we get the right type!
         ).copy()

     @property
diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 453ee324..dda39177 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -79,11 +79,7 @@ class Position(Struct):
     file system (in TOML) and to interchange as a msg over IPC.

     '''
-    symbol: Symbol | MktPair
-
-    @property
-    def mkt(self) -> MktPair:
-        return self.symbol
+    mkt: MktPair

     # can be +ve or -ve for long/short
     size: float
@@ -143,37 +139,20 @@ class Position(Struct):
         # listing venue here even when the backend isn't providing
         # it via the trades ledger..
         # drop symbol obj in serialized form
-        s = d.pop('symbol')
-        fqme = s.fqme
+        mkt: MktPair = d.pop('mkt')
+        assert isinstance(mkt, MktPair)
+
+        fqme = mkt.fqme
         broker, mktep, venue, suffix = unpack_fqme(fqme)

-        if isinstance(s, Symbol):
-            sym_info = s.broker_info[broker]
-            d['asset_type'] = sym_info['asset_type']
-            d['price_tick'] = (
-                sym_info.get('price_tick_size')
-                or
-                s.tick_size
-            )
-            d['size_tick'] = (
-                sym_info.get('lot_tick_size')
-                or
-                s.lot_tick_size
-            )
+        # an asset resolved mkt where we have ``Asset`` info about
+        # each tradeable asset in the market.
-            if mkt.resolved:
-                dst: Asset = mkt.dst
-                d['asset_type'] = dst.atype
-
-            d['price_tick'] = mkt.price_tick
-            d['size_tick'] = mkt.size_tick
+        if mkt.resolved:
+            dst: Asset = mkt.dst
+            d['asset_type'] = dst.atype
+
+        d['price_tick'] = mkt.price_tick
+        d['size_tick'] = mkt.size_tick

         if self.expiry is None:
             d.pop('expiry', None)
@@ -267,12 +246,12 @@ class Position(Struct):
     ) -> None:

         # XXX: better place to do this?
-        symbol = self.symbol
+        mkt = self.mkt

         # TODO: switch to new fields..?
         # .size_tick_digits, .price_tick_digits
-        size_tick_digits = symbol.lot_size_digits
-        price_tick_digits = symbol.tick_size_digits
+        size_tick_digits = mkt.lot_size_digits
+        price_tick_digits = mkt.tick_size_digits

         self.ppu = round(
             # TODO: change this to ppu?
@@ -470,7 +449,7 @@ class Position(Struct):
             size = round(size * self.split_ratio)

         return float(
-            self.symbol.quantize(size),
+            self.mkt.quantize(size),
         )

     def minimize_clears(
@@ -571,7 +550,7 @@ class PpTable(Struct):
             if not pp:
                 # if no existing pp, allocate fresh one.
                 pp = pps[bs_mktid] = Position(
-                    mkt,
+                    mkt=mkt,
                     size=0.0,
                     ppu=0.0,
                     bs_mktid=bs_mktid,
@@ -583,8 +562,8 @@ class PpTable(Struct):
                 # a shorter string), instead use the one from the
                 # transaction since it likely has (more) full
                 # information from the provider.
-                if len(pp.symbol.fqme) < len(fqme):
-                    pp.symbol = mkt
+                if len(pp.mkt.fqme) < len(fqme):
+                    pp.mkt = mkt

             clears = pp.clears
             if clears:
@@ -735,7 +714,7 @@ class PpTable(Struct):
         if closed:
             bs_mktid: str
             for bs_mktid, pos in closed.items():
-                fqme: str = pos.symbol.fqme
+                fqme: str = pos.mkt.fqme
                 if fqme in self.conf:
                     self.conf.pop(fqme)
                 else:
diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index 1667d5f8..bcd947ea 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -345,6 +345,7 @@ async def update_and_audit_msgs(

 ) -> list[BrokerdPosition]:

     msgs: list[BrokerdPosition] = []
+    p: Position
     for p in pps:
         bs_mktid = p.bs_mktid
@@ -427,7 +428,7 @@ async def update_and_audit_msgs(
             # right since `.broker` is already included?
             account=f'ib.{acctid}',
             # XXX: the `.ib` is stripped..?
             symbol=p.mkt.fqme,
             # currency=ibppmsg.currency,
             size=p.size,
             avg_price=p.ppu,
diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py
index 77cad1bd..ee7ec284 100644
--- a/piker/clearing/_ems.py
+++ b/piker/clearing/_ems.py
@@ -1281,7 +1281,7 @@ async def process_client_order_cmds(
             # TODO: make this configurable from our top level
             # config, prolly in a .clearing` section?
             spread_slap: float = 5
-            min_tick = float(flume.symbol.size_tick)
+            min_tick = float(flume.mkt.size_tick)
             min_tick_digits = float_digits(min_tick)

             if action == 'buy':
diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index d6e29245..bf4f0948 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -611,7 +611,7 @@ async def trades_dialogue(
         pp_msgs.append(BrokerdPosition(
             broker=broker,
             account='paper',
-            symbol=pos.symbol.fqme,
+            symbol=pos.mkt.fqme,
             size=pos.size,
             avg_price=pos.ppu,
         ))
diff --git a/piker/data/flows.py b/piker/data/flows.py
index 07ca304e..7776a602 100644
--- a/piker/data/flows.py
+++ b/piker/data/flows.py
@@ -22,7 +22,6 @@ real-time data processing data-structures.
""" from __future__ import annotations -# from decimal import Decimal from typing import ( TYPE_CHECKING, ) @@ -31,13 +30,8 @@ import tractor import pendulum import numpy as np -from ..accounting._mktinfo import ( - MktPair, - Symbol, -) -from ._util import ( - log, -) +from ..accounting import MktPair +from ._util import log from .types import Struct from ._sharedmem import ( attach_shm_array, @@ -94,18 +88,10 @@ class Flume(Struct): queuing properties. ''' - mkt: MktPair | Symbol + mkt: MktPair first_quote: dict _rt_shm_token: _Token - @property - def symbol(self) -> MktPair | Symbol: - log.warning( - '`Flume.symbol` is deprecated!\n' - 'Use `.mkt: MktPair` instead!' - ) - return self.mkt - # optional since some data flows won't have a "downsampled" history # buffer/stream (eg. FSPs). _hist_shm_token: _Token | None = None @@ -208,18 +194,7 @@ class Flume(Struct): ''' mkt_msg = msg.pop('mkt') - - if 'dst' in mkt_msg: - mkt = MktPair.from_msg(mkt_msg) - - else: - # XXX NOTE: ``msgspec`` can encode `Decimal` - # but it doesn't decide to it by default since - # we aren't spec-cing these msgs as structs, SO - # we have to ensure we do a struct type case (which `.copy()` - # does) to ensure we get the right type! - mkt = Symbol(**mkt_msg).copy() - + mkt = MktPair.from_msg(mkt_msg) return cls(mkt=mkt, **msg) def get_index( diff --git a/piker/data/history.py b/piker/data/history.py index f8260c86..28a4590e 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -648,7 +648,7 @@ async def manage_history( # (maybe) allocate shm array for this broker/symbol which will # be used for fast near-term history capture and processing. hist_shm, opened = maybe_open_shm_array( - key=f'piker.{service}[{uuid[:16]}.{fqme}.hist', + key=f'piker.{service}[{uuid[:16]}].{fqme}.hist', # use any broker defined ohlc dtype: dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), @@ -665,7 +665,7 @@ async def manage_history( ) rt_shm, opened = maybe_open_shm_array( - key=f'piker.{service}[{uuid[:16]}.{fqme}.rt', + key=f'piker.{service}[{uuid[:16]}].{fqme}.rt', # use any broker defined ohlc dtype: dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype), diff --git a/piker/fsp/_api.py b/piker/fsp/_api.py index 8226d16b..11d1e7dc 100644 --- a/piker/fsp/_api.py +++ b/piker/fsp/_api.py @@ -174,16 +174,6 @@ def fsp( return Fsp(wrapped, outputs=(wrapped.__name__,)) -def mk_fsp_shm_key( - sym: str, - target: Fsp - -) -> str: - actor_name, uuid = tractor.current_actor().uid - uuid_snip: str = uuid[:16] - return f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}' - - def maybe_mk_fsp_shm( sym: str, target: Fsp, @@ -207,7 +197,10 @@ def maybe_mk_fsp_shm( [(field_name, float) for field_name in target.outputs] ) - key = mk_fsp_shm_key(sym, target) + # (attempt to) uniquely key the fsp shm buffers + actor_name, uuid = tractor.current_actor().uid + uuid_snip: str = uuid[:16] + key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}' shm, opened = maybe_open_shm_array( key, diff --git a/piker/fsp/_engine.py b/piker/fsp/_engine.py index 13dcfccb..9a6ebddb 100644 --- a/piker/fsp/_engine.py +++ b/piker/fsp/_engine.py @@ -45,7 +45,7 @@ from ..data._sampling import ( _default_delay_s, open_sample_stream, ) -from ..accounting._mktinfo import Symbol +from ..accounting import MktPair from ._api import ( Fsp, _load_builtins, @@ -85,7 +85,7 @@ async def filter_quotes_by_sym( async def fsp_compute( - symbol: Symbol, + mkt: MktPair, flume: Flume, quote_stream: trio.abc.ReceiveChannel, @@ -104,7 +104,7 @@ async def fsp_compute( 
         disabled=True
     )

-    fqme = symbol.fqme
+    fqme = mkt.fqme
     out_stream = func(

         # TODO: do we even need this if we do the feed api right?
@@ -340,7 +340,7 @@ async def cascade(
         ) as feed:

             flume = feed.flumes[fqme]
-            symbol = flume.symbol
+            mkt = flume.mkt
             assert src.token == flume.rt_shm.token
             profiler(f'{func}: feed up')
@@ -352,7 +352,7 @@ async def cascade(

             fsp_target = partial(

                 fsp_compute,
-                symbol=symbol,
+                mkt=mkt,
                 flume=flume,
                 quote_stream=flume.stream,
diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py
index 930c44da..68b9e953 100644
--- a/piker/service/marketstore.py
+++ b/piker/service/marketstore.py
@@ -762,7 +762,7 @@ async def open_tsdb_client(

         if fqme:
             flume = feed.flumes[fqme]
-            symbol = flume.symbol
+            symbol = flume.mkt
             if symbol:
                 fqme = symbol.fqme
diff --git a/piker/ui/_display.py b/piker/ui/_display.py
index 40ec23b2..eb8e330b 100644
--- a/piker/ui/_display.py
+++ b/piker/ui/_display.py
@@ -231,7 +231,7 @@ async def increment_history_view(
             # l3 = ds.viz.shm.array[-3:]
             # print(
-            #     f'fast step for {ds.flume.symbol.fqme}:\n'
+            #     f'fast step for {ds.flume.mkt.fqme}:\n'
             #     f'{list(l3["time"])}\n'
             #     f'{l3}\n'
             # )
@@ -319,7 +319,7 @@ async def graphics_update_loop(
     for fqme, flume in feed.flumes.items():
         ohlcv = flume.rt_shm
         hist_ohlcv = flume.hist_shm
-        symbol = flume.symbol
+        symbol = flume.mkt
         fqme = symbol.fqme

         # update last price sticky
@@ -360,7 +360,7 @@ async def graphics_update_loop(

         last, volume = ohlcv.array[-1][['close', 'volume']]

-        symbol = flume.symbol
+        symbol = flume.mkt

         l1 = L1Labels(
             fast_pi,
@@ -1007,7 +1007,7 @@ async def link_views_with_region(
     hist_pi.addItem(region, ignoreBounds=True)
     region.setOpacity(6/16)

-    viz = rt_chart.get_viz(flume.symbol.fqme)
+    viz = rt_chart.get_viz(flume.mkt.fqme)
     assert viz
     index_field = viz.index_field
@@ -1034,7 +1034,7 @@ async def link_views_with_region(
         # HFT/real-time chart.
         rng = mn, mx = viewRange[0]

-        # hist_viz = hist_chart.get_viz(flume.symbol.fqme)
+        # hist_viz = hist_chart.get_viz(flume.mkt.fqme)
         # hist = hist_viz.shm.array[-3:]
         # print(
         #     f'mn: {mn}\n'
@@ -1279,13 +1279,13 @@ async def display_symbol_data(

         # TODO NOTE: THIS CONTROLS WHAT SYMBOL IS USED FOR ORDER MODE
         # SUBMISSIONS, we need to make this switch based on selection.
-        rt_linked._symbol = flume.symbol
-        hist_linked._symbol = flume.symbol
+        rt_linked._symbol = flume.mkt
+        hist_linked._symbol = flume.mkt

         ohlcv: ShmArray = flume.rt_shm
         hist_ohlcv: ShmArray = flume.hist_shm

-        symbol = flume.symbol
+        symbol = flume.mkt
         fqme = symbol.fqme
@@ -1378,7 +1378,7 @@ async def display_symbol_data(
             ohlcv: ShmArray = flume.rt_shm
             hist_ohlcv: ShmArray = flume.hist_shm

-            symbol = flume.symbol
+            symbol = flume.mkt
             fqme = symbol.fqme

             hist_pi = hist_chart.overlay_plotitem(
                 name=fqme,
diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py
index 6435e970..f942ff14 100644
--- a/piker/ui/_fsp.py
+++ b/piker/ui/_fsp.py
@@ -46,7 +46,7 @@ from ..data._sharedmem import (
     try_read,
 )
 from ..data.feed import Flume
-from ..accounting._mktinfo import Symbol
+from ..accounting import MktPair
 from ._chart import (
     ChartPlotWidget,
     LinkedSplits,
@@ -476,7 +476,8 @@ class FspAdmin:

     ) -> (Flume, trio.Event):

-        fqme = self.flume.symbol.get_fqme(delim_char='')
+        src_mkt: MktPair = self.flume.mkt
+        fqme: str = src_mkt.get_fqme(delim_char='')

         # allocate an output shm array
         key, dst_shm, opened = maybe_mk_fsp_shm(
@@ -488,14 +489,27 @@ class FspAdmin:
         portal = self.cluster.get(worker_name) or self.rr_next_portal()
         provider_tag = portal.channel.uid

-        symbol = Symbol(
-            key=key,
-            broker_info={
-                provider_tag: {'asset_type': 'fsp'},
-            },
+        # TODO: this should probably be turned into a
+        # ``Cascade`` type which describes the routing
+        # of an fsp's IO in terms of sink -> source
+        # shm/IPC endpoints?
+        mkt = MktPair(
+
+            # make this a couple addrs encapsulating
+            # the flume routing?
+            src=src_mkt.dst,
+            dst=target.name,
+
+            # make this a precision / rounding value?
+            price_tick=src_mkt.price_tick,
+            size_tick=src_mkt.size_tick,
+
+            bs_mktid=target.name,
+            broker='piker',
+            _atype='fsp',
         )
         dst_fsp_flume = Flume(
-            mkt=symbol,
+            mkt=mkt,
             _rt_shm_token=dst_shm.token,
             first_quote={},
diff --git a/piker/ui/_position.py b/piker/ui/_position.py
index 7ec859da..59ab434d 100644
--- a/piker/ui/_position.py
+++ b/piker/ui/_position.py
@@ -91,7 +91,7 @@ async def update_pnl_from_feed(
     pp: PositionTracker = order_mode.current_pp
     live: Position = pp.live_pp
-    key: str = live.symbol.fqme
+    key: str = live.mkt.fqme

     log.info(f'Starting pnl display for {pp.alloc.account}')
@@ -862,7 +862,7 @@ class PositionTracker:
         alloc = self.alloc

         # update allocator settings
-        asset_type = pp.symbol.type_key
+        asset_type = pp.mkt.type_key

         # specific configs by asset class / type
         if asset_type in _derivs:
diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 098bf14f..44558251 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -771,7 +771,7 @@ async def open_order_mode(

             # net-zero pp
             startup_pp = Position(
-                symbol=symbol,
+                mkt=symbol,
                 size=0,
                 ppu=0,

From 738d0ca38b8c39b52b22d789a3dc6eda20032e1c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 12:30:57 -0400
Subject: [PATCH 272/294] Rename db tests to test_docker_services

---
 tests/{test_databases.py => test_docker_services.py} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename tests/{test_databases.py => test_docker_services.py} (100%)

diff --git a/tests/test_databases.py b/tests/test_docker_services.py
similarity index 100%
rename from tests/test_databases.py
rename to tests/test_docker_services.py

From 35f0520cb0177f475ad23c436638fd50e6557436 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 15:26:51 -0400
Subject: [PATCH 273/294] Drop `Symbol` / `.symbol` support from
 `.accounting`

Only stuff left was the allocator stuff. Drop the top level subpkg
exports and finally kill off the awkwardly named
`Symbol.lot_size_digits` properties XD

Expose a bunch more util funcs at subpkg top level, do some typing in
allocator method internals.
---
 piker/accounting/__init__.py  | 14 +++++++--
 piker/accounting/_allocate.py | 56 +++++++++++++++++++++--------------
 piker/accounting/_ledger.py   |  6 ++--
 piker/accounting/_mktinfo.py  |  4 +--
 piker/accounting/_pos.py      |  8 ++---
 5 files changed, 51 insertions(+), 37 deletions(-)

diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py
index 4c4a0ca1..778bdd4e 100644
--- a/piker/accounting/__init__.py
+++ b/piker/accounting/__init__.py
@@ -38,23 +38,33 @@ from ._mktinfo import (
     dec_digits,
     digits_to_dec,
     MktPair,
+    Symbol,
     unpack_fqme,
 )
+from ._allocate import (
+    mk_allocator,
+    Allocator,
+)

 log = get_logger(__name__)

 __all__ = [
+    'Allocator',
     'Asset',
-    'dec_digits',
-    'digits_to_dec',
     'MktPair',
     'Position',
     'PpTable',
+    'Symbol',
     'Transaction',
     'TransactionLedger',
+    'dec_digits',
+    'digits_to_dec',
+    'iter_by_dt',
     'load_pps_from_ledger',
+    'mk_allocator',
     'open_pps',
     'open_trade_ledger',
+    'unpack_fqme',
 ]
diff --git a/piker/accounting/_allocate.py b/piker/accounting/_allocate.py
index 18900c9f..b4345785 100644
--- a/piker/accounting/_allocate.py
+++ b/piker/accounting/_allocate.py
@@ -24,7 +24,7 @@ from typing import Optional
 from bidict import bidict

 from ._pos import Position
-from ._mktinfo import Symbol
+from . import MktPair
 from ..data.types import Struct
@@ -42,7 +42,7 @@ SizeUnit = Enum(


 class Allocator(Struct):

-    symbol: Symbol
+    mkt: MktPair

     # TODO: if we ever want to support non-uniform entry-slot-proportion
     # "sizes"
@@ -114,8 +114,8 @@ class Allocator(Struct):
         depending on position / order entry config.

         '''
-        sym = self.symbol
-        ld = sym.lot_size_digits
+        mkt: MktPair = self.mkt
+        ld: int = mkt.size_tick_digits

         size_unit = self.size_unit
         live_size = live_pp.size
@@ -125,13 +125,13 @@ class Allocator(Struct):
         u_per_slot, currency_per_slot = self.step_sizes()

         if size_unit == 'units':
-            slot_size = u_per_slot
-            l_sub_pp = self.units_limit - abs_live_size
+            slot_size: float = u_per_slot
+            l_sub_pp: float = self.units_limit - abs_live_size

         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.ppu
-            slot_size = currency_per_slot / price
-            l_sub_pp = (self.currency_limit - live_cost_basis) / price
+            live_cost_basis: float = abs_live_size * live_pp.ppu
+            slot_size: float = currency_per_slot / price
+            l_sub_pp: float = (self.currency_limit - live_cost_basis) / price

         else:
             raise ValueError(
@@ -141,8 +141,14 @@ class Allocator(Struct):
         # an entry (adding-to or starting a pp)
         if (
             live_size == 0
-            or (action == 'buy' and live_size > 0)
-            or action == 'sell' and live_size < 0
+            or (
+                action == 'buy'
+                and live_size > 0
+            )
+            or (
+                action == 'sell'
+                and live_size < 0
+            )
         ):
             order_size = min(
                 slot_size,
@@ -178,7 +184,7 @@ class Allocator(Struct):
                 order_size = max(slotted_pp, slot_size)

                 if (
-                    abs_live_size < slot_size or
+                    abs_live_size < slot_size

                     # NOTE: front/back "loading" heuristic:
                     # if the remaining pp is in between 0-1.5x a slot's
                     # worth, dump the whole position in this last exit
                     # order, thus conceding the executions
                     # **without** going past a net-zero pp. if the pp is
                     # > 1.5x a slot size, then front load: exit a slot's and
                     # expect net-zero to be acquired on the final exit.
-                    slot_size < pp_size < round((1.5*slot_size), ndigits=ld) or
+                    or slot_size < pp_size < round((1.5*slot_size), ndigits=ld)
+                    or (

-                    # underlying requires discrete (int) units (eg. stocks)
-                    # and thus our slot size (based on our limit) would
-                    # exit a fractional unit's worth so, presuming we aren't
-                    # supporting a fractional-units-style broker, we need
-                    # exit the final unit.
-                    ld == 0 and abs_live_size == 1
+                        # underlying requires discrete (int) units (eg. stocks)
+                        # and thus our slot size (based on our limit) would
+                        # exit a fractional unit's worth so, presuming we aren't
+                        # supporting a fractional-units-style broker, we need
+                        # to exit the final unit.
+                        ld == 0
+                        and abs_live_size == 1
+                    )
                 ):
                     order_size = abs_live_size
@@ -203,13 +212,14 @@ class Allocator(Struct):
         # compute a fractional slots size to display
         slots_used = self.slots_used(
             Position(
-                mkt=sym,
+                mkt=mkt,
                 size=order_size,
                 ppu=price,
-                bs_mktid=sym,
+                bs_mktid=mkt.bs_mktid,
             )
         )

+        # TODO: render an actual ``Executable`` type here?
         return {
             'size': abs(round(order_size, ndigits=ld)),
             'size_digits': ld,
@@ -249,7 +259,7 @@ class Allocator(Struct):

 def mk_allocator(

-    symbol: Symbol,
+    mkt: MktPair,
     startup_pp: Position,

     # default allocation settings
@@ -276,6 +286,6 @@ def mk_allocator(
         defaults.update(user_def)

     return Allocator(
-        symbol=symbol,
+        mkt=mkt,
         **defaults,
     )
diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index 23845fde..5107f2bb 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -22,9 +22,9 @@ from __future__ import annotations
 from collections import UserDict
 from contextlib import contextmanager as cm
 from pathlib import Path
-import time
 from typing import (
     Any,
+    Callable,
     Iterator,
     Union,
     Generator
@@ -158,7 +158,7 @@ class TransactionLedger(UserDict):
         for tid, txdict in self.data.items():
             # special field handling for datetimes
             # to ensure pendulum is used!
-            fqme = txdict.get('fqme', txdict['fqsn'])
+            fqme = txdict.get('fqme') or txdict['fqsn']
             dt = parse(txdict['dt'])
             expiry = txdict.get('expiry')
@@ -242,8 +242,6 @@ def iter_by_dt(
     datetime presumably set at the ``'dt'`` field in each entry.

     '''
-    txs = records.items()
-
     def dyn_parse_to_dt(
         pair: tuple[str, dict],
     ) -> DateTime:
diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index 653c8d04..046195ca 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -519,11 +519,11 @@ class MktPair(Struct, frozen=True):
         return self._atype

     @property
-    def tick_size_digits(self) -> int:
+    def price_tick_digits(self) -> int:
         return float_digits(self.price_tick)

     @property
-    def lot_size_digits(self) -> int:
+    def size_tick_digits(self) -> int:
         return float_digits(self.size_tick)
diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index dda39177..3af0eeef 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -44,7 +44,6 @@ from ._ledger import (
     open_trade_ledger,
 )
 from ._mktinfo import (
-    Symbol,
     MktPair,
     Asset,
     unpack_fqme,
@@ -247,11 +246,8 @@ class Position(Struct):

         # XXX: better place to do this?
         mkt = self.mkt
-
-        # TODO: switch to new fields..?
-        # .size_tick_digits, .price_tick_digits
-        size_tick_digits = mkt.lot_size_digits
-        price_tick_digits = mkt.tick_size_digits
+        size_tick_digits = mkt.size_tick_digits
+        price_tick_digits = mkt.price_tick_digits

         self.ppu = round(
             # TODO: change this to ppu?
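
Quick sanity reference for the renamed digit properties above: they're just
the (negated) base-10 exponents of the respective tick sizes, mirroring the
`float_digits()` helper. A standalone example of the mapping (not importing
piker itself):

    from decimal import Decimal


    def float_digits(value) -> int:
        # same trick as piker's helper: a tick of 0.01 has
        # base-10 exponent -2, ie. 2 decimal digits of precision.
        return int(-Decimal(str(value)).as_tuple().exponent)


    price_tick = Decimal('0.01')  # eg. a stock's price increment
    size_tick = Decimal('1')      # whole-unit lot sizing

    assert float_digits(price_tick) == 2  # -> .price_tick_digits
    assert float_digits(size_tick) == 0   # -> .size_tick_digits

    # so order *prices* round with the price-tick digit count:
    assert round(100.12345, ndigits=float_digits(price_tick)) == 100.12
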
From 39af215d61759acb929e531693e06a5153fad6b0 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 15:29:42 -0400
Subject: [PATCH 274/294] kraken: use new `Position.mkt` attr

---
 piker/brokers/kraken/broker.py  | 2 +-
 piker/brokers/kucoin.py         | 1 -
 piker/clearing/_paper_engine.py | 3 +--
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py
index 0a9056af..28f5d026 100644
--- a/piker/brokers/kraken/broker.py
+++ b/piker/brokers/kraken/broker.py
@@ -402,7 +402,7 @@ def trades2pps(
                 # right since `.broker` is already
                 # included?
                 account='kraken.' + acctid,
-                symbol=p.symbol.fqme,
+                symbol=p.mkt.fqme,
                 size=p.size,
                 avg_price=p.ppu,
                 currency='',
diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py
index 17376e8d..8cf06300 100755
--- a/piker/brokers/kucoin.py
+++ b/piker/brokers/kucoin.py
@@ -647,7 +647,6 @@ async def get_mkt_info(
         tx_tick=digits_to_dec(dst.precision),
         info=dst.to_dict(),
     )
-
     mkt = MktPair(
         dst=dst_asset,
         src=src_asset,
diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py
index bf4f0948..44171dbc 100644
--- a/piker/clearing/_paper_engine.py
+++ b/piker/clearing/_paper_engine.py
@@ -578,8 +578,7 @@ async def trades_dialogue(

             # for each sym in the ledger load it's `MktPair` info
             for tid, txdict in ledger.data.items():
-                # TODO: switch this to fqme
-                l_fqme = txdict.get('fqme', txdict['fqsn'])
+                l_fqme: str = txdict.get('fqme') or txdict['fqsn']

                 if (
                     gmi

From 1b577eebf6f97e4e38411513aebd587fb9121ee6 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 15:30:17 -0400
Subject: [PATCH 275/294] Change over the UI layer to use `MktPair`

Including changing to `LinkedSplits.mkt: MktPair` and adding an explicit
setter method for setting it and being sure that nothing breaks in the
display system init!

For this commit we leave in warning access to `LinkedSplits.symbol` but
will remove in following commit.
---
 piker/data/feed.py      |  2 +-
 piker/ui/_chart.py      | 38 ++++++++++++++++++++----------
 piker/ui/_cursor.py     |  4 ++--
 piker/ui/_dataviz.py    | 51 +++++++++++++++++++++++------------------
 piker/ui/_display.py    | 37 ++++++++++++++++--------------
 piker/ui/_fsp.py        |  9 ++++----
 piker/ui/_position.py   | 15 +++++++-----
 piker/ui/_signalling.py |  5 +++-
 piker/ui/order_mode.py  | 45 +++++++++++++++++++-----------------
 9 files changed, 120 insertions(+), 86 deletions(-)

diff --git a/piker/data/feed.py b/piker/data/feed.py
index 88d2a386..1714cf19 100644
--- a/piker/data/feed.py
+++ b/piker/data/feed.py
@@ -907,7 +907,7 @@ async def open_feed(

     for fqme, flume_msg in flumes_msg_dict.items():
         flume = Flume.from_msg(flume_msg)
-        # assert flume.symbol.fqme == fqme
+        # assert flume.mkt.fqme == fqme
         feed.flumes[fqme] = flume

         # TODO: do we need this?
diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py
index 13ec23c3..b2da3fa2 100644
--- a/piker/ui/_chart.py
+++ b/piker/ui/_chart.py
@@ -68,7 +68,10 @@ from ..data.feed import (
     Feed,
     Flume,
 )
-from ..accounting._mktinfo import Symbol
+from ..accounting import (
+    MktPair,
+    Symbol,
+)
 from ..log import get_logger
 from ._interaction import ChartView
 from ._forms import FieldsForm
@@ -287,7 +290,7 @@ class GodWidget(QWidget):
             pp_nav.hide()

         # set window titlebar info
-        symbol = self.rt_linked.symbol
+        symbol = self.rt_linked.mkt
         if symbol is not None:
             self.window.setWindowTitle(
                 f'{symbol.fqme} '
@@ -452,7 +455,7 @@ class LinkedSplits(QWidget):
         # update the UI for a given "chart instance".
         self.display_state: DisplayState | None = None

-        self._symbol: Symbol = None
+        self._mkt: MktPair | Symbol = None

     def on_splitter_adjust(
         self,
@@ -474,9 +477,20 @@ class LinkedSplits(QWidget):
             **kwargs,
         )

+    def set_mkt_info(
+        self,
+        mkt: MktPair,
+    ) -> None:
+        self._mkt = mkt
+
     @property
-    def symbol(self) -> Symbol:
-        return self._symbol
+    def mkt(self) -> MktPair:
+        return self._mkt
+
+    @property
+    def symbol(self) -> Symbol | MktPair:
+        log.warning(f'{type(self)}.symbol is now deprecated use .mkt!')
+        return self.mkt

     def set_split_sizes(
         self,
@@ -521,7 +535,7 @@ class LinkedSplits(QWidget):

     def plot_ohlc_main(
         self,
-        symbol: Symbol,
+        mkt: MktPair,
         shm: ShmArray,
         flume: Flume,
         sidepane: FieldsForm,
@@ -540,7 +554,7 @@ class LinkedSplits(QWidget):
         # add crosshairs
         self.cursor = Cursor(
             linkedsplits=self,
-            digits=symbol.tick_size_digits,
+            digits=mkt.price_tick_digits,
         )

         # NOTE: atm the first (and only) OHLC price chart for the symbol
@@ -548,7 +562,7 @@ class LinkedSplits(QWidget):
         # be no distinction since we will have multiple symbols per
         # view as part of "aggregate feeds".
         self.chart = self.add_plot(
-            name=symbol.fqme,
+            name=mkt.fqme,
             shm=shm,
             flume=flume,
             style=style,
@@ -1030,7 +1044,7 @@ class ChartPlotWidget(pg.PlotWidget):
        '''
        view = vb or self.view
        viz = self.main_viz
-        l, r = viz.view_range()
+        left, right = viz.view_range()
        x_shift = viz.index_step() * datums

        if datums >= 300:
@@ -1040,8 +1054,8 @@ class ChartPlotWidget(pg.PlotWidget):
        # should trigger broadcast on all overlays right?
        view.setXRange(
-            min=l + x_shift,
-            max=r + x_shift,
+            min=left + x_shift,
+            max=right + x_shift,

            # TODO: holy shit, wtf dude... why tf would this not be 0 by
            # default... speechless.
@@ -1227,7 +1241,7 @@ class ChartPlotWidget(pg.PlotWidget):

         # if the sticky is for our symbol
         # use the tick size precision for display
         name = name or pi.name
-        sym = self.linked.symbol
+        sym = self.linked.mkt
         digits = None
         if name == sym.key:
             digits = sym.tick_size_digits
diff --git a/piker/ui/_cursor.py b/piker/ui/_cursor.py
index 83986762..0a2c82b1 100644
--- a/piker/ui/_cursor.py
+++ b/piker/ui/_cursor.py
@@ -228,7 +228,7 @@ class ContentsLabel(pg.LabelItem):
                     'bar_wap',
                 ]
             ],
-            name=name,
+            # name=name,
            index=ix,
         )
     )
@@ -363,7 +363,7 @@ class Cursor(pg.GraphicsObject):

         # value used for rounding y-axis discreet tick steps
         # computing once, up front, here cuz why not
-        mkt = self.linked._symbol
+        mkt = self.linked.mkt
         self._y_tick_mult = 1/float(mkt.price_tick)

         # line width in view coordinates
diff --git a/piker/ui/_dataviz.py b/piker/ui/_dataviz.py
index 721483e1..a24c7d5c 100644
--- a/piker/ui/_dataviz.py
+++ b/piker/ui/_dataviz.py
@@ -436,12 +436,12 @@ class Viz(Struct):
         else:
             if x_range is None:
                 (
-                    l,
+                    xl,
                     _,
                     lbar,
                     rbar,
                     _,
-                    r,
+                    xr,
                 ) = self.datums_range()

                 profiler(f'{self.name} got bars range')
@@ -585,12 +585,12 @@ class Viz(Struct):
         Return a range tuple for the datums present in view.
         '''
-        l, r = view_range or self.view_range()
+        xl, xr = view_range or self.view_range()

         index_field: str = index_field or self.index_field
         if index_field == 'index':
-            l: int = round(l)
-            r: int = round(r)
+            xl: int = round(xl)
+            xr: int = round(xr)

         if array is None:
             array = self.shm.array

         first = floor(array[0][index_field])
         last = ceil(array[-1][index_field])

         # invalid view state
         if (
-            r < l
-            or l < 0
-            or r < 0
+            xr < xl
+            or xl < 0
+            or xr < 0
             or (
-                l > last
-                and r > last
+                xl > last
+                and xr > last
             )
         ):
             leftmost: int = first

         else:
             # determine first and last datums in view determined by
             # l -> r view range.
             rightmost = max(
-                min(last, ceil(r)),
+                min(last, ceil(xr)),
                 first,
             )

             leftmost = min(
-                max(first, floor(l)),
+                max(first, floor(xl)),
                 last,
                 rightmost - 1,
             )

         self.vs.xrange = leftmost, rightmost

         return (
-            l,  # left x-in-view
+            xl,  # left x-in-view
             first,  # first datum
             leftmost,
             rightmost,
             last,  # last_datum
-            r,  # right-x-in-view
+            xr,  # right-x-in-view
         )

     def read(
         profiler('self.shm.array READ')

         (
-            l,
+            xl,
             ifirst,
             lbar,
             rbar,
             ilast,
-            r,
+            xr,
         ) = self.datums_range(
             index_field=index_field,
             array=array,
         )

         # a uniform time stamp step size?
         else:
             # get read-relative indices adjusting for master shm index.
-            lbar_i = max(l, ifirst) - ifirst
-            rbar_i = min(r, ilast) - ifirst
+            lbar_i = max(xl, ifirst) - ifirst
+            rbar_i = min(xr, ilast) - ifirst

             # NOTE: the slice here does NOT include the extra ``+ 1``
             # BUT the ``in_view`` slice DOES..
@@ -1244,18 +1244,25 @@ class Viz(Struct):

         '''
         # get most recent right datum index in-view
-        l, start, datum_start, datum_stop, stop, r = self.datums_range()
+        (
+            xl,
+            start,
+            datum_start,
+            datum_stop,
+            stop,
+            xr,
+        ) = self.datums_range()

         lasts = self.shm.array[-1]
         i_step = lasts['index']  # last index-specific step.
         i_step_t = lasts['time']  # last time step.

-        # fqme = self.flume.symbol.fqme
+        # fqme = self.flume.mkt.fqme

         # check if "last (is) in view" -> is a real-time update necessary?
         if self.index_field == 'index':
-            liv = (r >= i_step)
+            liv = (xr >= i_step)
         else:
-            liv = (r >= i_step_t)
+            liv = (xr >= i_step_t)

         # compute the first available graphic obj's x-units-per-pixel
         # TODO: make this not loop through all vizs each time!
diff --git a/piker/ui/_display.py b/piker/ui/_display.py
index 40ec23b2..04b363b1 100644
--- a/piker/ui/_display.py
+++ b/piker/ui/_display.py
@@ -37,6 +37,9 @@ import pyqtgraph as pg
 from msgspec import field
 # from .. import brokers
+from ..accounting import (
+    MktPair,
+)
 from ..data.feed import (
     open_feed,
     Feed,
@@ -319,8 +322,8 @@ async def graphics_update_loop(
     for fqme, flume in feed.flumes.items():
         ohlcv = flume.rt_shm
         hist_ohlcv = flume.hist_shm
-        symbol = flume.mkt
-        fqme = symbol.fqme
+        mkt = flume.mkt
+        fqme = mkt.fqme

         # update last price sticky
         fast_viz = fast_chart._vizs[fqme]
@@ -360,13 +363,13 @@ async def graphics_update_loop(

         last, volume = ohlcv.array[-1][['close', 'volume']]

-        symbol = flume.mkt
+        mkt = flume.mkt

         l1 = L1Labels(
             fast_pi,
             # determine precision/decimal lengths
-            digits=symbol.tick_size_digits,
-            size_digits=symbol.lot_size_digits,
+            digits=mkt.price_tick_digits,
+            size_digits=mkt.size_tick_digits,
         )

         # TODO:
@@ -449,7 +452,7 @@ async def graphics_update_loop(
                 and quote_rate >= display_rate
             ):
                 pass
-                # log.warning(f'High quote rate {symbol.key}: {quote_rate}')
+                # log.warning(f'High quote rate {mkt.fqme}: {quote_rate}')

             last_quote_s = time.time()
@@ -1224,7 +1227,7 @@ async def display_symbol_data(
     # tf_key = tf_in_1s[step_size_s]
     godwidget.window.setWindowTitle(
         f'{fqmes} '
-        # f'tick:{symbol.tick_size} '
+        # f'tick:{mkt.tick_size} '
         # f'step:{tf_key} '
     )
     # generate order mode side-pane UI
@@ -1234,8 +1237,8 @@ async def display_symbol_data(
     godwidget.pp_pane = pp_pane

     # create top history view chart above the "main rt chart".
-    rt_linked = godwidget.rt_linked
-    hist_linked = godwidget.hist_linked
+    rt_linked: LinkedSplits = godwidget.rt_linked
+    hist_linked: LinkedSplits = godwidget.hist_linked

     # NOTE: here we insert the slow-history chart set into
     # the fast chart's splitter -> so it's a splitter of charts
@@ -1279,17 +1282,17 @@ async def display_symbol_data(

         # TODO NOTE: THIS CONTROLS WHAT SYMBOL IS USED FOR ORDER MODE
         # SUBMISSIONS, we need to make this switch based on selection.
-        rt_linked._symbol = flume.mkt
-        hist_linked._symbol = flume.mkt
+        rt_linked.set_mkt_info(flume.mkt)
+        hist_linked.set_mkt_info(flume.mkt)

         ohlcv: ShmArray = flume.rt_shm
         hist_ohlcv: ShmArray = flume.hist_shm

-        symbol = flume.mkt
-        fqme = symbol.fqme
+        mkt: MktPair = flume.mkt
+        fqme = mkt.fqme

         hist_chart = hist_linked.plot_ohlc_main(
-            symbol,
+            mkt,
             hist_ohlcv,
             flume,
             # in the case of history chart we explicitly set `False`
@@ -1311,7 +1314,7 @@ async def display_symbol_data(
         hist_linked.cursor.always_show_xlabel = False

         rt_chart = rt_linked.plot_ohlc_main(
-            symbol,
+            mkt,
             ohlcv,
             flume,
             # in the case of history chart we explicitly set `False`
@@ -1378,8 +1381,8 @@ async def display_symbol_data(
             ohlcv: ShmArray = flume.rt_shm
             hist_ohlcv: ShmArray = flume.hist_shm

-            symbol = flume.mkt
-            fqme = symbol.fqme
+            mkt = flume.mkt
+            fqme = mkt.fqme

             hist_pi = hist_chart.overlay_plotitem(
                 name=fqme,
diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py
index f942ff14..b4aa2b10 100644
--- a/piker/ui/_fsp.py
+++ b/piker/ui/_fsp.py
@@ -29,7 +29,6 @@ from typing import (
     Any,
 )

-import numpy as np
 import msgspec
 import tractor
 import pyqtgraph as pg
@@ -428,7 +427,7 @@ class FspAdmin:
                     in self._flow_registry.items()
                 ],

-            ) as (ctx, last_index),
+            ) as (ctx, _),
             ctx.open_stream() as stream,
         ):
@@ -486,8 +485,10 @@ class FspAdmin:
             readonly=True,
         )

-        portal = self.cluster.get(worker_name) or self.rr_next_portal()
-        provider_tag = portal.channel.uid
+        portal: tractor.Portal = (
+            self.cluster.get(worker_name)
+            or self.rr_next_portal()
+        )
diff --git a/piker/ui/_position.py b/piker/ui/_position.py
index 59ab434d..a2e6c19e 100644
--- a/piker/ui/_position.py
+++ b/piker/ui/_position.py
@@ -45,7 +45,10 @@ from ..calc import (
     pnl,
     puterize,
 )
-from ..accounting._allocate import Allocator
+from ..accounting import (
+    Allocator,
+    MktPair,
+)
 from ..accounting import (
     Position,
 )
@@ -244,7 +247,7 @@ class SettingsPane:
         # a ``brokerd`) then error and switch back to the last
         # selection.
         if tracker is None:
-            sym = old_tracker.charts[0].linked.symbol.key
+            sym: str = old_tracker.charts[0].linked.mkt.fqme
             log.error(
                 f'Account `{account_name}` can not be set for {sym}'
             )
@@ -415,9 +418,10 @@ class SettingsPane:

         '''
         mode = self.order_mode
-        sym = mode.chart.linked.symbol
+        mkt: MktPair = mode.chart.linked.mkt
         size = tracker.live_pp.size
-        flume: Feed = mode.feed.flumes[sym.fqme]
+        fqme: str = mkt.fqme
+        flume: Feed = mode.feed.flumes[fqme]
         pnl_value = 0

         if size:
@@ -430,7 +434,6 @@ class SettingsPane:

             # maybe start update task
             global _pnl_tasks
-            fqme = sym.fqme
             if fqme not in _pnl_tasks:
                 _pnl_tasks[fqme] = True
                 self.order_mode.nursery.start_soon(
@@ -555,7 +558,7 @@ class Nav(Struct):

         '''
         for key, chart in self.charts.items():
-            size_digits = size_digits or chart.linked.symbol.lot_size_digits
+            size_digits = size_digits or chart.linked.mkt.size_tick_digits
             line = self.lines.get(key)
             level_marker = self.level_markers[key]
             pp_label = self.pp_labels[key]
diff --git a/piker/ui/_signalling.py b/piker/ui/_signalling.py
index 13bc2fc8..c952b49d 100644
--- a/piker/ui/_signalling.py
+++ b/piker/ui/_signalling.py
@@ -23,7 +23,10 @@ WARNING: this code likely doesn't work at all (yet)
 """
 import numpy as np
 import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import (
+    QtCore,
+    QtWidgets,
+)

 from .quantdom.charts import CenteredTextItem
 from .quantdom.base import Quotes
diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 098bf14f..44558251 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -36,17 +36,18 @@ import trio
 from PyQt5.QtCore import Qt

 from .. import config
-from ..accounting import Position
-from ..accounting._allocate import (
+from ..accounting import (
+    Allocator,
+    Position,
     mk_allocator,
+    MktPair,
+    Symbol,
 )
 from ..clearing._client import (
     open_ems,
     OrderClient,
 )
 from ._style import _font
-from ..accounting._mktinfo import Symbol
-from ..accounting import MktPair
 from ..data.feed import (
     Feed,
     Flume,
@@ -93,7 +94,7 @@ class Dialog(Struct):
     order: Order
     symbol: str
     lines: list[LevelLine]
-    last_status_close: Callable = lambda: None
+    last_status_close: Callable | None = None
     msgs: dict[str, dict] = {}
     fills: dict[str, Any] = {}
@@ -288,10 +289,10 @@ class OrderMode:
             # since that's illogical / a no-op.
             return

-        symbol = self.chart.linked.symbol
+        mkt: MktPair = self.chart.linked.mkt

         # NOTE : we could also use instead,
-        # symbol.quantize(price, quantity_type='price')
+        # mkt.quantize(price, quantity_type='price')
         # but it returns a Decimal and it's probably gonna
         # be slower?
         # TODO: should we be enforcing this precision
@@ -301,7 +302,7 @@ class OrderMode:

         price = round(
             price,
-            ndigits=symbol.tick_size_digits,
+            ndigits=mkt.price_tick_digits,
         )

         order = self._staged_order = Order(
@@ -309,8 +310,8 @@ class OrderMode:
             price=price,
             account=self.current_pp.alloc.account,
             size=0,
-            symbol=symbol,
-            brokers=[symbol.broker],
+            symbol=mkt.fqme,
+            brokers=[mkt.broker],
             oid='',  # filled in on submit
             exec_mode=trigger_type,  # dark or live
         )
@@ -457,10 +458,10 @@ class OrderMode:
        the EMS, adjust mirrored level line on secondary chart.
         '''
-        mktinfo = self.chart.linked.symbol
+        mktinfo: MktPair = self.chart.linked.mkt
         level = round(
             line.value(),
-            ndigits=mktinfo.tick_size_digits,
+            ndigits=mktinfo.price_tick_digits,
         )
         # updated by level change callback set in ``.new_line_from_order()``
         dialog = line.dialog
@@ -497,7 +498,9 @@ class OrderMode:
         # a submission is the start of a new order dialog
         dialog = self.dialogs[uuid]
         dialog.lines = lines
-        dialog.last_status_close()
+        cls: Callable | None = dialog.last_status_close
+        if cls:
+            cls()

         for line in lines:
@@ -549,7 +552,7 @@ class OrderMode:
         # XXX: seems to fail on certain types of races?
         # assert len(lines) == 2
         if lines:
-            flume: Flume = self.feed.flumes[chart.linked.symbol.fqme]
+            flume: Flume = self.feed.flumes[chart.linked.mkt.fqme]
             _, _, ratio = flume.get_ds_info()

             for chart, shm in [
@@ -740,15 +743,15 @@ async def open_order_mode(
     lines = LineEditor(godw=godw)
     arrows = ArrowEditor(godw=godw)

-    # symbol id
-    symbol = chart.linked.symbol
+    # market endpoint info
+    mkt: MktPair = chart.linked.mkt

     # map of per-provider account keys to position tracker instances
     trackers: dict[str, PositionTracker] = {}

     # load account names from ``brokers.toml``
     accounts_def = config.load_accounts(
-        providers=[symbol.broker],
+        providers=[mkt.broker],
     )

     # XXX: ``brokerd`` delivers a set of account names that it
@@ -771,17 +774,17 @@ async def open_order_mode(

             # net-zero pp
             startup_pp = Position(
-                mkt=symbol,
+                mkt=mkt,
                 size=0,
                 ppu=0,

                 # XXX: BLEH, do we care about this on the client side?
-                bs_mktid=symbol.key,
+                bs_mktid=mkt.key,
             )

             # allocator config
-            alloc = mk_allocator(
-                symbol=symbol,
+            alloc: Allocator = mk_allocator(
+                mkt=mkt,
                 account=account_name,

                 # if this startup size is greater the allocator limit,

From f0a346dcc3a5c2057f9b0b515b96b91679843c0b Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 15:42:14 -0400
Subject: [PATCH 276/294] Strictly drop `LinkedSplits.symbol` B)

---
 piker/ui/_chart.py     | 10 ++--------
 piker/ui/_lines.py     |  2 +-
 piker/ui/order_mode.py |  4 ++--
 3 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py
index b2da3fa2..a1935641 100644
--- a/piker/ui/_chart.py
+++ b/piker/ui/_chart.py
@@ -70,7 +70,6 @@ from ..data.feed import (
 )
 from ..accounting import (
     MktPair,
-    Symbol,
 )
 from ..log import get_logger
 from ._interaction import ChartView
@@ -455,7 +454,7 @@ class LinkedSplits(QWidget):
         # update the UI for a given "chart instance".
         self.display_state: DisplayState | None = None

-        self._mkt: MktPair | Symbol = None
+        self._mkt: MktPair = None

     def on_splitter_adjust(
         self,
@@ -487,11 +486,6 @@ class LinkedSplits(QWidget):
     def mkt(self) -> MktPair:
         return self._mkt

-    @property
-    def symbol(self) -> Symbol | MktPair:
-        log.warning(f'{type(self)}.symbol is now deprecated use .mkt!')
-        return self.mkt
-
     def set_split_sizes(
         self,
         prop: float | None = None,
@@ -1236,7 +1230,7 @@ class ChartPlotWidget(pg.PlotWidget):

         # TODO: UGH! just make this not here! we should
         # be making the sticky from code which has access
-        # to the ``Symbol`` instance..
+        # to the ``MktPair`` instance..
         # if the sticky is for our symbol
         # use the tick size precision for display
         name = name or pi.name
         sym = self.linked.mkt
         digits = None
         if name == sym.key:
             digits = sym.tick_size_digits
diff --git a/piker/ui/_lines.py b/piker/ui/_lines.py
index 59796d4f..6f64a349 100644
--- a/piker/ui/_lines.py
+++ b/piker/ui/_lines.py
@@ -126,7 +126,7 @@ class LevelLine(pg.InfiniteLine):
         self._on_drag_start = lambda l: None
         self._on_drag_end = lambda l: None

-        self._y_incr_mult = float(1 / chart.linked.symbol.size_tick)
+        self._y_incr_mult = float(1 / chart.linked.mkt.size_tick)
         self._right_end_sc: float = 0

         # use px caching
diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 44558251..62e54680 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -996,7 +996,7 @@ async def process_trade_msg(
     if name in (
         'position',
     ):
-        sym = mode.chart.linked.symbol
+        sym: MktPair = mode.chart.linked.mkt
         pp_msg_symbol = msg['symbol'].lower()
         fqme = sym.fqme
         broker = sym.broker
@@ -1052,7 +1052,7 @@ async def process_trade_msg(
             )
             assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}'

-            sym = mode.chart.linked.symbol
+            sym: MktPair = mode.chart.linked.mkt
             fqme = sym.fqme
             if (
                 ((order.symbol + f'.{msg.src}') == fqme)

From 7381c361cdf85d590cddb96e77df3abf5c07fd64 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 24 May 2023 17:25:23 -0400
Subject: [PATCH 277/294] Some linting fixes after trying out `ruff`

---
 piker/accounting/cli.py      | 9 ++++++++-
 piker/brokers/kraken/api.py  | 2 +-
 piker/data/history.py        | 4 ++--
 piker/service/marketstore.py | 7 ++++---
 piker/ui/_lines.py           | 4 ++--
 5 files changed, 17 insertions(+), 9 deletions(-)

diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py
index 76cbc1ab..ee91d1b3 100644
--- a/piker/accounting/cli.py
+++ b/piker/accounting/cli.py
@@ -28,6 +28,7 @@ import tractor
 import trio
 import typer

+from ..log import get_logger
 from ..service import (
     open_piker_runtime,
 )
@@ -102,6 +103,7 @@ def sync(
         "-l",
     ),
 ):
+    log = get_logger(loglevel)
     console = Console()
     try:
@@ -134,6 +136,10 @@ def sync(
             tractor.open_nursery() as an,
         ):
+            log.info(
+                f'Piker runtime up as {actor.uid}@{sockaddr}'
+            )
+
             portal = await an.start_actor(
                 loglevel=loglevel,
                 debug_mode=pdb,
@@ -162,9 +168,10 @@ def sync(
             async with (
                 open_trades_endpoint as (
                     brokerd_ctx,
-                    (positions, accounts,),
+                    (positions, accounts),
                 ),
             ):
+                assert len(accounts) == 1
                 summary: str = (
                     '[dim underline]Piker Position Summary[/] '
                     f'[dim blue underline]{brokername}[/]'
diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py
index 24330638..1ebdb759 100644
--- a/piker/brokers/kraken/api.py
+++ b/piker/brokers/kraken/api.py
@@ -296,7 +296,7 @@ class Client:

         for bs_mktid, info in assets.items():
             altname = self._altnames[bs_mktid] = info['altname']
-            aclass = info['aclass']
+            aclass: str = info['aclass']

             self.assets[bs_mktid] = Asset(
                 name=altname.lower(),
diff --git a/piker/data/history.py b/piker/data/history.py
index 28a4590e..ebfe8c65 100644
--- a/piker/data/history.py
+++ b/piker/data/history.py
@@ -41,7 +41,6 @@ import numpy as np
 from .. import config
 from ..accounting import (
     MktPair,
-    unpack_fqme,
 )
 from ._util import (
     log,
@@ -110,6 +109,7 @@ async def start_backfill(
     async with mod.open_history_client(
         mkt,
     ) as (hist, config):
+        log.info(f'{mod} history client returned backfill config: {config}')

         # get latest query's worth of history all the way
         # back to what is recorded in the tsdb
@@ -326,7 +326,7 @@ async def start_backfill(
             f'{start_dt} -> {end_dt}'
         )

-        if mkt.dst.atype != 'crypto':
+        if mkt.dst.atype not in {'crypto', 'crypto_currency'}:
             # for now, our table key schema is not including
             # the dst[/src] source asset token.
             col_sym_key: str = mkt.get_fqme(
diff --git a/piker/service/marketstore.py b/piker/service/marketstore.py
index 68b9e953..ac0ad0a4 100644
--- a/piker/service/marketstore.py
+++ b/piker/service/marketstore.py
@@ -26,7 +26,6 @@ from __future__ import annotations

 from contextlib import asynccontextmanager as acm
 from datetime import datetime
-from functools import partial
 from pprint import pformat
 from typing import (
     Any,
@@ -59,6 +58,7 @@ from ._util import (
     log,  # sub-sys logger
     get_console_log,
 )
+from . import Services
 from ..data.feed import maybe_open_feed
 from .._profile import Profiler
 from .. import config
@@ -260,7 +260,7 @@ async def start_ahab_daemon(
         ep_kwargs={'user_config': conf},
         loglevel=loglevel,
     ) as (
-        ctn_ready,
+        _,
         config,
         (cid, pid),
     ):
@@ -583,6 +583,7 @@ class Storage:
         client = self.client
         syms = await client.list_symbols()
         if key not in syms:
+            await tractor.breakpoint()
             raise KeyError(f'`{key}` table key not found in\n{syms}?')

         tbk = mk_tbk((
@@ -743,7 +744,7 @@ async def open_tsdb_client(
         touch_if_dne=True,
     )
     tsdbconf = rootconf['network'].get('tsdb')
-    backend = tsdbconf.pop('backend')
+    # backend = tsdbconf.pop('backend')
     async with (
         open_storage_client(
             **tsdbconf,
diff --git a/piker/ui/_lines.py b/piker/ui/_lines.py
index 6f64a349..62ce9de1 100644
--- a/piker/ui/_lines.py
+++ b/piker/ui/_lines.py
@@ -123,8 +123,8 @@ class LevelLine(pg.InfiniteLine):
         self._track_cursor: bool = False
         self.always_show_labels = always_show_labels

-        self._on_drag_start = lambda l: None
-        self._on_drag_end = lambda l: None
+        self._on_drag_start = lambda lvln: None
+        self._on_drag_end = lambda lvln: None

         self._y_incr_mult = float(1 / chart.linked.mkt.size_tick)
         self._right_end_sc: float = 0

From da4d344e63eb9682a6b8b927cb5adbf692d9abeb Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 25 May 2023 13:53:14 -0400
Subject: [PATCH 278/294] Change to `piker_pin` branch in `tomlkit` fork

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index ba4dc620..742c2efa 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,4 +17,4 @@

 # ``tomlkit`` for account files and configs; we've
 # added some new features that need to get upstreamed:
--e git+https://github.com/pikers/tomlkit.git@writing_docs_tweaks#egg=tomlkit
+-e git+https://github.com/pikers/tomlkit.git@piker_pin#egg=tomlkit

From 9c80969fd5e5f7e26bb6caa02285a3c2963b7803 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 25 May 2023 16:01:21 -0400
Subject: [PATCH 279/294] .data.validate: add missing endpoint warnings

---
 piker/brokers/__init__.py |  1 +
 piker/data/validate.py    | 47 +++++++++++++++++++++++++++++++++++++--
 2 files changed, 46 insertions(+), 2 deletions(-)

diff --git a/piker/brokers/__init__.py b/piker/brokers/__init__.py
index c67f4003..93393654 100644
--- a/piker/brokers/__init__.py
+++ b/piker/brokers/__init__.py
@@ -25,6 +25,7 @@
__brokers__ = [ 'ib', 'kraken', 'kucoin' + # broken but used to work # 'questrade', # 'robinhood', diff --git a/piker/data/validate.py b/piker/data/validate.py index 8f6c1d5a..321b4296 100644 --- a/piker/data/validate.py +++ b/piker/data/validate.py @@ -23,6 +23,7 @@ from pprint import pformat from types import ModuleType from typing import ( Any, + Callable, ) from msgspec import field @@ -60,6 +61,31 @@ class FeedInit(Struct, frozen=True): 'sum_tick_vlm': True, }) +# XXX: we group backend endpoints into 3 +# groups to determine "degrees" of functionality. +_eps: dict[str, list[str]] = { + + # basic API `Client` layer + 'middleware': [ + 'get_client', + ], + + # (live) data streaming / loading / search + 'datad': [ + 'get_mkt_info', + 'open_history_client', + 'open_symbol_search', + 'stream_quotes', + ], + + # live order control and trading + 'brokerd': [ + 'trades_dialogue', + # TODO: ledger normalizer helper? + # norm_trades(records: dict[str, Any]) -> TransactionLedger) + ], +} + def validate_backend( mod: ModuleType, @@ -77,6 +103,20 @@ def validate_backend( that haven't been implemented by this backend yet. ''' + for daemon_name, eps in _eps.items(): + for name in eps: + ep: Callable = getattr( + mod, + name, + None, + ) + if ep is None: + log.warning( + f'Provider backend {mod.name} is missing ' + f'{daemon_name} support :(\n' + f'The following endpoint is missing: {name}' + ) + inits: list[ FeedInit | dict[str, Any] ] = init_msgs @@ -128,6 +168,8 @@ def validate_backend( mkt: MktPair match init: + + # backend is using old dict msg delivery case { 'symbol_info': dict(symbol_info), 'fqsn': bs_fqme, @@ -164,6 +206,7 @@ def validate_backend( _atype=symbol_info['asset_type'] ) + # backend is using new `MktPair` but not entirely case { 'mkt_info': MktPair( dst=Asset(), @@ -182,7 +225,6 @@ def validate_backend( ) as init: name: str = mod.name log.info( - f'NICE JOB {name} BACKEND being fully up to API spec B)\n' f"{name}'s `MktPair` info:\n" f'{pformat(mkt.to_dict())}\n' f'shm conf: {pformat(shm_opts)}\n' @@ -203,7 +245,8 @@ def validate_backend( mkt = init.mkt_info assert mkt.type_key - # `MktPair` wish list + # backend is using new `MktPair` but not embedded `Asset` types + # for the .src/.dst.. if not isinstance(mkt.src, Asset): warn_msg += ( f'ALSO, {mod.name.upper()} should try to deliver\n' From 9bc11d8dd9f7960ba5a0d1773a486f1301d19f87 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 25 May 2023 17:55:20 -0400 Subject: [PATCH 280/294] Add basic config checking tests --- tests/test_accounting.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 tests/test_accounting.py diff --git a/tests/test_accounting.py b/tests/test_accounting.py new file mode 100644 index 00000000..f5a3bd8d --- /dev/null +++ b/tests/test_accounting.py @@ -0,0 +1,35 @@ +''' +`piker.accounting` mgmt calculations for +- positioning +- ledger updates +- config file IO + +''' +from pathlib import Path + +from piker import config + + +def test_root_conf_networking_section( + root_conf: dict, +): + conf, path = config.load( + 'conf', + touch_if_dne=True, + ) + assert conf['network']['tsdb'] + + +def test_account_file_default_empty( + tmpconfdir: Path, +): + conf, path = config.load_account( + 'kraken', + 'paper', + ) + + # ensure the account file empty but created + # and in the correct place in the filesystem! 
+    assert not conf
+    assert path.parent.is_dir()
+    assert path.parent.name == 'accounting'

From 4a8e8a32f96243c84b90b393fc25d96762341cf3 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 25 May 2023 17:56:14 -0400
Subject: [PATCH 281/294] Fix account config loading logic discovered in new
 test XD

---
 piker/config.py                 | 70 +++++++++++++++++++--------------
 piker/service/_actor_runtime.py |  3 --
 2 files changed, 41 insertions(+), 32 deletions(-)

diff --git a/piker/config.py b/piker/config.py
index d838bcdd..3128e93d 100644
--- a/piker/config.py
+++ b/piker/config.py
@@ -241,8 +241,6 @@ def load(
     pass the ``path: Path`` explicitly.

     '''
-    path: Path = path or get_conf_path(conf_name)
-
     # create the $HOME/.config/piker dir if dne
     if not _config_dir.is_dir():
         _config_dir.mkdir(
@@ -250,22 +248,27 @@ def load(
         exist_ok=True,
     )

+    path_provided: bool = path is not None
+    path: Path = path or get_conf_path(conf_name)
+
     if (
         not path.is_file()
         and touch_if_dne
     ):
-        fn: str = _conf_fn_w_ext(conf_name)
-
-        # try to copy in a template config to the user's directory if
-        # one exists.
-        template: Path = repodir() / 'config' / fn
-        if template.is_file():
-            shutil.copyfile(template, path)
-
-        else: # just touch an empty file with same name
+        # an explicit path was provided so skip any template
+        # lookup and just touch an empty file with the same name.
+        if path_provided:
             with path.open(mode='x'):
                 pass

+        # try to copy in a template config to the user's dir if one
+        # exists.
+        else:
+            fn: str = _conf_fn_w_ext(conf_name)
+            template: Path = repodir() / 'config' / fn
+            if template.is_file():
+                shutil.copyfile(template, path)
+
     with path.open(mode='r') as fp:
         config: dict = decode(
             fp.read(),
@@ -283,16 +286,24 @@ def load_account(

 ) -> tuple[dict, Path]:
     '''
     Load a accounting (with positions) file from
-    ~/.config/piker/accounting/account.<brokername>.<acctid>.toml.
+    $PIKER_CONFIG_DIR/accounting/account.<brokername>.<acctid>.toml.
+
+    Where normally $PIKER_CONFIG_DIR = ~/.config/piker/
+    and we implicitly create an accounting subdir which should
+    normally be linked to a git repo managed by the user B)

     '''
     legacy_fn: str = f'pps.{brokername}.{acctid}.toml'
     fn: str = f'account.{brokername}.{acctid}.toml'

     dirpath: Path = _config_dir / 'accounting'
+    if not dirpath.is_dir():
+        dirpath.mkdir()
+
     config, path = load(
         path=dirpath / fn,
         decode=tomlkit.parse,
+        touch_if_dne=True,
     )

     if not config:
@@ -303,25 +314,26 @@ def load_account(
             'Please delete the old file!\n'
             f'|-> {legacypath}\n'
         )
-        legacy_config, _ = load(
-            path=legacypath,
+        if legacypath.is_file():
+            legacy_config, _ = load(
+                path=legacypath,

-            # TODO: move to tomlkit:
-            # - needs to be fixed to support bidict?
-            # https://github.com/sdispater/tomlkit/issues/289
-            # - we need to use or fork's fix to do multiline array
-            # indenting.
-            decode=tomlkit.parse,
-        )
-        config.update(legacy_config)
+                # TODO: move to tomlkit:
+                # - needs to be fixed to support bidict?
+                # https://github.com/sdispater/tomlkit/issues/289
+                # - we need to use our fork's fix to do multiline array
+                # indenting.
+                decode=tomlkit.parse,
+            )
+            config.update(legacy_config)

-        # XXX: override the presumably previously non-existant
-        # file with legacy's contents.
+ write( + config, + path=path, + fail_empty=False, + ) return config, path diff --git a/piker/service/_actor_runtime.py b/piker/service/_actor_runtime.py index 843ef76b..78938a5f 100644 --- a/piker/service/_actor_runtime.py +++ b/piker/service/_actor_runtime.py @@ -19,8 +19,6 @@ """ from __future__ import annotations -from pprint import pformat -from functools import partial import os from typing import ( Optional, @@ -35,7 +33,6 @@ import tractor import trio from ._util import ( - log, # sub-sys logger get_console_log, ) from ._mngr import ( From 06cc3ac92cb0f7b5a4d3a60b39bef87e85db32ec Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 25 May 2023 18:04:52 -0400 Subject: [PATCH 282/294] Tidy up ems tests as per some `ruff`in --- tests/test_ems.py | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/tests/test_ems.py b/tests/test_ems.py index 414bc906..8ee99807 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -50,16 +50,6 @@ from piker.accounting import ( log = get_logger(__name__) -async def open_pikerd( - open_test_pikerd: AsyncContextManager, - -) -> Services: - async with ( - open_test_pikerd() as (_, _, _, services), - ): - yield services - - async def order_and_and_wait_for_ppmsg( client: OrderClient, trades_stream: tractor.MsgStream, @@ -174,7 +164,7 @@ def test_ems_err_on_bad_broker( async def load_bad_fqme(): try: async with ( - open_test_pikerd() as (_, _, _, services), + open_test_pikerd() as (_, _, _, _), open_ems( 'doggycoin.doggy', @@ -268,7 +258,7 @@ async def submit_and_check( trades_stream, # tractor.MsgStream startup_pps, accounts, - dialogs, + _, # dialogs ) ): # no positions on startup @@ -370,7 +360,7 @@ def test_multi_fill_positions( nonlocal ppmsg, pos async with ( - open_test_pikerd() as (_, _, _, services), + open_test_pikerd() as (_, _, _, _), ): ppmsg, pos = await submit_and_check( fills=fills, @@ -385,19 +375,24 @@ def test_multi_fill_positions( # account (i.e. a user can expect to see paper pps persist across # runtime sessions. async def just_check_pp(): + nonlocal ppmsg + async with ( - open_test_pikerd() as (_, _, _, services), + open_test_pikerd() as (_, _, _, _), ): await match_ppmsgs_on_ems_boot([ppmsg]) run_and_tollerate_cancels(just_check_pp) +# TODO: still need to implement offline storage of darks/alerts/paper +# lives probably all the same way.. see +# https://github.com/pikers/piker/issues/463 def test_open_orders_reloaded( open_test_pikerd: AsyncContextManager, loglevel: str, - fills: tuple[dict], + # fills: tuple[dict], check_cross_session: bool = False, ): From 3d8c1a7b3ce584a505c9ba6efe0bc827aeb639e9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 14:05:32 -0400 Subject: [PATCH 283/294] ib: don't log-emit ib pp msg when none exists.. 
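
The "UNEXPECTED POSITION" branch in `update_and_audit_msgs()` fires
exactly when IB reports a position that piker has no local entry for,
so there is no `pikerfmtmsg` worth rendering there in the first place.
Roughly the idea, as a sketch only (the surrounding branch context
isn't reproduced in the hunk below and the `ppmsg` predicate is
hypothetical):

    # only interpolate the piker-side summary when one exists:
    if ppmsg is None:
        log.error(
            f'UNEXPECTED POSITION says IB => {msg.symbol}\n'
            'Maybe they LIQUIDATED YOU or are missing ledger entries?\n'
        )
    else:
        log.error(
            f'UNEXPECTED POSITION says IB => {msg.symbol}\n'
            f'{pikerfmtmsg}\n\n'
        )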
--- piker/brokers/ib/broker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index bcd947ea..73477c0a 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -438,7 +438,6 @@ async def update_and_audit_msgs( log.error( f'UNEXPECTED POSITION says IB => {msg.symbol}\n' 'Maybe they LIQUIDATED YOU or are missing ledger entries?\n' - f'{pikerfmtmsg}\n\n' ) msgs.append(msg) From 40c5f39f0de5ea8540e5048c653288b79b42bd78 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 14:42:09 -0400 Subject: [PATCH 284/294] conftest: be explicit about which config we touch --- tests/conftest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index f797ea15..641b2815 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -164,7 +164,9 @@ def tmpconfdir( # this top level testing process). from piker import config config._config_dir: Path = tmpconfdir + conf, path = config.load( + conf_name='brokers', touch_if_dne=True, ) From 3b5bd8f43eb0dd90e023483b2c495bb160fc0db4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 14:42:35 -0400 Subject: [PATCH 285/294] Ensure quote last price is a `float` --- piker/clearing/_ems.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index ee7ec284..16069d1e 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -563,7 +563,7 @@ class Router(Struct): flume = feed.flumes[fqme] first_quote: dict = flume.first_quote book: DarkBook = self.get_dark_book(broker) - book.lasts[fqme]: float = first_quote['last'] + book.lasts[fqme]: float = float(first_quote['last']) async with self.maybe_open_brokerd_dialog( brokermod=brokermod, From 5f1d0fcb8c0c06e7ae6904f13ac7214d16461f39 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 14:58:59 -0400 Subject: [PATCH 286/294] `tmpconfdir`: always assert brokers config created --- tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/conftest.py b/tests/conftest.py index 641b2815..3dce2014 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -169,6 +169,7 @@ def tmpconfdir( conf_name='brokers', touch_if_dne=True, ) + assert path.is_file(), 'WTH.. `brokers.toml` not created!?' return tmpconfdir From 5e2107ff15d9965cd3e4bfa7391a91c94be1d179 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 16:50:15 -0400 Subject: [PATCH 287/294] Adjust `config.load()` to handle CI git checkout dir, seems they changed it!? --- piker/config.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/piker/config.py b/piker/config.py index 3128e93d..e2c63ea4 100644 --- a/piker/config.py +++ b/piker/config.py @@ -217,7 +217,16 @@ def repodir() -> Path: Return the abspath as ``Path`` to the git repo's root dir. ''' - return Path(__file__).absolute().parent.parent + repodir: Path = Path(__file__).absolute().parent.parent + confdir: Path = repodir / 'config' + + if not confdir.is_dir(): + # prolly inside stupid GH actions CI.. + repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE')) + confdir: Path = repodir / 'config' + + assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!' + return repodir def load( @@ -269,6 +278,11 @@ def load( if template.is_file(): shutil.copyfile(template, path) + elif fn and template: + assert template.is_file(), f'{template} is not a file!?' + + assert path.is_file(), f'Config file {path} not created!?' 
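+    # ^ a loud guard: if the template lookup/copy silently failed
+    # (eg. the CI checkout layout moving again) we want to crash
+    # right here instead of downstream on a missing-file read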
+ with path.open(mode='r') as fp: config: dict = decode( fp.read(), From 9ec664f7c868d3717e88a6081a24ee0e62bf6f1c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 16:50:53 -0400 Subject: [PATCH 288/294] Drop elastic search container build for now since we're also skipping the test --- .github/workflows/ci.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 65b020f1..89c43132 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,16 +43,21 @@ jobs: - name: Checkout uses: actions/checkout@v3 - - name: Build DB container - run: docker build -t piker:elastic dockering/elastic + # elastic only + # - name: Build DB container + # run: docker build -t piker:elastic dockering/elastic - name: Setup python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: '3.10' + # elastic only + # - name: Install dependencies + # run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager + - name: Install dependencies - run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager + run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager - name: Test suite run: pytest tests -rs From 024cf8b8c2606bfbb90888bd379b654b3f6c94d7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 16:51:11 -0400 Subject: [PATCH 289/294] add in `[kucoin]` section to brokers conf --- config/brokers.toml | 44 ++++++++++++++++++++++++++------------------ 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/config/brokers.toml b/config/brokers.toml index bb57c78d..7205d82c 100644 --- a/config/brokers.toml +++ b/config/brokers.toml @@ -1,19 +1,32 @@ [questrade] -refresh_token = "" -access_token = "" -api_server = "https://api06.iq.questrade.com/" +refresh_token = '' +access_token = '' +api_server = 'https://api06.iq.questrade.com/' expires_in = 1800 -token_type = "Bearer" +token_type = 'Bearer' expires_at = 1616095326.355846 + +[deribit] +key_id = '' +key_secret = '' + + [kraken] -key_descr = "api_0" -api_key = "" -secret = "" +key_descr = '' +api_key = '' +secret = '' + + +[kucoin] +key_id = '' +key_secret = '' +key_passphrase = '' + [ib] hosts = [ - "127.0.0.1", + '127.0.0.1', ] # XXX: the order in which ports will be scanned # (by the `brokerd` daemon-actor) @@ -30,8 +43,8 @@ ports = [ # is not supported so you have to manually download # and XML report and put it in a location that can be # accessed by the ``brokerd.ib`` backend code for parsing. -flex_token = '666666666666666666666666' -flex_trades_query_id = '666666' # live account +flex_token = '' +flex_trades_query_id = '' # live account # when clients are being scanned this determines # which clients are preferred to be used for data @@ -47,11 +60,6 @@ prefer_data_account = [ # the order in which accounts will be selectable # in the order mode UI (if found via clients during # API-app scanning)when a new symbol is loaded. 
-paper = "XX0000000" -margin = "X0000000" -ira = "X0000000" - - -[deribit] -key_id = 'XXXXXXXX' -key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx' +paper = 'XX0000000' +margin = 'X0000000' +ira = 'X0000000' From 4f67ac0337596170fcf36b0a8c88e9c023eba3b8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 26 May 2023 17:16:43 -0400 Subject: [PATCH 290/294] Change to new context-cancelled msg contents: pikerd is canceller --- tests/conftest.py | 3 +++ tests/test_services.py | 24 ++++++++++++++++-------- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 3dce2014..366d5d95 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -103,6 +103,9 @@ async def _open_test_pikerd( a different port then the default to allow testing alongside a running stack. + Calls `.service._actor_runtime.maybe_open_pikerd()`` + to boot the root actor / tractor runtime. + ''' import random from piker.service import maybe_open_pikerd diff --git a/tests/test_services.py b/tests/test_services.py index 082f629f..6f7b6f4c 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -33,8 +33,8 @@ def test_runtime_boot( ): ''' Verify we can boot the `pikerd` service stack using the - `open_test_pikerd` fixture helper and that registry address details - match up. + `open_test_pikerd()` fixture helper and that contact-registry + address details match up. ''' async def main(): @@ -55,6 +55,9 @@ def test_runtime_boot( assert pikerd_portal.channel.raddr == daemon_addr assert pikerd_portal.channel.raddr == portal.channel.raddr + # no service tasks should be started + assert not services.service_tasks + trio.run(main) @@ -121,8 +124,7 @@ def test_ensure_ems_in_paper_actors( async def main(): # type declares - book: OrderClient - trades_stream: tractor.MsgStream + client: OrderClient pps: dict[str, list[BrokerdPosition]] accounts: list[str] dialogs: dict[str, Status] @@ -139,8 +141,8 @@ def test_ensure_ems_in_paper_actors( mode='paper', loglevel=loglevel, ) as ( - book, - trades_stream, + client, + _, # trades_stream: tractor.MsgStream pps, accounts, dialogs, @@ -152,6 +154,9 @@ def test_ensure_ems_in_paper_actors( assert not pps assert not dialogs + assert not client._sent_orders + assert accounts + pikerd_subservices = ['emsd', 'samplerd'] async with ( @@ -169,10 +174,13 @@ def test_ensure_ems_in_paper_actors( print('ALL SERVICES STARTED, terminating..') await services.cancel_service('emsd') + # ensure we receive a remote cancellation error caused by the + # pikerd root actor since we used the `.cancel_service()` API + # above B) with pytest.raises( tractor._exceptions.ContextCancelled, ) as exc_info: trio.run(main) - cancel_msg: str = '_emsd_main()` was remotely cancelled by its caller' - assert cancel_msg in exc_info.value.args[0] + cancelled_msg: str = "was remotely cancelled by remote actor (\'pikerd\'" + assert cancelled_msg in exc_info.value.args[0] From d6331ce9e18b9fccc5264f9270f34655afeebe0d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 28 May 2023 12:41:14 -0400 Subject: [PATCH 291/294] Add nonlocal annots to satisfy ruff --- piker/clearing/_ems.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 16069d1e..e41ddbf1 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -408,6 +408,8 @@ class Router(Struct): return def mk_paper_ep(): + nonlocal brokermod, exec_mode + # for logging purposes brokermod = paper @@ -1397,6 +1399,10 @@ async def 
maybe_open_trade_relays( cache_hit, (relay, feed, client_ready) ): + if cache_hit: + log.info(f'Reusing existing trades relay for {fqme}:\n' + f'{relay}\n') + yield relay, feed, client_ready From 41aa87f84709cf88c5770dfab8100608fdc1676d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 28 May 2023 13:13:43 -0400 Subject: [PATCH 292/294] Fix `_digits` attr names in order mode.. --- piker/ui/order_mode.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 62e54680..2cd22610 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -302,7 +302,7 @@ class OrderMode: price = round( price, - ndigits=mkt.size_tick_digits, + ndigits=mkt.price_tick_digits, ) order = self._staged_order = Order( @@ -461,7 +461,7 @@ class OrderMode: mktinfo: MktPair = self.chart.linked.mkt level = round( line.value(), - ndigits=mktinfo.size_tick_digits, + ndigits=mktinfo.price_tick_digits, ) # updated by level change callback set in ``.new_line_from_order()`` dialog = line.dialog From f6549fcb62bc55f4a4034a99c0c0d8d140db80ba Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 28 May 2023 14:05:03 -0400 Subject: [PATCH 293/294] Always allocate a new `OrderClient` per `open_ems()` call --- piker/clearing/_client.py | 31 +++++++++++++------------------ 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index a0218023..65a21fef 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -165,8 +165,6 @@ class OrderClient(Struct): ) -_client: OrderClient = None - async def relay_orders_from_sync_code( @@ -274,34 +272,31 @@ async def open_ems( # open 2-way trade command stream ctx.open_stream() as trades_stream, ): - # use any pre-existing actor singleton client. - global _client - if _client is None: - size = 100 - tx, rx = trio.open_memory_channel(size) - brx = broadcast_receiver(rx, size) + size: int = 100 # what should this be? 
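+        # NOTE: fresh per-call channels now that the module-level
+        # `_client` singleton is gone; order-client state (eg.
+        # `._sent_orders`) can no longer leak between `open_ems()`
+        # entries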
+ tx, rx = trio.open_memory_channel(size) + brx = broadcast_receiver(rx, size) - # setup local ui event streaming channels for request/resp - # streamging with EMS daemon - _client = OrderClient( - _ems_stream=trades_stream, - _to_relay_task=tx, - _from_sync_order_client=brx, - ) + # setup local ui event streaming channels for request/resp + # streamging with EMS daemon + client = OrderClient( + _ems_stream=trades_stream, + _to_relay_task=tx, + _from_sync_order_client=brx, + ) - _client._ems_stream = trades_stream + client._ems_stream = trades_stream # start sync code order msg delivery task async with trio.open_nursery() as n: n.start_soon( relay_orders_from_sync_code, - _client, + client, fqme, trades_stream ) yield ( - _client, + client, trades_stream, positions, accounts, From abd3cefd845ac2600fe8420bf67cd5f517bc7fea Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 28 May 2023 14:28:56 -0400 Subject: [PATCH 294/294] Parametrize ems service test to cancel with API and kbi --- tests/test_ems.py | 7 ++- tests/test_services.py | 120 +++++++++++++++++++++++++++++++---------- 2 files changed, 96 insertions(+), 31 deletions(-) diff --git a/tests/test_ems.py b/tests/test_ems.py index 8ee99807..f9c010f0 100644 --- a/tests/test_ems.py +++ b/tests/test_ems.py @@ -20,7 +20,6 @@ from typing import ( ) import trio -# import pytest_trio from exceptiongroup import BaseExceptionGroup import pytest @@ -50,6 +49,7 @@ from piker.accounting import ( log = get_logger(__name__) + async def order_and_and_wait_for_ppmsg( client: OrderClient, trades_stream: tractor.MsgStream, @@ -370,7 +370,10 @@ def test_multi_fill_positions( run_and_tollerate_cancels(atest) - if check_cross_session or accum_size != 0: + if ( + check_cross_session + or accum_size != 0 + ): # rerun just to check that position info is persistent for the paper # account (i.e. a user can expect to see paper pps persist across # runtime sessions. 
diff --git a/tests/test_services.py b/tests/test_services.py index 6f7b6f4c..433e97f3 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -2,9 +2,13 @@ Actor tree daemon sub-service verifications ''' -from typing import AsyncContextManager +from typing import ( + AsyncContextManager, + Callable, +) from contextlib import asynccontextmanager as acm +from exceptiongroup import BaseExceptionGroup import pytest import trio import tractor @@ -61,24 +65,10 @@ def test_runtime_boot( trio.run(main) -@acm -async def ensure_service( - name: str, - sockaddr: tuple[str, int] | None = None, -) -> None: - async with find_service(name) as portal: - remote_sockaddr = portal.channel.raddr - print(f'FOUND `{name}` @ {remote_sockaddr}') - - if sockaddr: - assert remote_sockaddr == sockaddr - - yield portal - - def test_ensure_datafeed_actors( open_test_pikerd: AsyncContextManager, loglevel: str, + # cancel_method: str, ) -> None: ''' @@ -94,6 +84,7 @@ def test_ensure_datafeed_actors( async def main(): async with ( open_test_pikerd(), + open_feed( ['xbtusdt.kraken'], loglevel=loglevel, @@ -106,15 +97,89 @@ def test_ensure_datafeed_actors( ensure_service(brokerd_name), ensure_service('samplerd'), ): - pass + await trio.sleep(0.1) trio.run(main) +@acm +async def ensure_service( + name: str, + sockaddr: tuple[str, int] | None = None, +) -> None: + async with find_service(name) as portal: + remote_sockaddr = portal.channel.raddr + print(f'FOUND `{name}` @ {remote_sockaddr}') + + if sockaddr: + assert remote_sockaddr == sockaddr + + yield portal + + +def run_test_w_cancel_method( + cancel_method: str, + main: Callable, + +) -> None: + ''' + Run our runtime under trio and expect a certain type of cancel condition + depending on input. + + ''' + cancelled_msg: str = ( + "was remotely cancelled by remote actor (\'pikerd\'") + + if cancel_method == 'sigint': + with pytest.raises( + BaseExceptionGroup, + ) as exc_info: + trio.run(main) + + multi = exc_info.value + + for suberr in multi.exceptions: + match suberr: + # ensure we receive a remote cancellation error caused + # by the pikerd root actor since we used the + # `.cancel_service()` API above B) + case tractor.ContextCancelled(): + assert cancelled_msg in suberr.args[0] + + case KeyboardInterrupt(): + pass + + case _: + pytest.fail(f'Unexpected error {suberr}') + + elif cancel_method == 'services': + + # XXX NOTE: oddly, when you pass --pdb to pytest, i think since + # we also use that to enable the underlying tractor debug mode, + # it causes this to not raise for some reason? So if you see + # that while changing this test.. it's prolly that. + + with pytest.raises( + tractor.ContextCancelled + ) as exc_info: + trio.run(main) + + assert cancelled_msg in exc_info.value.args[0] + + else: + pytest.fail(f'Test is broken due to {cancel_method}') + + +@pytest.mark.parametrize( + 'cancel_method', + ['services', 'sigint'], +) def test_ensure_ems_in_paper_actors( open_test_pikerd: AsyncContextManager, loglevel: str, + cancel_method: str, + ) -> None: actor_name: str = 'brokerd' @@ -153,7 +218,7 @@ def test_ensure_ems_in_paper_actors( # local ledger and `pps.toml` state ;) assert not pps assert not dialogs - + # XXX: should be new client with no state from other tests assert not client._sent_orders assert accounts @@ -171,16 +236,13 @@ def test_ensure_ems_in_paper_actors( # implicitly by the ems. 
assert brokerd_name in services.service_tasks - print('ALL SERVICES STARTED, terminating..') - await services.cancel_service('emsd') + print('ALL SERVICES STARTED, cancelling runtime with:\n' + f'-> {cancel_method}') - # ensure we receive a remote cancellation error caused by the - # pikerd root actor since we used the `.cancel_service()` API - # above B) - with pytest.raises( - tractor._exceptions.ContextCancelled, - ) as exc_info: - trio.run(main) + if cancel_method == 'services': + await services.cancel_service('emsd') - cancelled_msg: str = "was remotely cancelled by remote actor (\'pikerd\'" - assert cancelled_msg in exc_info.value.args[0] + elif cancel_method == 'sigint': + raise KeyboardInterrupt + + run_test_w_cancel_method(cancel_method, main)
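
The `match`-based triage in `run_test_w_cancel_method()` exists because
a `KeyboardInterrupt` raised inside the actor runtime may surface
either bare or wrapped in a group alongside the remote
`ContextCancelled` relayed from `pikerd`. A self-contained sketch of
both shapes using plain `trio` (not piker code, just the general
failure-collection behaviour):

    import trio
    from exceptiongroup import BaseExceptionGroup

    async def main() -> None:
        async with trio.open_nursery() as n:
            n.start_soon(trio.sleep_forever)
            # simulate the 'sigint' cancel method from the test above
            raise KeyboardInterrupt

    try:
        trio.run(main)
    except BaseExceptionGroup as beg:
        # multi-failure case: inspect each sub-error like the test does
        print([type(err).__name__ for err in beg.exceptions])
    except KeyboardInterrupt:
        # single-failure case: non-strict trio collapses the group and
        # the interrupt propagates bare
        print('bare KeyboardInterrupt')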