Compare commits

...

9 Commits

Author SHA1 Message Date
Tyler Goodlet 5d30325e91 `.questrade`: link in ws-API issue! 2026-01-02 00:10:19 -05:00
Tyler Goodlet 3f0498f266 `.kraken.broker`: need to `await verify_balances()` .. 2026-01-02 00:10:19 -05:00
Tyler Goodlet 03f83d25d5 `.brokers.ib.feed`: better `tractor.to_asyncio` typing and var naming throughout! 2026-01-02 00:10:19 -05:00
Tyler Goodlet 9dbd55c4e4 `.brokers.cli`: module type and todo for `--pdb` flag to NOT src from sub-cmd 2026-01-02 00:00:37 -05:00
Tyler Goodlet 00e59057c7 Type loaded backend modules 2026-01-02 00:00:37 -05:00
Tyler Goodlet a92bb87cf3 Bump various `.brokers.core` doc string content/style 2026-01-02 00:00:37 -05:00
Tyler Goodlet 81693cc2f7 Teensie `piker.data` styling tweaks
- use more compact optional value style with `|`-union
- fix `.flows` typing-only import since we need `MktPair` to be
  immediately defined for use on a `msgspec.Struct` field.
- more "tree-like" warning msg in `.validate()` reporting.
2026-01-01 15:18:36 -05:00
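
For reference, the typing-only-import fix and the `|`-union style read roughly as below (a minimal sketch mirroring the `.flows` diff further down, not the verbatim module): a `msgspec.Struct` field type like `MktPair` must be importable when the class body is created, while a name used only in annotations (`Feed`) can stay behind `TYPE_CHECKING`.

    from __future__ import annotations
    from typing import TYPE_CHECKING
    from msgspec import Struct

    # must be a runtime import since it's used as a `Struct` field type
    from piker.accounting import MktPair

    if TYPE_CHECKING:
        # annotation-only usage, so a typing-only import suffices
        from piker.data.feed import Feed

    class Flume(Struct):
        mkt: MktPair
        feed: Feed|None = None  # compact optional style via `|`-union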
Tyler Goodlet 729a44a4e5 Invert `getattr()` check for `get_mkt_pairs()` ep
Such that we `return` early when not defined by the provider backend to
reduce an indent level in `SymbologyCache.load()`.
2026-01-01 15:18:36 -05:00
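
Schematically it's the standard guard-clause inversion (a sketch only; the full hunk is in the `SymbologyCache.load()` diff below):

    # before: main body nested under the capability check
    if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
        pairs = await get_mkt_pairs()
        ...
    else:
        log.warning('Implement `Client.get_mkt_pairs()`!')

    # after: return early when the backend doesn't define the ep,
    # saving an indent level for the main body
    get_mkt_pairs: Callable|None = getattr(client, 'get_mkt_pairs', None)
    if not get_mkt_pairs:
        log.warning('Implement `Client.get_mkt_pairs()`!')
        return self

    pairs = await get_mkt_pairs()
    ...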
Tyler Goodlet 15078a713d Allow ledger passes to ignore (symcache) unknown fqmes
For example in the paper-eng, if you have a backend that doesn't fully
support a symcache (yet) it's handy to be able to ignore processing
other paper-eng txns when all you care about at the moment is the
simulated symbol.

NOTE, that currently this will still result in a key-error when you load
more than one mkt with the paper engine (for which the backend does not
have the symcache implemented) since no fqme ad-hoc query was made for
the 2nd symbol (and i'm not sure we should support that kinda hackery
over just encouraging the sym-cache being added?). Def needs a little
more thought depending on how many backends are never going to be able
to (easily) support caching..
2026-01-01 15:18:36 -05:00
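
In practice the paper engine (see its `open_trade_dialog()` hunk below) passes exactly the fqmes it has loaded, roughly like so (the `Account.update_from_ledger()` method name is assumed from piker's accounting API; the hunks below only show the params and call site):

    acnt.update_from_ledger(
        ledger,
        # fallback table built from prior `.get_mkt_info()` calls
        _mktmap_table=mkt_by_fqme,
        # only these fqmes must resolve; all others are skipped
        only_require=list(mkt_by_fqme),
    )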
12 changed files with 202 additions and 104 deletions

View File

@@ -30,7 +30,8 @@ from types import ModuleType
 from typing import (
     Any,
     Iterator,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )

 import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
-from piker.data._symcache import SymbologyCache
-from ..log import get_logger
+from piker.log import get_logger
+
+if TYPE_CHECKING:
+    from piker.data._symcache import SymbologyCache

 log = get_logger(__name__)
@@ -493,6 +496,17 @@ class Account(Struct):
         _mktmap_table: dict[str, MktPair] | None = None,

+        only_require: list[str]|True = True,
+        # ^list of fqmes that are "required" to be processed from
+        # this ledger pass; we often don't care about others and
+        # definitely shouldn't always error in such cases.
+        # (eg. broker backend loaded that doesn't yet supsport the
+        # symcache but also, inside the paper engine we don't ad-hoc
+        # request `get_mkt_info()` for every symbol in the ledger,
+        # only the one for which we're simulating against).
+        # TODO, not sure if there's a better soln for this, ideally
+        # all backends get symcache support afap i guess..
+
     ) -> dict[str, Position]:
         '''
         Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@ class Account(Struct):
                 if _mktmap_table is None:
                     raise

+                required: bool = (
+                    only_require is True
+                    or (
+                        only_require is not True
+                        and
+                        fqme in only_require
+                    )
+                )
                 # XXX: caller is allowed to provide a fallback
                 # mktmap table for the case where a new position is
                 # being added and the preloaded symcache didn't
                 # have this entry prior (eg. with frickin IB..)
-                mkt = _mktmap_table[fqme]
+                if (
+                    not (mkt := _mktmap_table.get(fqme))
+                    and
+                    required
+                ):
+                    raise
+
+                elif not required:
+                    continue
+
+                else:
+                    # should be an entry retreived somewhere
+                    assert mkt

         if not (pos := pps.get(bs_mktid)):
@@ -656,7 +691,7 @@
     def write_config(self) -> None:
         '''
         Write the current account state to the user's account TOML file, normally
-        something like ``pps.toml``.
+        something like `pps.toml`.

         '''
         # TODO: show diff output?

View File

@@ -98,13 +98,14 @@ async def open_cached_client(
     If one has not been setup do it and cache it.

     '''
-    brokermod = get_brokermod(brokername)
+    brokermod: ModuleType = get_brokermod(brokername)
+    # TODO: make abstract or `typing.Protocol`
+    # client: Client
     async with maybe_open_context(
         acm_func=brokermod.get_client,
         kwargs=kwargs,

     ) as (cache_hit, client):
         if cache_hit:
             log.runtime(f'Reusing existing {client}')
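
Usage sketch for the cached-client acm (the backend name and endpoint call are arbitrary examples, not from this changeset):

    async with open_cached_client('kraken') as client:
        # all in-actor callers share one `Client` instance
        # via `maybe_open_context()`
        bars = await client.bars('xbtusd')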

View File

@@ -471,11 +471,15 @@ def search(
     '''
     # global opts
-    brokermods = list(config['brokermods'].values())
+    brokermods: list[ModuleType] = list(config['brokermods'].values())
+
+    # TODO: this is coming from the `search --pdb` NOT from
+    # the `piker --pdb` XD ..
+    # -[ ] pull from the parent click ctx's values..dumdum
+    # assert pdb

     # define tractor entrypoint
     async def main(func):

         async with maybe_open_pikerd(
             loglevel=config['loglevel'],
             debug_mode=pdb,

View File

@@ -22,7 +22,9 @@ routines should be primitive data types where possible.
 """
 import inspect
 from types import ModuleType
-from typing import List, Dict, Any, Optional
+from typing import (
+    Any,
+)

 import trio
@@ -34,8 +36,10 @@ from ..accounting import MktPair
 async def api(brokername: str, methname: str, **kwargs) -> dict:
-    """Make (proxy through) a broker API call by name and return its result.
-    """
+    '''
+    Make (proxy through) a broker API call by name and return its result.
+
+    '''
     brokermod = get_brokermod(brokername)
     async with brokermod.get_client() as client:
         meth = getattr(client, methname, None)
@@ -62,10 +66,14 @@ async def api(brokername: str, methname: str, **kwargs) -> dict:
 async def stocks_quote(
     brokermod: ModuleType,
-    tickers: List[str]
-) -> Dict[str, Dict[str, Any]]:
-    """Return quotes dict for ``tickers``.
-    """
+    tickers: list[str]
+
+) -> dict[str, dict[str, Any]]:
+    '''
+    Return a `dict` of snapshot quotes for the provided input
+    `tickers`: a `list` of fqmes.
+
+    '''
     async with brokermod.get_client() as client:
         return await client.quote(tickers)
@@ -74,13 +82,15 @@ async def stocks_quote(
 async def option_chain(
     brokermod: ModuleType,
     symbol: str,
-    date: Optional[str] = None,
-) -> Dict[str, Dict[str, Dict[str, Any]]]:
-    """Return option chain for ``symbol`` for ``date``.
+    date: str|None = None,
+
+) -> dict[str, dict[str, dict[str, Any]]]:
+    '''
+    Return option chain for ``symbol`` for ``date``.

     By default all expiries are returned. If ``date`` is provided
     then contract quotes for that single expiry are returned.
-    """
+
+    '''
     async with brokermod.get_client() as client:
         if date:
             id = int((await client.tickers2ids([symbol]))[symbol])
@@ -98,7 +108,7 @@ async def option_chain(
 # async def contracts(
 #     brokermod: ModuleType,
 #     symbol: str,
-# ) -> Dict[str, Dict[str, Dict[str, Any]]]:
+# ) -> dict[str, dict[str, dict[str, Any]]]:
 #     """Return option contracts (all expiries) for ``symbol``.
 #     """
 #     async with brokermod.get_client() as client:
@@ -110,15 +120,24 @@ async def bars(
     brokermod: ModuleType,
     symbol: str,
     **kwargs,
-) -> Dict[str, Dict[str, Dict[str, Any]]]:
-    """Return option contracts (all expiries) for ``symbol``.
-    """
+) -> dict[str, dict[str, dict[str, Any]]]:
+    '''
+    Return option contracts (all expiries) for ``symbol``.
+
+    '''
     async with brokermod.get_client() as client:
         return await client.bars(symbol, **kwargs)


-async def search_w_brokerd(name: str, pattern: str) -> dict:
+async def search_w_brokerd(
+    name: str,
+    pattern: str,
+) -> dict:
+
+    # TODO: WHY NOT WORK!?!
+    # when we `step` through the next block?
+    # import tractor
+    # await tractor.pause()
     async with open_cached_client(name) as client:

         # TODO: support multiple asset type concurrent searches.
@@ -130,12 +149,12 @@ async def symbol_search(
     pattern: str,
     **kwargs,

-) -> Dict[str, Dict[str, Dict[str, Any]]]:
+) -> dict[str, dict[str, dict[str, Any]]]:
     '''
     Return symbol info from broker.

     '''
-    results = []
+    results: list[str] = []

     async def search_backend(
         brokermod: ModuleType
@@ -143,6 +162,13 @@
         brokername: str = mod.name

+        # TODO: figure this the FUCK OUT
+        # -> ok so obvi in the root actor any async task that's
+        # spawned outside the main tractor-root-actor task needs to
+        # call this..
+        # await tractor.devx._debug.maybe_init_greenback()
+        # tractor.pause_from_sync()
+
         async with maybe_spawn_brokerd(
             mod.name,
             infect_asyncio=getattr(
@@ -162,7 +188,6 @@
         ))

     async with trio.open_nursery() as n:
         for mod in brokermods:
-
             n.start_soon(search_backend, mod.name)
@@ -172,11 +197,13 @@
 async def mkt_info(
     brokermod: ModuleType,
     fqme: str,
+
     **kwargs,

 ) -> MktPair:
     '''
-    Return MktPair info from broker including src and dst assets.
+    Return the `piker.accounting.MktPair` info struct from a given
+    backend broker tradable src/dst asset pair.

     '''
     async with open_cached_client(brokermod.name) as client:

View File

@@ -587,7 +587,7 @@ async def get_bars(
                 data_cs.cancel()

                 # spawn new data reset task
-                data_cs, reset_done = await nurse.start(
+                data_cs, reset_done = await tn.start(
                     partial(
                         wait_on_data_reset,
                         proxy,
@@ -607,11 +607,11 @@
     # such that simultaneous symbol queries don't try data resettingn
     # too fast..
     unset_resetter: bool = False
-    async with trio.open_nursery() as nurse:
+    async with trio.open_nursery() as tn:

         # start history request that we allow
         # to run indefinitely until a result is acquired
-        nurse.start_soon(query)
+        tn.start_soon(query)

         # start history reset loop which waits up to the timeout
         # for a result before triggering a data feed reset.
@@ -631,7 +631,7 @@
                 unset_resetter: bool = True

                 # spawn new data reset task
-                data_cs, reset_done = await nurse.start(
+                data_cs, reset_done = await tn.start(
                     partial(
                         wait_on_data_reset,
                         proxy,
@@ -705,7 +705,9 @@ async def _setup_quote_stream(
     # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore

     def teardown():
         ticker.updateEvent.disconnect(push)
-        log.error(f"Disconnected stream for `{symbol}`")
+        log.error(
+            f'Disconnected stream for `{symbol}`'
+        )
         client.ib.cancelMktData(contract)

         # decouple broadcast mem chan
@@ -761,7 +763,10 @@ async def open_aio_quote_stream(
     symbol: str,
     contract: Contract | None = None,

-) -> trio.abc.ReceiveStream:
+) -> (
+    trio.abc.Channel|  # iface
+    tractor.to_asyncio.LinkedTaskChannel  # actually
+):
     from tractor.trionics import broadcast_receiver
     global _quote_streams
@@ -778,6 +783,7 @@
             yield from_aio
             return

+    from_aio: tractor.to_asyncio.LinkedTaskChannel
     async with tractor.to_asyncio.open_channel_from(
         _setup_quote_stream,
         symbol=symbol,
@@ -983,17 +989,18 @@ async def stream_quotes(
     )
     cs: trio.CancelScope | None = None
     startup: bool = True
+    iter_quotes: trio.abc.Channel
     while (
         startup
         or cs.cancel_called
     ):
         with trio.CancelScope() as cs:
             async with (
-                trio.open_nursery() as nurse,
+                trio.open_nursery() as tn,
                 open_aio_quote_stream(
                     symbol=sym,
                     contract=con,
-                ) as stream,
+                ) as iter_quotes,
             ):
                 # ugh, clear ticks since we've consumed them
                 # (ahem, ib_insync is stateful trash)
@@ -1021,9 +1028,9 @@
                         await rt_ev.wait()
                         cs.cancel()  # cancel called should now be set

-                nurse.start_soon(reset_on_feed)
+                tn.start_soon(reset_on_feed)

-                async with aclosing(stream):
+                async with aclosing(iter_quotes):
                     # if syminfo.get('no_vlm', False):
                     if not init_msg.shm_write_opts['has_vlm']:
@@ -1038,19 +1045,21 @@
                         # wait for real volume on feed (trading might be
                         # closed)
                         while True:
-                            ticker = await stream.receive()
+                            ticker = await iter_quotes.receive()

                             # for a real volume contract we rait for
                             # the first "real" trade to take place
                             if (
                                 # not calc_price
                                 # and not ticker.rtTime
-                                not ticker.rtTime
+                                False
+                                # not ticker.rtTime
                             ):
                                 # spin consuming tickers until we
                                 # get a real market datum
                                 log.debug(f"New unsent ticker: {ticker}")
                                 continue

                             else:
                                 log.debug("Received first volume tick")

                                 # ugh, clear ticks since we've
@@ -1066,13 +1075,18 @@
                     log.debug(f"First ticker received {quote}")

                     # tell data-layer spawner-caller that live
-                    # quotes are now streaming.
+                    # quotes are now active desptie not having
+                    # necessarily received a first vlm/clearing
+                    # tick.
+                    ticker = await iter_quotes.receive()
                     feed_is_live.set()
+                    fqme: str = quote['fqme']
+                    await send_chan.send({fqme: quote})

                     # last = time.time()
-                    async for ticker in stream:
+                    async for ticker in iter_quotes:
                         quote = normalize(ticker)
-                        fqme = quote['fqme']
+                        fqme: str = quote['fqme']
                         await send_chan.send({fqme: quote})

                         # ugh, clear ticks since we've consumed them

View File

@@ -544,7 +544,7 @@ async def open_trade_dialog(
     # to be reloaded.
     balances: dict[str, float] = await client.get_balances()

-    verify_balances(
+    await verify_balances(
         acnt,
         src_fiat,
         balances,

View File

@@ -37,6 +37,12 @@ import tractor
 from async_generator import asynccontextmanager
 import numpy as np
 import wrapt
+
+# TODO, port to `httpx`/`trio-websocket` whenver i get back to
+# writing a proper ws-api streamer for this backend (since the data
+# feeds are free now) as per GH feat-req:
+# https://github.com/pikers/piker/issues/509
+#
 import asks

 from ..calc import humanize, percent_change

View File

@@ -655,6 +655,7 @@ async def open_trade_dialog(
         # in) use manually constructed table from calling
         # the `.get_mkt_info()` provider EP above.
         _mktmap_table=mkt_by_fqme,
+        only_require=list(mkt_by_fqme),
     )

     pp_msgs: list[BrokerdPosition] = []

View File

@@ -31,6 +31,7 @@ from pathlib import Path
 from pprint import pformat
 from typing import (
     Any,
+    Callable,
     Sequence,
     Hashable,
     TYPE_CHECKING,
@@ -56,7 +57,7 @@ from piker.brokers import (
 )

 if TYPE_CHECKING:
-    from ..accounting import (
+    from piker.accounting import (
         Asset,
         MktPair,
     )
@@ -149,57 +150,68 @@ class SymbologyCache(Struct):
                 'Implement `Client.get_assets()`!'
             )

-        if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
-
-            pairs: dict[str, Struct] = await get_mkt_pairs()
-            for bs_fqme, pair in pairs.items():
-
-                # NOTE: every backend defined pair should
-                # declare it's ns path for roundtrip
-                # serialization lookup.
-                if not getattr(pair, 'ns_path', None):
-                    raise TypeError(
-                        f'Pair-struct for {self.mod.name} MUST define a '
-                        '`.ns_path: str`!\n'
-                        f'{pair}'
-                    )
-
-                entry = await self.mod.get_mkt_info(pair.bs_fqme)
-                if not entry:
-                    continue
-
-                mkt: MktPair
-                pair: Struct
-                mkt, _pair = entry
-                assert _pair is pair, (
-                    f'`{self.mod.name}` backend probably has a '
-                    'keying-symmetry problem between the pair-`Struct` '
-                    'returned from `Client.get_mkt_pairs()`and the '
-                    'module level endpoint: `.get_mkt_info()`\n\n'
-                    "Here's the struct diff:\n"
-                    f'{_pair - pair}'
-                )
-                # NOTE XXX: this means backends MUST implement
-                # a `Struct.bs_mktid: str` field to provide
-                # a native-keyed map to their own symbol
-                # set(s).
-                self.pairs[pair.bs_mktid] = pair
-
-                # NOTE: `MktPair`s are keyed here using piker's
-                # internal FQME schema so that search,
-                # accounting and feed init can be accomplished
-                # a sane, uniform, normalized basis.
-                self.mktmaps[mkt.fqme] = mkt
-
-            self.pair_ns_path: str = tractor.msg.NamespacePath.from_ref(
-                pair,
-            )
-
-        else:
+        get_mkt_pairs: Callable|None = getattr(
+            client,
+            'get_mkt_pairs',
+            None,
+        )
+        if not get_mkt_pairs:
             log.warning(
                 'No symbology cache `Pair` support for `{provider}`..\n'
                 'Implement `Client.get_mkt_pairs()`!'
             )
+            return self
+
+        pairs: dict[str, Struct] = await get_mkt_pairs()
+        if not pairs:
+            log.warning(
+                'No pairs from intial {provider!r} sym-cache request?\n\n'
+                '`Client.get_mkt_pairs()` -> {pairs!r} ?'
+            )
+            return self
+
+        for bs_fqme, pair in pairs.items():
+
+            if not getattr(pair, 'ns_path', None):
+                # XXX: every backend defined pair must declare
+                # a `.ns_path: tractor.NamespacePath` to enable
+                # roundtrip serialization lookup from a local
+                # cache file.
+                raise TypeError(
+                    f'Pair-struct for {self.mod.name} MUST define a '
+                    '`.ns_path: str`!\n\n'
+                    f'{pair!r}'
+                )
+
+            entry = await self.mod.get_mkt_info(pair.bs_fqme)
+            if not entry:
+                continue
+
+            mkt: MktPair
+            pair: Struct
+            mkt, _pair = entry
+            assert _pair is pair, (
+                f'`{self.mod.name}` backend probably has a '
+                'keying-symmetry problem between the pair-`Struct` '
+                'returned from `Client.get_mkt_pairs()`and the '
+                'module level endpoint: `.get_mkt_info()`\n\n'
+                "Here's the struct diff:\n"
+                f'{_pair - pair}'
+            )
+            # NOTE XXX: this means backends MUST implement
+            # a `Struct.bs_mktid: str` field to provide
+            # a native-keyed map to their own symbol
+            # set(s).
+            self.pairs[pair.bs_mktid] = pair
+
+            # NOTE: `MktPair`s are keyed here using piker's
+            # internal FQME schema so that search,
+            # accounting and feed init can be accomplished
+            # a sane, uniform, normalized basis.
+            self.mktmaps[mkt.fqme] = mkt
+
+        self.pair_ns_path: str = tractor.msg.NamespacePath.from_ref(
+            pair,
+        )

         return self
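
Taken together, the assertions in this loop imply a minimal symcache contract for provider backends; a rough sketch of the implied interface (inferred from the checks above, not code from this changeset):

    class Pair(Struct):
        ns_path: str   # for roundtrip (de)serialization lookup
        bs_mktid: str  # backend-native symbol key

    class Client:
        async def get_mkt_pairs(self) -> dict[str, Pair]:
            ...

    # module-level ep which must return the *same* pair instance
    # (keying symmetry) alongside the normalized `MktPair`
    async def get_mkt_info(fqme: str) -> tuple[MktPair, Pair]:
        ...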

View File

@@ -786,7 +786,6 @@ async def install_brokerd_search(
 @acm
 async def maybe_open_feed(
     fqmes: list[str],
-
     loglevel: str | None = None,
@@ -840,13 +839,12 @@ async def maybe_open_feed(
 @acm
 async def open_feed(
     fqmes: list[str],

-    loglevel: str | None = None,
+    loglevel: str|None = None,
     allow_overruns: bool = True,
     start_stream: bool = True,
-    tick_throttle: float | None = None,  # Hz
+    tick_throttle: float|None = None,  # Hz

     allow_remote_ctl_ui: bool = False,

View File

@@ -36,10 +36,10 @@ from ._sharedmem import (
     ShmArray,
     _Token,
 )
+from piker.accounting import MktPair

 if TYPE_CHECKING:
-    from ..accounting import MktPair
-    from .feed import Feed
+    from piker.data.feed import Feed


 class Flume(Struct):
@@ -82,7 +82,7 @@ class Flume(Struct):
     # TODO: do we need this really if we can pull the `Portal` from
     # ``tractor``'s internals?
-    feed: Feed | None = None
+    feed: Feed|None = None

     @property
     def rt_shm(self) -> ShmArray:

View File

@@ -113,9 +113,9 @@ def validate_backend(
         )
         if ep is None:
             log.warning(
-                f'Provider backend {mod.name} is missing '
-                f'{daemon_name} support :(\n'
-                f'The following endpoint is missing: {name}'
+                f'Provider backend {mod.name!r} is missing '
+                f'{daemon_name!r} support?\n'
+                f'|_module endpoint-func missing: {name!r}\n'
             )

         inits: list[