Compare commits

..

1 Commits

Author SHA1 Message Date
Gud Boi a3ed73a40d Strip `None` values from symcache dict before TOML write
(this commit message was generated in part by [`claude-code`][claude-code-gh])
[claude-code-gh]: https://github.com/anthropics/claude-code

Note that (again) this patch was originally authored by @dnks and broken out from
a larger commit which added unnecessary/out-of-scope changes we didn't
end up requiring.
2026-03-11 14:36:50 -04:00
16 changed files with 182 additions and 357 deletions

View File

@ -2,7 +2,7 @@
-------------- --------------
more or less the "everything broker" for traditional and international more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_async` project. and we interface to their APIs using the `ib_insync` project.
status status
****** ******

View File

@ -22,7 +22,7 @@ Sub-modules within break into the core functionalities:
- ``broker.py`` part for orders / trading endpoints - ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints - ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized - ``api.py`` for the core API machinery which is ``trio``-ized
wrapping around `ib_async`. wrapping around ``ib_insync``.
""" """
from .api import ( from .api import (

View File

@ -111,7 +111,7 @@ def load_flex_trades(
) -> dict[str, Any]: ) -> dict[str, Any]:
from ib_async import flexreport, util from ib_insync import flexreport, util
conf = get_config() conf = get_config()
@ -154,7 +154,8 @@ def load_flex_trades(
trade_entries, trade_entries,
) )
ledger_dict: dict|None ledger_dict: dict | None = None
for acctid in trades_by_account: for acctid in trades_by_account:
trades_by_id = trades_by_account[acctid] trades_by_id = trades_by_account[acctid]

View File

@ -20,7 +20,6 @@ runnable script-programs.
''' '''
from __future__ import annotations from __future__ import annotations
import asyncio
from datetime import ( # noqa from datetime import ( # noqa
datetime, datetime,
date, date,
@ -141,8 +140,7 @@ async def data_reset_hack(
except ( except (
OSError, # no VNC server avail.. OSError, # no VNC server avail..
PermissionError, # asyncvnc pw fail.. PermissionError, # asyncvnc pw fail..
) as _vnc_err: ):
vnc_err = _vnc_err
try: try:
import i3ipc # noqa (since a deps dynamic check) import i3ipc # noqa (since a deps dynamic check)
except ModuleNotFoundError: except ModuleNotFoundError:
@ -168,22 +166,14 @@ async def data_reset_hack(
# localhost but no vnc-client or it borked.. # localhost but no vnc-client or it borked..
else: else:
log.error( try_xdo_manual(client)
'VNC CLICK HACK FAILE with,\n'
f'{vnc_err!r}\n'
)
# breakpoint()
# try_xdo_manual(client)
case 'i3ipc_xdotool': case 'i3ipc_xdotool':
try_xdo_manual(client) try_xdo_manual(client)
# i3ipc_xdotool_manual_click_hack() # i3ipc_xdotool_manual_click_hack()
case _ as tech: case _ as tech:
raise RuntimeError( raise RuntimeError(f'{tech} is not supported for reset tech!?')
f'{tech!r} is not supported for reset tech!?'
)
# we don't really need the ``xdotool`` approach any more B) # we don't really need the ``xdotool`` approach any more B)
return True return True
@ -275,39 +265,14 @@ async def vnc_click_hack(
# 640x1800 # 640x1800
await client.move( await client.move(
Point( Point(
500, # x from left 500,
400, # y from top 500,
) )
) )
# in case a prior dialog win is open/active.
await client.press('ISO_Enter')
# ensure the ib-gw window is active # ensure the ib-gw window is active
await client.click(MOUSE_BUTTON_LEFT) await client.click(MOUSE_BUTTON_LEFT)
# send the hotkeys combo B) # send the hotkeys combo B)
await client.press( await client.press('Ctrl', 'Alt', key) # keys are stacked
'Ctrl',
'Alt',
key,
) # NOTE, keys are stacked
# XXX, sometimes a dialog asking if you want to "simulate
# a reset" will show, in which case we want to select
# "Yes" (by tabbing) and then hit enter.
iters: int = 1
delay: float = 0.3
await asyncio.sleep(delay)
for i in range(iters):
log.info(f'Sending TAB {i}')
await client.press('Tab')
await asyncio.sleep(delay)
for i in range(iters):
log.info(f'Sending ENTER {i}')
await client.press('KP_Enter')
await asyncio.sleep(delay)
def i3ipc_fin_wins_titled( def i3ipc_fin_wins_titled(

View File

@ -15,8 +15,7 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>. # along with this program. If not, see <https://www.gnu.org/licenses/>.
''' '''
Core API client machinery; mostly sane/useful wrapping around Core API client machinery; mostly sane/useful wrapping around `ib_insync`..
`ib_async`..
''' '''
from __future__ import annotations from __future__ import annotations
@ -58,7 +57,7 @@ from pendulum import (
Interval, Interval,
) )
from eventkit import Event from eventkit import Event
from ib_async import ( from ib_insync import (
client as ib_client, client as ib_client,
IB, IB,
Contract, Contract,
@ -144,7 +143,7 @@ _bar_sizes = {
_show_wap_in_history: bool = False _show_wap_in_history: bool = False
# overrides to sidestep pretty questionable design decisions in # overrides to sidestep pretty questionable design decisions in
# ``ib_async``: # ``ib_insync``:
class NonShittyWrapper(Wrapper): class NonShittyWrapper(Wrapper):
def tcpDataArrived(self): def tcpDataArrived(self):
"""Override time stamps to be floats for now. """Override time stamps to be floats for now.
@ -184,7 +183,7 @@ class NonShittyIB(IB):
''' '''
def __init__(self): def __init__(self):
# override `ib_async` internal loggers so we can see wtf # override `ib_insync` internal loggers so we can see wtf
# it's doing.. # it's doing..
self._logger = get_logger( self._logger = get_logger(
name=__name__, name=__name__,
@ -195,7 +194,7 @@ class NonShittyIB(IB):
self.wrapper = NonShittyWrapper(self) self.wrapper = NonShittyWrapper(self)
self.client = ib_client.Client(self.wrapper) self.client = ib_client.Client(self.wrapper)
self.client._logger = get_logger( self.client._logger = get_logger(
name='ib_async.client', name='ib_insync.client',
) )
# self.errorEvent += self._onError # self.errorEvent += self._onError
@ -561,7 +560,7 @@ class Client:
# f'Recursing for more bars:\n' # f'Recursing for more bars:\n'
) )
# XXX, debug! # XXX, debug!
# breakpoint() breakpoint()
# XXX ? TODO? recursively try to re-request? # XXX ? TODO? recursively try to re-request?
# => i think *NO* right? # => i think *NO* right?
# #
@ -768,48 +767,25 @@ class Client:
expiry: str = '', expiry: str = '',
front: bool = False, front: bool = False,
) -> Contract|list[Contract]: ) -> Contract:
''' '''
Get an unqualifed contract for the current "continous" Get an unqualifed contract for the current "continous"
future. future.
When input params result in a so called "ambiguous contract"
situation, we return the list of all matches provided by,
`IB.qualifyContractsAsync(..., returnAll=True)`
''' '''
# it's the "front" contract returned here # it's the "front" contract returned here
if front: if front:
cons = ( con = (await self.ib.qualifyContractsAsync(
await self.ib.qualifyContractsAsync( ContFuture(symbol, exchange=exchange)
ContFuture(symbol, exchange=exchange), ))[0]
returnAll=True,
)
)
else: else:
cons = ( con = (await self.ib.qualifyContractsAsync(
await self.ib.qualifyContractsAsync(
Future( Future(
symbol, symbol,
exchange=exchange, exchange=exchange,
lastTradeDateOrContractMonth=expiry, lastTradeDateOrContractMonth=expiry,
),
returnAll=True,
)
)
con = cons[0]
if isinstance(con, list):
log.warning(
f'{len(con)!r} futes cons matched for input params,\n'
f'symbol={symbol!r}\n'
f'exchange={exchange!r}\n'
f'expiry={expiry!r}\n'
f'\n'
f'cons:\n'
f'{con!r}\n'
) )
))[0]
return con return con
@ -902,7 +878,7 @@ class Client:
currency='USD', currency='USD',
exchange='PAXOS', exchange='PAXOS',
) )
# XXX, on `ib_async` when first tried this, # XXX, on `ib_insync` when first tried this,
# > Error 10299, reqId 141: Expected what to show is # > Error 10299, reqId 141: Expected what to show is
# > AGGTRADES, please use that instead of TRADES., # > AGGTRADES, please use that instead of TRADES.,
# > contract: Crypto(conId=479624278, symbol='BTC', # > contract: Crypto(conId=479624278, symbol='BTC',
@ -934,17 +910,11 @@ class Client:
) )
exch = 'SMART' if not exch else exch exch = 'SMART' if not exch else exch
if isinstance(con, list):
contracts: list[Contract] = con
else:
contracts: list[Contract] = [con] contracts: list[Contract] = [con]
if qualify: if qualify:
try: try:
contracts: list[Contract] = ( contracts: list[Contract] = (
await self.ib.qualifyContractsAsync( await self.ib.qualifyContractsAsync(con)
*contracts
)
) )
except RequestError as err: except RequestError as err:
msg = err.message msg = err.message
@ -1022,6 +992,7 @@ class Client:
async def get_sym_details( async def get_sym_details(
self, self,
fqme: str, fqme: str,
) -> tuple[ ) -> tuple[
Contract, Contract,
ContractDetails, ContractDetails,
@ -1121,7 +1092,7 @@ class Client:
size: int, size: int,
account: str, # if blank the "default" tws account is used account: str, # if blank the "default" tws account is used
# XXX: by default 0 tells ``ib_async`` methods that there is no # XXX: by default 0 tells ``ib_insync`` methods that there is no
# existing order so ask the client to create a new one (which it # existing order so ask the client to create a new one (which it
# seems to do by allocating an int counter - collision prone..) # seems to do by allocating an int counter - collision prone..)
reqid: int = None, reqid: int = None,
@ -1310,7 +1281,7 @@ async def load_aio_clients(
port: int = None, port: int = None,
client_id: int = 6116, client_id: int = 6116,
# the API TCP in `ib_async` connection can be flaky af so instead # the API TCP in `ib_insync` connection can be flaky af so instead
# retry a few times to get the client going.. # retry a few times to get the client going..
connect_retries: int = 3, connect_retries: int = 3,
connect_timeout: float = 30, # in case a remote-host connect_timeout: float = 30, # in case a remote-host
@ -1318,7 +1289,7 @@ async def load_aio_clients(
) -> dict[str, Client]: ) -> dict[str, Client]:
''' '''
Return an ``ib_async.IB`` instance wrapped in our client API. Return an ``ib_insync.IB`` instance wrapped in our client API.
Client instances are cached for later use. Client instances are cached for later use.
@ -1660,7 +1631,6 @@ async def open_aio_client_method_relay(
) -> None: ) -> None:
# with tractor.devx.maybe_open_crash_handler() as _bxerr:
# sync with `open_client_proxy()` caller # sync with `open_client_proxy()` caller
chan.started_nowait(client) chan.started_nowait(client)
@ -1670,11 +1640,7 @@ async def open_aio_client_method_relay(
# relay all method requests to ``asyncio``-side client and deliver # relay all method requests to ``asyncio``-side client and deliver
# back results # back results
while not chan._to_trio._closed: # <- TODO, better check like `._web_bs`? while not chan._to_trio._closed: # <- TODO, better check like `._web_bs`?
msg: ( msg: tuple[str, dict]|dict|None = await chan.get()
None
|tuple[str, dict]
|dict
) = await chan.get()
match msg: match msg:
case None: # termination sentinel case None: # termination sentinel
log.info('asyncio `Client` method-proxy SHUTDOWN!') log.info('asyncio `Client` method-proxy SHUTDOWN!')
@ -1776,7 +1742,7 @@ async def get_client(
) -> Client: ) -> Client:
''' '''
Init the ``ib_async`` client in another actor and return Init the ``ib_insync`` client in another actor and return
a method proxy to it. a method proxy to it.
''' '''

View File

@ -35,14 +35,14 @@ from trio_typing import TaskStatus
import tractor import tractor
from tractor.to_asyncio import LinkedTaskChannel from tractor.to_asyncio import LinkedTaskChannel
from tractor import trionics from tractor import trionics
from ib_async.contract import ( from ib_insync.contract import (
Contract, Contract,
) )
from ib_async.order import ( from ib_insync.order import (
Trade, Trade,
OrderStatus, OrderStatus,
) )
from ib_async.objects import ( from ib_insync.objects import (
Fill, Fill,
Execution, Execution,
CommissionReport, CommissionReport,
@ -181,7 +181,7 @@ async def handle_order_requests(
# validate # validate
order = BrokerdOrder(**request_msg) order = BrokerdOrder(**request_msg)
# XXX: by default 0 tells ``ib_async`` methods that # XXX: by default 0 tells ``ib_insync`` methods that
# there is no existing order so ask the client to create # there is no existing order so ask the client to create
# a new one (which it seems to do by allocating an int # a new one (which it seems to do by allocating an int
# counter - collision prone..) # counter - collision prone..)
@ -237,7 +237,7 @@ async def recv_trade_updates(
) -> None: ) -> None:
''' '''
Receive and relay order control and positioning related events Receive and relay order control and positioning related events
from `ib_async`, pack as tuples and push over mem-chan to our from `ib_insync`, pack as tuples and push over mem-chan to our
trio relay task for processing and relay to EMS. trio relay task for processing and relay to EMS.
''' '''
@ -303,7 +303,7 @@ async def recv_trade_updates(
# much more then a few more pnl fields.. # much more then a few more pnl fields..
# 'updatePortfolioEvent', # 'updatePortfolioEvent',
# XXX: these all seem to be weird ib_async internal # XXX: these all seem to be weird ib_insync internal
# events that we probably don't care that much about # events that we probably don't care that much about
# given the internal design is wonky af.. # given the internal design is wonky af..
# 'newOrderEvent', # 'newOrderEvent',
@ -499,7 +499,7 @@ async def open_trade_event_stream(
] = trio.TASK_STATUS_IGNORED, ] = trio.TASK_STATUS_IGNORED,
): ):
''' '''
Proxy wrapper for starting trade event stream from ib_async Proxy wrapper for starting trade event stream from ib_insync
which spawns an asyncio task that registers an internal closure which spawns an asyncio task that registers an internal closure
(`push_tradies()`) which in turn relays trading events through (`push_tradies()`) which in turn relays trading events through
a `tractor.to_asyncio.LinkedTaskChannel` which the parent a `tractor.to_asyncio.LinkedTaskChannel` which the parent
@ -991,9 +991,6 @@ _statuses: dict[str, str] = {
# TODO: see a current ``ib_insync`` issue around this: # TODO: see a current ``ib_insync`` issue around this:
# https://github.com/erdewit/ib_insync/issues/363 # https://github.com/erdewit/ib_insync/issues/363
'Inactive': 'pending', 'Inactive': 'pending',
# XXX, uhh wut the heck is this?
'ValidationError': 'error',
} }
_action_map = { _action_map = {
@ -1066,19 +1063,8 @@ async def deliver_trade_events(
# TODO: for some reason we can receive a ``None`` here when the # TODO: for some reason we can receive a ``None`` here when the
# ib-gw goes down? Not sure exactly how that's happening looking # ib-gw goes down? Not sure exactly how that's happening looking
# at the eventkit code above but we should probably handle it... # at the eventkit code above but we should probably handle it...
event_name: str
item: (
Trade
|tuple[Trade, Fill]
|CommissionReport
|IbPosition
|dict
)
async for event_name, item in trade_event_stream: async for event_name, item in trade_event_stream:
log.info( log.info(f'Relaying `{event_name}`:\n{pformat(item)}')
f'Relaying {event_name!r}:\n'
f'{pformat(item)}\n'
)
match event_name: match event_name:
case 'orderStatusEvent': case 'orderStatusEvent':
@ -1089,12 +1075,11 @@ async def deliver_trade_events(
trade: Trade = item trade: Trade = item
reqid: str = str(trade.order.orderId) reqid: str = str(trade.order.orderId)
status: OrderStatus = trade.orderStatus status: OrderStatus = trade.orderStatus
status_str: str = _statuses.get( status_str: str = _statuses[status.status]
status.status,
'error',
)
remaining: float = status.remaining remaining: float = status.remaining
if status_str == 'filled': if (
status_str == 'filled'
):
fill: Fill = trade.fills[-1] fill: Fill = trade.fills[-1]
execu: Execution = fill.execution execu: Execution = fill.execution
@ -1125,12 +1110,6 @@ async def deliver_trade_events(
# all units were cleared. # all units were cleared.
status_str = 'closed' status_str = 'closed'
elif status_str == 'error':
log.error(
f'IB reported error status for order ??\n'
f'{status.status!r}\n'
)
# skip duplicate filled updates - we get the deats # skip duplicate filled updates - we get the deats
# from the execution details event # from the execution details event
msg = BrokerdStatus( msg = BrokerdStatus(
@ -1291,24 +1270,14 @@ async def deliver_trade_events(
case 'error': case 'error':
# NOTE: see impl deats in # NOTE: see impl deats in
# `Client.inline_errors()::push_err()` # `Client.inline_errors()::push_err()`
err: dict|str = item err: dict = item
# std case, never relay errors for non-order-control # never relay errors for non-broker related issues
# related issues.
# https://interactivebrokers.github.io/tws-api/message_codes.html # https://interactivebrokers.github.io/tws-api/message_codes.html
if isinstance(err, dict):
code: int = err['error_code'] code: int = err['error_code']
reason: str = err['reason'] reason: str = err['reason']
reqid: str = str(err['reqid']) reqid: str = str(err['reqid'])
# XXX, sometimes you'll get just a `str` of the form,
# '[code 104] connection failed' or something..
elif isinstance(err, str):
code_part, _, reason = err.rpartition(']')
if code_part:
_, _, code = code_part.partition('[code')
reqid: str = '<unknown>'
# "Warning:" msg codes, # "Warning:" msg codes,
# https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
# - 2109: 'Outside Regular Trading Hours' # - 2109: 'Outside Regular Trading Hours'

View File

@ -36,7 +36,7 @@ from typing import (
) )
from async_generator import aclosing from async_generator import aclosing
import ib_async as ibis import ib_insync as ibis
import numpy as np import numpy as np
from pendulum import ( from pendulum import (
now, now,
@ -100,7 +100,7 @@ tick_types = {
5: 'size', 5: 'size',
8: 'volume', 8: 'volume',
# `ib_async` already packs these into # ``ib_insync`` already packs these into
# quotes under the following fields. # quotes under the following fields.
55: 'trades_per_min', # `'tradeRate'` 55: 'trades_per_min', # `'tradeRate'`
56: 'vlm_per_min', # `'volumeRate'` 56: 'vlm_per_min', # `'volumeRate'`
@ -201,15 +201,6 @@ async def open_history_client(
fqme, fqme,
timeframe, timeframe,
end_dt=end_dt, end_dt=end_dt,
# XXX WARNING, we don't actually use this inside
# `Client.bars()` since it isn't really supported,
# the API instead supports a "duration" of time style
# from the `end_dt` (or at least that was the best
# way to get it working sanely)..
#
# SO, with that in mind be aware that any downstream
# logic based on this may be mostly futile Xp
start_dt=start_dt, start_dt=start_dt,
) )
latency = time.time() - query_start latency = time.time() - query_start
@ -287,27 +278,19 @@ async def open_history_client(
trimmed_bars = bars_array[ trimmed_bars = bars_array[
bars_array['time'] >= start_dt.timestamp() bars_array['time'] >= start_dt.timestamp()
] ]
# XXX, should NEVER get HERE!
if trimmed_bars.size:
trimmed_first_dt: datetime = from_timestamp(trimmed_bars['time'][0])
if ( if (
trimmed_first_dt trimmed_first_dt := from_timestamp(trimmed_bars['time'][0])
>= !=
start_dt start_dt
): ):
msg: str = ( # TODO! rm this once we're more confident it never hits!
# breakpoint()
raise RuntimeError(
f'OHLC-bars array start is gt `start_dt` limit !!\n' f'OHLC-bars array start is gt `start_dt` limit !!\n'
f'start_dt: {start_dt}\n' f'start_dt: {start_dt}\n'
f'first_dt: {first_dt}\n' f'first_dt: {first_dt}\n'
f'trimmed_first_dt: {trimmed_first_dt}\n' f'trimmed_first_dt: {trimmed_first_dt}\n'
f'\n'
f'Delivering shorted frame of {trimmed_bars.size!r}\n'
) )
log.warning(msg)
# TODO! rm this once we're more confident it
# never breaks anything (in the caller)!
# breakpoint()
# raise RuntimeError(msg)
# XXX, overwrite with start_dt-limited frame # XXX, overwrite with start_dt-limited frame
bars_array = trimmed_bars bars_array = trimmed_bars
@ -321,7 +304,7 @@ async def open_history_client(
# TODO: it seems like we can do async queries for ohlc # TODO: it seems like we can do async queries for ohlc
# but getting the order right still isn't working and I'm not # but getting the order right still isn't working and I'm not
# quite sure why.. needs some tinkering and probably # quite sure why.. needs some tinkering and probably
# a lookthrough of the `ib_async` machinery, for eg. maybe # a lookthrough of the `ib_insync` machinery, for eg. maybe
# we have to do the batch queries on the `asyncio` side? # we have to do the batch queries on the `asyncio` side?
yield ( yield (
get_hist, get_hist,
@ -1068,21 +1051,6 @@ def normalize(
# ticker.rtTime.timestamp) / 1000. # ticker.rtTime.timestamp) / 1000.
data.pop('rtTime') data.pop('rtTime')
# XXX, `ib_async` seems to set a
# `'timezone': datetime.timezone.utc` in this `dict`
# which is NOT IPC serializeable sin codec!
#
# pretty sure we don't need any of this field for now anyway?
data.pop('defaults')
if lts := data.get('lastTimeStamp'):
lts.replace(tzinfo=None)
log.warning(
f'Stripping `.tzinfo` from datetime\n'
f'{lts}\n'
)
# breakpoint()
return data return data
@ -1259,7 +1227,7 @@ async def stream_quotes(
): ):
# ?TODO? can we rm this - particularly for `ib_async`? # ?TODO? can we rm this - particularly for `ib_async`?
# ugh, clear ticks since we've consumed them # ugh, clear ticks since we've consumed them
# (ahem, ib_async is stateful trash) # (ahem, ib_insync is stateful trash)
# first_ticker.ticks = [] # first_ticker.ticks = []
# only on first entry at feed boot up # only on first entry at feed boot up

View File

@ -36,7 +36,7 @@ from pendulum import (
parse, parse,
from_timestamp, from_timestamp,
) )
from ib_async import ( from ib_insync import (
Contract, Contract,
Commodity, Commodity,
Fill, Fill,

View File

@ -23,7 +23,6 @@ from contextlib import (
nullcontext, nullcontext,
) )
from decimal import Decimal from decimal import Decimal
from functools import partial
import time import time
from typing import ( from typing import (
Awaitable, Awaitable,
@ -31,9 +30,8 @@ from typing import (
) )
from rapidfuzz import process as fuzzy from rapidfuzz import process as fuzzy
import ib_async as ibis import ib_insync as ibis
import tractor import tractor
from tractor.devx.pformat import ppfmt
import trio import trio
from piker.accounting import ( from piker.accounting import (
@ -217,19 +215,18 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
f'{ib_client}\n' f'{ib_client}\n'
) )
last: float = time.time() last = time.time()
async for pattern in stream: async for pattern in stream:
log.info(f'received {pattern}') log.info(f'received {pattern}')
now: float = time.time() now: float = time.time()
# TODO? check this is no longer true?
# this causes tractor hang... # this causes tractor hang...
# assert 0 # assert 0
assert pattern, 'IB can not accept blank search pattern' assert pattern, 'IB can not accept blank search pattern'
# throttle search requests to no faster then 1Hz # throttle search requests to no faster then 1Hz
diff: float = now - last diff = now - last
if diff < 1.0: if diff < 1.0:
log.debug('throttle sleeping') log.debug('throttle sleeping')
await trio.sleep(diff) await trio.sleep(diff)
@ -240,12 +237,11 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
if ( if (
not pattern not pattern
or or pattern.isspace()
pattern.isspace()
or
# XXX: not sure if this is a bad assumption but it # XXX: not sure if this is a bad assumption but it
# seems to make search snappier? # seems to make search snappier?
len(pattern) < 1 or len(pattern) < 1
): ):
log.warning('empty pattern received, skipping..') log.warning('empty pattern received, skipping..')
@ -258,58 +254,36 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# XXX: this unblocks the far end search task which may # XXX: this unblocks the far end search task which may
# hold up a multi-search nursery block # hold up a multi-search nursery block
await stream.send({}) await stream.send({})
continue continue
log.info( log.info(f'searching for {pattern}')
f'Searching for FQME with,\n'
f'pattern: {pattern!r}\n'
)
last: float = time.time() last = time.time()
# async batch search using api stocks endpoint and # async batch search using api stocks endpoint and module
# module defined adhoc symbol set. # defined adhoc symbol set.
stock_results: list[dict] = [] stock_results = []
async def extend_results( async def extend_results(
# ?TODO, how to type async-fn!? target: Awaitable[list]
target: Awaitable[list],
pattern: str,
**kwargs,
) -> None: ) -> None:
try: try:
results = await target( results = await target
pattern=pattern,
**kwargs,
)
client_repr: str = proxy._aio_ns.ib.client.__class__.__name__
meth_repr: str = target.keywords["meth"]
log.info(
f'Search query,\n'
f'{client_repr}.{meth_repr}(\n'
f' pattern={pattern!r}\n'
f' **kwargs={kwargs!r},\n'
f') = {ppfmt(list(results))}'
# XXX ^ just the keys since that's what
# shows in UI results table.
)
except tractor.trionics.Lagged: except tractor.trionics.Lagged:
log.exception( print("IB SYM-SEARCH OVERRUN?!?")
'IB SYM-SEARCH OVERRUN?!?\n'
)
return return
stock_results.extend(results) stock_results.extend(results)
for _ in range(10): for _ in range(10):
with trio.move_on_after(3) as cs: with trio.move_on_after(3) as cs:
async with trio.open_nursery() as tn: async with trio.open_nursery() as sn:
tn.start_soon( sn.start_soon(
partial(
extend_results, extend_results,
proxy.search_symbols(
pattern=pattern, pattern=pattern,
target=proxy.search_symbols, upto=5,
upto=10,
), ),
) )
@ -339,9 +313,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# adhoc_match_results = {i[0]: {} for i in # adhoc_match_results = {i[0]: {} for i in
# adhoc_matches} # adhoc_matches}
log.debug( log.debug(f'fuzzy matching stocks {stock_results}')
f'fuzzy matching stocks {ppfmt(stock_results)}'
)
stock_matches = fuzzy.extract( stock_matches = fuzzy.extract(
pattern, pattern,
stock_results, stock_results,
@ -355,10 +327,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# TODO: we used to deliver contract details # TODO: we used to deliver contract details
# {item[2]: item[0] for item in stock_matches} # {item[2]: item[0] for item in stock_matches}
log.debug( log.debug(f"sending matches: {matches.keys()}")
f'Sending final matches\n'
f'{matches.keys()}'
)
await stream.send(matches) await stream.send(matches)
@ -553,11 +522,7 @@ async def get_mkt_info(
if atype == 'commodity': if atype == 'commodity':
venue: str = 'cmdty' venue: str = 'cmdty'
else: else:
venue: str = ( venue = con.primaryExchange or con.exchange
con.primaryExchange
or
con.exchange
)
price_tick: Decimal = Decimal(str(details.minTick)) price_tick: Decimal = Decimal(str(details.minTick))
ib_min_tick_gt_2: Decimal = Decimal('0.01') ib_min_tick_gt_2: Decimal = Decimal('0.01')

View File

@ -41,9 +41,8 @@ from pendulum import (
) )
if TYPE_CHECKING: if TYPE_CHECKING:
from ib_async import ( from ib_insync import (
TradingSession, TradingSession,
Contract,
ContractDetails, ContractDetails,
) )
from exchange_calendars.exchange_calendars import ( from exchange_calendars.exchange_calendars import (
@ -83,20 +82,8 @@ def has_holiday(
''' '''
tz: str = con_deats.timeZoneId tz: str = con_deats.timeZoneId
con: Contract = con_deats.contract exch: str = con_deats.contract.primaryExchange
exch: str = ( cal: ExchangeCalendar = xcals.get_calendar(exch)
con.primaryExchange
or
con.exchange
)
# XXX, ad-hoc handle any IB exchange which are non-std
# via lookup table..
std_exch: dict = {
'ARCA': 'ARCX',
}.get(exch, exch)
cal: ExchangeCalendar = xcals.get_calendar(std_exch)
end: datetime = period.end end: datetime = period.end
# _start: datetime = period.start # _start: datetime = period.start
# ?TODO, can rm ya? # ?TODO, can rm ya?
@ -249,7 +236,7 @@ def is_venue_closure(
# #
# NOTE, this was generated by @guille from a gpt5 prompt # NOTE, this was generated by @guille from a gpt5 prompt
# and was originally thot to be needed before learning about # and was originally thot to be needed before learning about
# `ib_async.contract.ContractDetails._parseSessions()` and # `ib_insync.contract.ContractDetails._parseSessions()` and
# it's downstream meths.. # it's downstream meths..
# #
# This is still likely useful to keep for now to parse the # This is still likely useful to keep for now to parse the

View File

@ -19,6 +19,7 @@ Platform configuration (files) mgmt.
""" """
import platform import platform
import sys
import os import os
import shutil import shutil
from typing import ( from typing import (
@ -28,7 +29,6 @@ from typing import (
from pathlib import Path from pathlib import Path
from bidict import bidict from bidict import bidict
import platformdirs
import tomlkit import tomlkit
try: try:
import tomllib import tomllib
@ -41,7 +41,7 @@ from .log import get_logger
log = get_logger('broker-config') log = get_logger('broker-config')
# XXX NOTE: orig impl was taken from `click` # XXX NOTE: taken from `click`
# |_https://github.com/pallets/click/blob/main/src/click/utils.py#L449 # |_https://github.com/pallets/click/blob/main/src/click/utils.py#L449
# #
# (since apparently they have some super weirdness with SIGINT and # (since apparently they have some super weirdness with SIGINT and
@ -54,21 +54,44 @@ def get_app_dir(
force_posix: bool = False, force_posix: bool = False,
) -> str: ) -> str:
''' r"""Returns the config folder for the application. The default behavior
Returns the config folder for the application. The default behavior
is to return whatever is most appropriate for the operating system. is to return whatever is most appropriate for the operating system.
---- To give you an idea, for an app called ``"Foo Bar"``, something like
NOTE, below is originally from `click` impl fn, we can prolly remove? the following folders could be returned:
----
Mac OS X:
``~/Library/Application Support/Foo Bar``
Mac OS X (POSIX):
``~/.foo-bar``
Unix:
``~/.config/foo-bar``
Unix (POSIX):
``~/.foo-bar``
Win XP (roaming):
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
Win XP (not roaming):
``C:\Documents and Settings\<user>\Application Data\Foo Bar``
Win 7 (roaming):
``C:\Users\<user>\AppData\Roaming\Foo Bar``
Win 7 (not roaming):
``C:\Users\<user>\AppData\Local\Foo Bar``
.. versionadded:: 2.0
:param app_name: the application name. This should be properly capitalized
and can contain whitespace.
:param roaming: controls if the folder should be roaming or not on Windows. :param roaming: controls if the folder should be roaming or not on Windows.
Has no affect otherwise. Has no affect otherwise.
:param force_posix: if this is set to `True` then on any POSIX system the :param force_posix: if this is set to `True` then on any POSIX system the
folder will be stored in the home folder with a leading folder will be stored in the home folder with a leading
dot instead of the XDG config home or darwin's dot instead of the XDG config home or darwin's
application support folder. application support folder.
''' """
def _posixify(name):
return "-".join(name.split()).lower()
# NOTE: for testing with `pytest` we leverage the `tmp_dir` # NOTE: for testing with `pytest` we leverage the `tmp_dir`
# fixture to generate (and clean up) a test-request-specific # fixture to generate (and clean up) a test-request-specific
# directory for isolated configuration files such that, # directory for isolated configuration files such that,
@ -94,30 +117,23 @@ def get_app_dir(
# assert testdirpath.exists(), 'piker test harness might be borked!?' # assert testdirpath.exists(), 'piker test harness might be borked!?'
# app_name = str(testdirpath) # app_name = str(testdirpath)
os_name: str = platform.system() if platform.system() == 'Windows':
conf_dir: Path = platformdirs.user_config_path() key = "APPDATA" if roaming else "LOCALAPPDATA"
app_dir: Path = conf_dir / app_name folder = os.environ.get(key)
if folder is None:
# ?TODO, from `click`; can remove? folder = os.path.expanduser("~")
return os.path.join(folder, app_name)
if force_posix: if force_posix:
def _posixify(name):
return "-".join(name.split()).lower()
return os.path.join( return os.path.join(
os.path.expanduser( os.path.expanduser("~/.{}".format(_posixify(app_name))))
"~/.{}".format( if sys.platform == "darwin":
_posixify(app_name) return os.path.join(
os.path.expanduser("~/Library/Application Support"), app_name
) )
return os.path.join(
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
_posixify(app_name),
) )
)
log.info(
f'Using user config directory,\n'
f'platform.system(): {os_name!r}\n'
f'conf_dir: {conf_dir!r}\n'
f'app_dir: {conf_dir!r}\n'
)
return app_dir
_click_config_dir: Path = Path(get_app_dir('piker')) _click_config_dir: Path = Path(get_app_dir('piker'))
@ -234,9 +250,7 @@ def repodir() -> Path:
repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE')) repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE'))
confdir: Path = repodir / 'config' confdir: Path = repodir / 'config'
assert confdir.is_dir(), ( assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!'
f'{confdir} DNE, {repodir} is likely incorrect!'
)
return repodir return repodir

View File

@ -105,6 +105,15 @@ class SymbologyCache(Struct):
def write_config(self) -> None: def write_config(self) -> None:
def clean_dict_for_toml(d):
    '''
    Recursively drop `None` values so the structure can be
    serialized to TOML, which has no null type.

    Dict entries whose value is `None` and list items that are
    `None` are removed; all other values pass through unchanged.
    '''
    if isinstance(d, dict):
        cleaned = {}
        for key, val in d.items():
            if val is not None:
                cleaned[key] = clean_dict_for_toml(val)
        return cleaned

    if isinstance(d, list):
        return [
            clean_dict_for_toml(entry)
            for entry in d
            if entry is not None
        ]

    return d
# put the backend's pair-struct type ref at the top # put the backend's pair-struct type ref at the top
# of file if possible. # of file if possible.
cachedict: dict[str, Any] = { cachedict: dict[str, Any] = {
@ -125,7 +134,9 @@ class SymbologyCache(Struct):
dct = cachedict[key] = {} dct = cachedict[key] = {}
for key, struct in table.items(): for key, struct in table.items():
dct[key] = struct.to_dict(include_non_members=False) raw_dict = struct.to_dict(include_non_members=False)
# Clean None values for TOML compatibility
dct[key] = clean_dict_for_toml(raw_dict)
try: try:
with self.fp.open(mode='wb') as fp: with self.fp.open(mode='wb') as fp:

View File

@ -249,20 +249,10 @@ async def maybe_fill_null_segments(
end_dt=end_dt, end_dt=end_dt,
) )
if array.size == 0:
log.warning(
f'Valid gap from backend ??\n'
f'{end_dt} -> {start_dt}\n'
)
# ?TODO? do we want to remove the nulls and push
# the close price here for the gap duration?
await tractor.pause()
break
if ( if (
frame_start_dt := (from_timestamp(array['time'][0])) frame_start_dt := (
< from_timestamp(array['time'][0])
backfill_until_dt ) < backfill_until_dt
): ):
log.error( log.error(
f'Invalid frame_start !?\n' f'Invalid frame_start !?\n'
@ -624,17 +614,10 @@ async def start_backfill(
else: else:
log.warning( log.warning(
f'0 BARS TO PUSH after diff!?\n' '0 BARS TO PUSH after diff!?\n'
f'{next_start_dt} -> {last_start_dt}' f'{next_start_dt} -> {last_start_dt}'
f'\n'
f'This might mean we rxed a gap frame which starts BEFORE,\n'
f'backfill_until_dt: {backfill_until_dt}\n'
f'end_dt_param: {end_dt_param}\n'
) )
# XXX, to debug it and be sure. await tractor.pause()
# await tractor.pause()
break
# Check if we're about to exceed buffer capacity BEFORE # Check if we're about to exceed buffer capacity BEFORE
# attempting the push # attempting the push

View File

@ -34,7 +34,6 @@ import uuid
from bidict import bidict from bidict import bidict
import tractor import tractor
from tractor.devx.pformat import ppfmt
import trio import trio
from piker import config from piker import config
@ -1208,10 +1207,11 @@ async def process_trade_msg(
f'\n' f'\n'
f'=> CANCELLING ORDER DIALOG <=\n' f'=> CANCELLING ORDER DIALOG <=\n'
# from tractor.devx.pformat import ppfmt
# !TODO LOL, wtf the msg is causing # !TODO LOL, wtf the msg is causing
# a recursion bug! # a recursion bug!
# -[ ] get this shit on msgspec stat! # -[ ] get this shit on msgspec stat!
f'{ppfmt(broker_msg)}' # f'{ppfmt(broker_msg)}'
) )
# do all the things for a cancel: # do all the things for a cancel:
# - drop order-msg dialog from client table # - drop order-msg dialog from client table

View File

@ -52,6 +52,7 @@ dependencies = [
"bidict >=0.23.1", "bidict >=0.23.1",
"colorama >=0.4.6, <0.5.0", "colorama >=0.4.6, <0.5.0",
"colorlog >=6.7.0, <7.0.0", "colorlog >=6.7.0, <7.0.0",
"ib-insync >=0.9.86, <0.10.0",
"numpy>=2.0", "numpy>=2.0",
"polars >=0.20.6", "polars >=0.20.6",
"polars-fuzzy-match>=0.1.5", "polars-fuzzy-match>=0.1.5",
@ -75,8 +76,6 @@ dependencies = [
"numba>=0.61.0", "numba>=0.61.0",
"pyvnc", "pyvnc",
"exchange-calendars>=4.13.1", "exchange-calendars>=4.13.1",
"ib-async>=2.1.0",
"aeventkit>=2.1.0", # XXX, imports as eventkit?
] ]
# ------ dependencies ------ # ------ dependencies ------
# NOTE, by default we ship only a "headless" deps set bc # NOTE, by default we ship only a "headless" deps set bc

41
uv.lock
View File

@ -7,18 +7,6 @@ resolution-markers = [
"sys_platform != 'emscripten' and sys_platform != 'win32'", "sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
[[package]]
name = "aeventkit"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5c/8c/c08db1a1910f8d04ec6a524de522edd0bac181bdf94dbb01183f7685cd77/aeventkit-2.1.0.tar.gz", hash = "sha256:4e7d81bb0a67227121da50a23e19e5bbf13eded541a9f4857eeb6b7b857b738a", size = 24703, upload-time = "2025-06-22T15:54:03.961Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/8c/2a4b912b1afa201b25bdd0f5bccf96d5a8b5dccb6131316a8dd2d9cabcc1/aeventkit-2.1.0-py3-none-any.whl", hash = "sha256:962d43f79e731ac43527f2d0defeed118e6dbaa85f1487f5667540ebb8f00729", size = 26678, upload-time = "2025-06-22T15:54:02.141Z" },
]
[[package]] [[package]]
name = "aiodns" name = "aiodns"
version = "3.6.0" version = "3.6.0"
@ -363,6 +351,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/01/6f77d042b83260ef9ed73ea9647dfa0ef8414eba0a3fc57a509a088ad39b/elasticsearch-8.19.2-py3-none-any.whl", hash = "sha256:c16ba20c4c76cf6952e836dae7f4e724e00ba7bf31b94b79472b873683accdd4", size = 949706, upload-time = "2025-10-28T16:36:41.003Z" }, { url = "https://files.pythonhosted.org/packages/56/01/6f77d042b83260ef9ed73ea9647dfa0ef8414eba0a3fc57a509a088ad39b/elasticsearch-8.19.2-py3-none-any.whl", hash = "sha256:c16ba20c4c76cf6952e836dae7f4e724e00ba7bf31b94b79472b873683accdd4", size = 949706, upload-time = "2025-10-28T16:36:41.003Z" },
] ]
[[package]]
name = "eventkit"
version = "1.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/16/1e/0fac4e45d71ace143a2673ec642701c3cd16f833a0e77a57fa6a40472696/eventkit-1.0.3.tar.gz", hash = "sha256:99497f6f3c638a50ff7616f2f8cd887b18bbff3765dc1bd8681554db1467c933", size = 28320, upload-time = "2023-12-11T11:41:35.339Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/93/d9/7497d650b69b420e1a913329a843e16c715dac883750679240ef00a921e2/eventkit-1.0.3-py3-none-any.whl", hash = "sha256:0e199527a89aff9d195b9671ad45d2cc9f79ecda0900de8ecfb4c864d67ad6a2", size = 31837, upload-time = "2023-12-11T11:41:33.358Z" },
]
[[package]] [[package]]
name = "exceptiongroup" name = "exceptiongroup"
version = "1.3.1" version = "1.3.1"
@ -538,17 +538,16 @@ wheels = [
] ]
[[package]] [[package]]
name = "ib-async" name = "ib-insync"
version = "2.1.0" version = "0.9.86"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "aeventkit" }, { name = "eventkit" },
{ name = "nest-asyncio" }, { name = "nest-asyncio" },
{ name = "tzdata" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/30/4d/dfc1da8224c3ffcdcd668da7283c4e5f14239a07f83ea66af99700296fc3/ib_async-2.1.0.tar.gz", hash = "sha256:6a03a87d6c06acacb0217a5bea60a8a168ecd5b5a7e86e1c73678d5b48cbc796", size = 87678, upload-time = "2025-12-08T01:42:32.004Z" } sdist = { url = "https://files.pythonhosted.org/packages/55/bb/733d5c81c8c2f54e90898afc7ff3a99f4d53619e6917c848833f9cc1ab56/ib_insync-0.9.86.tar.gz", hash = "sha256:73af602ca2463f260999970c5bd937b1c4325e383686eff301743a4de08d381e", size = 69859, upload-time = "2023-07-02T12:43:31.968Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/80/e7/8f33801788c66f15e9250957ff7f53a8000843f79af1a3ed7a96def0e96b/ib_async-2.1.0-py3-none-any.whl", hash = "sha256:f6d8b991bdbd6dd38e700c61b3dced06ebe0f14be4e5263e2ef10ba10b88d434", size = 88876, upload-time = "2025-12-08T01:42:30.883Z" }, { url = "https://files.pythonhosted.org/packages/8f/f3/28ea87be30570f4d6b8fd24380d12fa74e59467ee003755e76aeb29082b8/ib_insync-0.9.86-py3-none-any.whl", hash = "sha256:a61fbe56ff405d93d211dad8238d7300de76dd6399eafc04c320470edec9a4a4", size = 72980, upload-time = "2023-07-02T12:43:29.928Z" },
] ]
[[package]] [[package]]
@ -934,7 +933,6 @@ name = "piker"
version = "0.1.0a0.dev0" version = "0.1.0a0.dev0"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "aeventkit" },
{ name = "async-generator" }, { name = "async-generator" },
{ name = "attrs" }, { name = "attrs" },
{ name = "bidict" }, { name = "bidict" },
@ -943,7 +941,7 @@ dependencies = [
{ name = "cryptofeed" }, { name = "cryptofeed" },
{ name = "exchange-calendars" }, { name = "exchange-calendars" },
{ name = "httpx" }, { name = "httpx" },
{ name = "ib-async" }, { name = "ib-insync" },
{ name = "msgspec" }, { name = "msgspec" },
{ name = "numba" }, { name = "numba" },
{ name = "numpy" }, { name = "numpy" },
@ -1011,7 +1009,6 @@ uis = [
[package.metadata] [package.metadata]
requires-dist = [ requires-dist = [
{ name = "aeventkit", specifier = ">=2.1.0" },
{ name = "async-generator", specifier = ">=1.10,<2.0.0" }, { name = "async-generator", specifier = ">=1.10,<2.0.0" },
{ name = "attrs", specifier = ">=23.1.0,<24.0.0" }, { name = "attrs", specifier = ">=23.1.0,<24.0.0" },
{ name = "bidict", specifier = ">=0.23.1" }, { name = "bidict", specifier = ">=0.23.1" },
@ -1020,7 +1017,7 @@ requires-dist = [
{ name = "cryptofeed", specifier = ">=2.4.0,<3.0.0" }, { name = "cryptofeed", specifier = ">=2.4.0,<3.0.0" },
{ name = "exchange-calendars", specifier = ">=4.13.1" }, { name = "exchange-calendars", specifier = ">=4.13.1" },
{ name = "httpx", specifier = ">=0.27.0,<0.28.0" }, { name = "httpx", specifier = ">=0.27.0,<0.28.0" },
{ name = "ib-async", specifier = ">=2.1.0" }, { name = "ib-insync", specifier = ">=0.9.86,<0.10.0" },
{ name = "msgspec", specifier = ">=0.19.0,<0.20" }, { name = "msgspec", specifier = ">=0.19.0,<0.20" },
{ name = "numba", specifier = ">=0.61.0" }, { name = "numba", specifier = ">=0.61.0" },
{ name = "numpy", specifier = ">=2.0" }, { name = "numpy", specifier = ">=2.0" },