Compare commits

..

No commits in common. "py_codestyling" and "main" have entirely different histories.

60 changed files with 1630 additions and 2631 deletions

View File

@ -3,8 +3,7 @@
"allow": [ "allow": [
"Bash(chmod:*)", "Bash(chmod:*)",
"Bash(/tmp/piker_commits.txt)", "Bash(/tmp/piker_commits.txt)",
"Bash(python:*)", "Bash(python:*)"
"Bash(ls:*)"
], ],
"deny": [], "deny": [],
"ask": [] "ask": []

View File

@ -32,14 +32,7 @@ option.log.disabled = true
[kraken] [kraken]
# the reference fiat asset as can be set key_descr = ''
# in an account's web-trading-UI prefs.
src_fiat = 'usd'
# NOTE for account defs, the following
# lines must match as follows.
accounts.spot = 'spot'
key_descr = 'spot'
api_key = '' api_key = ''
secret = '' secret = ''
# ------ kraken ------ # ------ kraken ------

View File

@ -31,7 +31,6 @@ from piker.log import (
from ._util import ( from ._util import (
BrokerError, BrokerError,
SymbolNotFound, SymbolNotFound,
MarketNotFound as MarketNotFound,
NoData, NoData,
DataUnavailable, DataUnavailable,
DataThrottle, DataThrottle,

View File

@ -20,17 +20,10 @@ Handy cross-broker utils.
""" """
from __future__ import annotations from __future__ import annotations
# from functools import partial # from functools import partial
from typing import (
Type,
)
import json import json
import httpx import httpx
import logging import logging
from msgspec import Struct
from tractor._exceptions import (
reg_err_types,
)
from piker.log import ( from piker.log import (
colorize_json, colorize_json,
@ -66,10 +59,6 @@ class SymbolNotFound(BrokerError):
"Symbol not found by broker search" "Symbol not found by broker search"
class MarketNotFound(SymbolNotFound):
"Mkt-pair not found by broker search"
# TODO: these should probably be moved to `.tsp/.data`? # TODO: these should probably be moved to `.tsp/.data`?
class NoData(BrokerError): class NoData(BrokerError):
''' '''
@ -108,19 +97,6 @@ class DataThrottle(BrokerError):
''' '''
# TODO: add in throttle metrics/feedback # TODO: add in throttle metrics/feedback
class SchemaMismatch(BrokerError):
'''
Market `Pair` fields mismatch, likely due to provider API update.
'''
# auto-register all `BrokerError` subtypes for
# tractor IPC exc-marshalling.
reg_err_types([
BrokerError,
*BrokerError.__subclasses__(),
])
def resproc( def resproc(
resp: httpx.Response, resp: httpx.Response,
@ -147,45 +123,3 @@ def resproc(
log.debug(f"Received json contents:\n{colorize_json(msg)}") log.debug(f"Received json contents:\n{colorize_json(msg)}")
return msg if return_json else resp return msg if return_json else resp
def get_or_raise_on_pair_schema_mismatch(
pair_type: Type[Struct],
fields_data: dict,
provider_name: str,
api_url: str|None = None,
) -> Struct:
'''
Boilerplate helper around assset-`Pair` field schema mismatches,
normally due to provider API updates.
'''
try:
pair: Struct = pair_type(**fields_data)
return pair
except TypeError as err:
from tractor.devx.pformat import ppfmt
repr_data: str = ppfmt(fields_data)
report: str = (
f'Field mismatch we need to codify!\n'
f'\n'
f'{pair_type!r}({repr_data})'
f'\n'
f'^^^ {err.args[0]!r} ^^^\n'
f'\n'
f"Don't panic, prolly {provider_name!r} "
f"changed their symbology schema..\n"
)
if (
api_url
or
(api_url := pair_type._api_url)
):
report += (
f'\n'
f'Check out their API docs here:\n'
f'{api_url}\n'
)
raise SchemaMismatch(report) from err

View File

@ -49,9 +49,6 @@ from piker import config
from piker.clearing._messages import ( from piker.clearing._messages import (
Order, Order,
) )
from piker.brokers._util import (
get_or_raise_on_pair_schema_mismatch,
)
from piker.accounting import ( from piker.accounting import (
Asset, Asset,
digits_to_dec, digits_to_dec,
@ -373,12 +370,20 @@ class Client:
item['filters'] = filters item['filters'] = filters
pair_type: Type = PAIRTYPES[venue] pair_type: Type = PAIRTYPES[venue]
pair: Pair = get_or_raise_on_pair_schema_mismatch( try:
pair_type=pair_type, pair: Pair = pair_type(**item)
fields_data=item, except Exception as e:
provider_name='binance', e.add_note(
api_url='https://binance-docs.github.io/apidocs/spot/en/#exchange-information', f'\n'
) f'New or removed field we need to codify!\n'
f'pair-type: {pair_type!r}\n'
f'\n'
f"Don't panic, prolly stupid binance changed their symbology schema again..\n"
f'Check out their API docs here:\n'
f'\n'
f'https://binance-docs.github.io/apidocs/spot/en/#exchange-information\n'
)
raise
pair_table[pair.symbol.upper()] = pair pair_table[pair.symbol.upper()] = pair
# update an additional top-level-cross-venue-table # update an additional top-level-cross-venue-table
@ -576,8 +581,8 @@ class Client:
self, self,
mkt: MktPair, mkt: MktPair,
start_dt: datetime|None = None, start_dt: datetime | None = None,
end_dt: datetime|None = None, end_dt: datetime | None = None,
as_np: bool = True, as_np: bool = True,
@ -604,11 +609,7 @@ class Client:
start_time = binance_timestamp(start_dt) start_time = binance_timestamp(start_dt)
end_time = binance_timestamp(end_dt) end_time = binance_timestamp(end_dt)
import tractor bs_pair: Pair = self._pairs[mkt.bs_fqme.upper()]
with tractor.devx.maybe_open_crash_handler():
bs_pair: Pair = self._pairs[
mkt.bs_fqme.upper()
]
# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
bars = await self._api( bars = await self._api(

View File

@ -48,7 +48,6 @@ import tractor
from piker.brokers import ( from piker.brokers import (
open_cached_client, open_cached_client,
NoData, NoData,
MarketNotFound,
) )
from piker._cacheables import ( from piker._cacheables import (
async_lifo_cache, async_lifo_cache,
@ -204,13 +203,9 @@ async def stream_messages(
yield 'trade', piker_quote yield 'trade', piker_quote
def make_sub( def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
pairs: list[str],
sub_name: str,
uid: int,
) -> dict[str, str]:
''' '''
Create a request subscription packet `dict`. Create a request subscription packet dict.
- spot: - spot:
https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams
@ -306,10 +301,6 @@ async def get_mkt_info(
# uppercase since kraken bs_mktid is always upper # uppercase since kraken bs_mktid is always upper
if 'binance' not in fqme.lower(): if 'binance' not in fqme.lower():
log.warning(
f'Missing `.<provider>` part in fqme ??\n'
f'fqme: {fqme!r}\n'
)
fqme += '.binance' fqme += '.binance'
mkt_mode: str = '' mkt_mode: str = ''
@ -324,24 +315,6 @@ async def get_mkt_info(
venue: str = venue.upper() venue: str = venue.upper()
venue_lower: str = venue.lower() venue_lower: str = venue.lower()
if not venue:
if expiry:
expiry = f'.{expiry}'
expected: str = (
f'{mkt_ep}'
f'.<venue>'
f'{expiry}'
f'.{broker}'
)
raise MarketNotFound(
f'Invalid or missing .<venue> part in fqme?\n'
f'\n'
f'fqme: {fqme!r}\n'
f'expected-form>> {expected}\n'
f'\n'
f'Maybe you are missing a ".spot." ?\n'
)
# XXX TODO: we should change the usdtm_futes name to just # XXX TODO: we should change the usdtm_futes name to just
# usdm_futes (dropping the tether part) since it turns out that # usdm_futes (dropping the tether part) since it turns out that
# there are indeed USD-tokens OTHER THEN tether being used as # there are indeed USD-tokens OTHER THEN tether being used as
@ -359,8 +332,7 @@ async def get_mkt_info(
# TODO: handle coinm futes which have a margin asset that # TODO: handle coinm futes which have a margin asset that
# is some crypto token! # is some crypto token!
# https://binance-docs.github.io/apidocs/delivery/en/#exchange-information # https://binance-docs.github.io/apidocs/delivery/en/#exchange-information
or or 'btc' in venue_lower
'btc' in venue_lower
): ):
return None return None
@ -371,21 +343,16 @@ async def get_mkt_info(
if ( if (
venue venue
and and 'spot' not in venue_lower
'spot' not in venue_lower
# XXX: catch all in case user doesn't know which # XXX: catch all in case user doesn't know which
# venue they want (usdtm vs. coinm) and we can choose # venue they want (usdtm vs. coinm) and we can choose
# a default (via config?) once we support coin-m APIs. # a default (via config?) once we support coin-m APIs.
or or 'perp' in venue_lower
'perp' in venue_lower
): ):
if not mkt_mode: if not mkt_mode:
mkt_mode: str = f'{venue_lower}_futes' mkt_mode: str = f'{venue_lower}_futes'
# tracing
# await tractor.pause()
async with open_cached_client( async with open_cached_client(
'binance', 'binance',
) as client: ) as client:

View File

@ -20,7 +20,6 @@ Per market data-type definitions and schemas types.
""" """
from __future__ import annotations from __future__ import annotations
from typing import ( from typing import (
ClassVar,
Literal, Literal,
) )
from decimal import Decimal from decimal import Decimal
@ -204,8 +203,6 @@ class FutesPair(Pair):
# NOTE: see `.data._symcache.SymbologyCache.load()` for why # NOTE: see `.data._symcache.SymbologyCache.load()` for why
ns_path: str = 'piker.brokers.binance:FutesPair' ns_path: str = 'piker.brokers.binance:FutesPair'
_api_url: ClassVar[str] = 'https://binance-docs.github.io/apidocs/spot/en/#exchange-information'
# NOTE: for compat with spot pairs and `MktPair.src: Asset` # NOTE: for compat with spot pairs and `MktPair.src: Asset`
# processing.. # processing..
@property @property

View File

@ -425,7 +425,7 @@ class DataFeed:
async def stream_to_file( async def stream_to_file(
watchlist_name: str, watchlist_name: str,
filename: str, filename: str,
portal: tractor.Portal, portal: tractor._portal.Portal,
tickers: List[str], tickers: List[str],
brokermod: ModuleType, brokermod: ModuleType,
rate: int, rate: int,

View File

@ -23,6 +23,7 @@ from contextlib import (
asynccontextmanager as acm, asynccontextmanager as acm,
) )
from datetime import datetime from datetime import datetime
from functools import partial
import time import time
from typing import ( from typing import (
Any, Any,
@ -523,12 +524,13 @@ async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
async def aio_price_feed_relay( async def aio_price_feed_relay(
chan: to_asyncio.LinkedTaskChannel,
fh: FeedHandler, fh: FeedHandler,
instrument: Symbol, instrument: Symbol,
from_trio: asyncio.Queue,
to_trio: trio.abc.SendChannel,
) -> None: ) -> None:
async def _trade(data: dict, receipt_timestamp): async def _trade(data: dict, receipt_timestamp):
chan.send_nowait(('trade', { to_trio.send_nowait(('trade', {
'symbol': cb_sym_to_deribit_inst( 'symbol': cb_sym_to_deribit_inst(
str_to_cb_sym(data.symbol)).lower(), str_to_cb_sym(data.symbol)).lower(),
'last': data, 'last': data,
@ -538,7 +540,7 @@ async def aio_price_feed_relay(
})) }))
async def _l1(data: dict, receipt_timestamp): async def _l1(data: dict, receipt_timestamp):
chan.send_nowait(('l1', { to_trio.send_nowait(('l1', {
'symbol': cb_sym_to_deribit_inst( 'symbol': cb_sym_to_deribit_inst(
str_to_cb_sym(data.symbol)).lower(), str_to_cb_sym(data.symbol)).lower(),
'ticks': [ 'ticks': [
@ -568,7 +570,7 @@ async def aio_price_feed_relay(
install_signal_handlers=False) install_signal_handlers=False)
# sync with trio # sync with trio
chan.started_nowait(None) to_trio.send_nowait(None)
await asyncio.sleep(float('inf')) await asyncio.sleep(float('inf'))
@ -579,9 +581,11 @@ async def open_price_feed(
) -> trio.abc.ReceiveStream: ) -> trio.abc.ReceiveStream:
async with maybe_open_feed_handler() as fh: async with maybe_open_feed_handler() as fh:
async with to_asyncio.open_channel_from( async with to_asyncio.open_channel_from(
aio_price_feed_relay, partial(
fh=fh, aio_price_feed_relay,
instrument=instrument, fh,
instrument
)
) as (chan, first): ) as (chan, first):
yield chan yield chan
@ -607,9 +611,10 @@ async def maybe_open_price_feed(
async def aio_order_feed_relay( async def aio_order_feed_relay(
chan: to_asyncio.LinkedTaskChannel,
fh: FeedHandler, fh: FeedHandler,
instrument: Symbol, instrument: Symbol,
from_trio: asyncio.Queue,
to_trio: trio.abc.SendChannel,
) -> None: ) -> None:
async def _fill(data: dict, receipt_timestamp): async def _fill(data: dict, receipt_timestamp):
breakpoint() breakpoint()
@ -632,7 +637,7 @@ async def aio_order_feed_relay(
install_signal_handlers=False) install_signal_handlers=False)
# sync with trio # sync with trio
chan.started_nowait(None) to_trio.send_nowait(None)
await asyncio.sleep(float('inf')) await asyncio.sleep(float('inf'))
@ -643,9 +648,11 @@ async def open_order_feed(
) -> trio.abc.ReceiveStream: ) -> trio.abc.ReceiveStream:
async with maybe_open_feed_handler() as fh: async with maybe_open_feed_handler() as fh:
async with to_asyncio.open_channel_from( async with to_asyncio.open_channel_from(
aio_order_feed_relay, partial(
fh=fh, aio_order_feed_relay,
instrument=instrument, fh,
instrument
)
) as (chan, first): ) as (chan, first):
yield chan yield chan

View File

@ -95,7 +95,6 @@ from .symbols import (
) )
from ...log import get_logger from ...log import get_logger
from .venues import ( from .venues import (
is_expired,
is_venue_open, is_venue_open,
sesh_times, sesh_times,
is_venue_closure, is_venue_closure,
@ -497,7 +496,7 @@ class Client:
await self.ib.reqContractDetailsAsync(contract) await self.ib.reqContractDetailsAsync(contract)
)[0] )[0]
# convert to makt-native tz # convert to makt-native tz
tz: str = details.timeZoneId or 'EST' tz: str = details.timeZoneId
end_dt = end_dt.in_tz(tz) end_dt = end_dt.in_tz(tz)
first_dt: DateTime = from_timestamp(first).in_tz(tz) first_dt: DateTime = from_timestamp(first).in_tz(tz)
last_dt: DateTime = from_timestamp(last).in_tz(tz) last_dt: DateTime = from_timestamp(last).in_tz(tz)
@ -509,18 +508,10 @@ class Client:
_open_now: bool = is_venue_open( _open_now: bool = is_venue_open(
con_deats=details, con_deats=details,
) )
_is_expired: bool = is_expired(
con_deats=details,
)
# XXX, do gap detections. # XXX, do gap detections.
has_closure_gap: bool = False has_closure_gap: bool = False
if ( if (
# XXX, expired tracts can't be introspected
# for open/closure intervals due to ib's chitty
# details seemingly..
not _is_expired
and
last_dt.add(seconds=sample_period_s) last_dt.add(seconds=sample_period_s)
< <
end_dt end_dt

View File

@ -231,21 +231,20 @@ async def handle_order_requests(
async def recv_trade_updates( async def recv_trade_updates(
chan: tractor.to_asyncio.LinkedTaskChannel,
client: Client, client: Client,
to_trio: trio.abc.SendChannel,
) -> None: ) -> None:
''' '''
Receive and relay order control and positioning Receive and relay order control and positioning related events
related events from `ib_async`, pack as tuples and from `ib_async`, pack as tuples and push over mem-chan to our
push over mem-chan to our trio relay task for trio relay task for processing and relay to EMS.
processing and relay to EMS.
''' '''
client.inline_errors(chan) client.inline_errors(to_trio)
# sync with trio task # sync with trio task
chan.started_nowait(client.ib) to_trio.send_nowait(client.ib)
def push_tradesies( def push_tradesies(
eventkit_obj, eventkit_obj,
@ -283,7 +282,7 @@ async def recv_trade_updates(
try: try:
# emit event name + relevant ibis internal objects # emit event name + relevant ibis internal objects
chan.send_nowait((event_name, emit)) to_trio.send_nowait((event_name, emit))
except trio.BrokenResourceError: except trio.BrokenResourceError:
log.exception(f'Disconnected from {eventkit_obj} updates') log.exception(f'Disconnected from {eventkit_obj} updates')
eventkit_obj.disconnect(push_tradesies) eventkit_obj.disconnect(push_tradesies)
@ -1307,15 +1306,7 @@ async def deliver_trade_events(
elif isinstance(err, str): elif isinstance(err, str):
code_part, _, reason = err.rpartition(']') code_part, _, reason = err.rpartition(']')
if code_part: if code_part:
for prefix_patt in [ _, _, code = code_part.partition('[code')
'[Errno ',
'[code ',
]:
code_part, _, code = code_part.partition()
if code:
code = int(code)
break
reqid: str = '<unknown>' reqid: str = '<unknown>'
# "Warning:" msg codes, # "Warning:" msg codes,

View File

@ -501,7 +501,7 @@ async def update_ledger_from_api_trades(
for fill in fills: for fill in fills:
con: Contract = fill.contract con: Contract = fill.contract
conid: str = con.conId conid: str = con.conId
pexch: str|None = con.primaryExchange or con.exchange pexch: str | None = con.primaryExchange
if not pexch: if not pexch:
cons = await client.get_con(conid=conid) cons = await client.get_con(conid=conid)

View File

@ -33,21 +33,13 @@ from typing import (
) )
import exchange_calendars as xcals import exchange_calendars as xcals
from exchange_calendars.errors import (
InvalidCalendarName,
)
from pendulum import ( from pendulum import (
parse,
now, now,
Duration, Duration,
Interval, Interval,
Time, Time,
) )
from piker.log import get_logger
log = get_logger(__name__)
if TYPE_CHECKING: if TYPE_CHECKING:
from ib_async import ( from ib_async import (
TradingSession, TradingSession,
@ -64,22 +56,6 @@ if TYPE_CHECKING:
) )
def is_expired(
con_deats: ContractDetails,
) -> bool:
'''
Simple predicate whether the provided contract-deats match and
already lifetime-terminated instrument.
'''
expiry_str: str = con_deats.realExpirationDate
if not expiry_str:
return False
expiry_dt: datetime = parse(expiry_str)
return expiry_dt.date() >= now().date()
def has_weekend( def has_weekend(
period: Interval, period: Interval,
) -> bool: ) -> bool:
@ -114,28 +90,13 @@ def has_holiday(
con.exchange con.exchange
) )
# XXX, ad-hoc handle any IB exchange which are # XXX, ad-hoc handle any IB exchange which are non-std
# non-std via lookup table.. # via lookup table..
std_exch: str = { std_exch: dict = {
'ARCA': 'ARCX', 'ARCA': 'ARCX',
}.get(exch, exch) }.get(exch, exch)
try: cal: ExchangeCalendar = xcals.get_calendar(std_exch)
cal: ExchangeCalendar = xcals.get_calendar(
std_exch
)
except InvalidCalendarName:
# venue has no `exchange_calendars` entry
# (eg. IDEALPRO for forex, PAXOS for
# crypto) -> not a holiday by default since
# weekends are already handled by
# `has_weekend()`.
log.warning(
f'No exchange cal for {std_exch!r},'
f' skipping holiday check..\n'
)
return False
end: datetime = period.end end: datetime = period.end
# _start: datetime = period.start # _start: datetime = period.start
# ?TODO, can rm ya? # ?TODO, can rm ya?
@ -209,22 +170,7 @@ def sesh_times(
get the (day-agnostic) times for the start/end. get the (day-agnostic) times for the start/end.
''' '''
# ?TODO, lookup the next front contract instead? earliest_sesh: Interval = next(iter_sessions(con_deats))
if is_expired(con_deats):
raise ValueError(
f'Contract is already expired!\n'
f'Choose an active alt contract instead.\n'
f'con_deats: {con_deats!r}\n'
)
maybe_sessions: list[Interval] = list(iter_sessions(con_deats))
if not maybe_sessions:
raise ValueError(
f'Contract has no trading-session info?\n'
f'con_deats: {con_deats!r}\n'
)
earliest_sesh: Interval = maybe_sessions[0]
return ( return (
earliest_sesh.start.time(), earliest_sesh.start.time(),
earliest_sesh.end.time(), earliest_sesh.end.time(),
@ -265,13 +211,7 @@ def is_venue_closure(
''' '''
open: Time open: Time
close: Time close: Time
maybe_oc: tuple|None = sesh_times(con_deats) open, close = sesh_times(con_deats)
if maybe_oc is None:
# XXX, should never get here.
breakpoint()
return False
open, close = maybe_oc
# ensure times are in mkt-native timezone # ensure times are in mkt-native timezone
tz: str = con_deats.timeZoneId tz: str = con_deats.timeZoneId

View File

@ -35,7 +35,6 @@ import hashlib
import hmac import hmac
import base64 import base64
import tractor import tractor
# from tractor._exceptions import reg_err_types
import trio import trio
from piker import config from piker import config
@ -53,7 +52,6 @@ from piker.brokers._util import (
SymbolNotFound, SymbolNotFound,
BrokerError, BrokerError,
DataThrottle, DataThrottle,
get_or_raise_on_pair_schema_mismatch,
) )
from piker.accounting import Transaction from piker.accounting import Transaction
from piker.log import get_logger from piker.log import get_logger
@ -109,37 +107,15 @@ def get_kraken_signature(
return sigdigest.decode() return sigdigest.decode()
# class InvalidKey(ValueError): class InvalidKey(ValueError):
# ''' '''
# EAPI:Invalid key EAPI:Invalid key
This error is returned when the API key used for the call is
either expired or disabled, please review the API key in your
Settings -> API tab of account management or generate a new one
and update your application.
# This error is returned when the API key used for the call is '''
# either expired or disabled, please review the API key in your
# Settings -> API tab of account management or generate a new one
# and update your application.
# '''
# class InvalidSession(RuntimeError):
# '''
# ESession:Invalid session
# This error is returned when the ws API key used for an authenticated
# sub/endpoint becomes stale, normally after a sufficient network
# disconnect/outage.
# Normally the sub will need to be restarted, likely re-init of the
# auth handshake sequence.
# '''
# subscription: dict
# reg_err_types([
# InvalidKey,
# InvalidSession,
# ])
class Client: class Client:
@ -167,16 +143,18 @@ class Client:
config: dict[str, str], config: dict[str, str],
httpx_client: httpx.AsyncClient, httpx_client: httpx.AsyncClient,
key_descr: str = '', name: str = '',
api_key: str = '', api_key: str = '',
secret: str = '' secret: str = ''
) -> None: ) -> None:
self._sesh: httpx.AsyncClient = httpx_client self._sesh: httpx.AsyncClient = httpx_client
self._key_descr = key_descr
self._name = name
self._api_key = api_key self._api_key = api_key
self._secret = secret self._secret = secret
self.conf: dict[str, str] = config self.conf: dict[str, str] = config
self._ws_token: str|None = None
@property @property
def pairs(self) -> dict[str, Pair]: def pairs(self) -> dict[str, Pair]:
@ -261,39 +239,6 @@ class Client:
return balances return balances
async def get_ws_token(
self,
params: dict = {},
force_renewal: bool = False,
) -> str:
'''
Get websocket token for authenticated data stream and cache
it for reuse.
Assert a value was actually received before return.
'''
if (
not force_renewal
and
self._ws_token
):
return self._ws_token
resp = await self.endpoint(
'GetWebSocketsToken',
{},
)
if err := resp.get('error'):
raise BrokerError(err)
# resp token for ws init
token: str = resp['result']['token']
assert token
self._ws_token: str = token
return token
async def get_assets( async def get_assets(
self, self,
reload: bool = False, reload: bool = False,
@ -557,16 +502,7 @@ class Client:
# NOTE: always cache in pairs tables for faster lookup # NOTE: always cache in pairs tables for faster lookup
with tractor.devx.maybe_open_crash_handler(): # as bxerr: with tractor.devx.maybe_open_crash_handler(): # as bxerr:
# pair = Pair(xname=xkey, **data) pair = Pair(xname=xkey, **data)
pair: Pair = get_or_raise_on_pair_schema_mismatch(
pair_type=Pair,
fields_data=dict(
xname=xkey,
**data,
),
provider_name='kraken',
# api_url='https://binance-docs.github.io/apidocs/spot/en/#exchange-information',
)
# register the above `Pair` structs for all # register the above `Pair` structs for all
# key-sets/monikers: a set of 4 (frickin) tables # key-sets/monikers: a set of 4 (frickin) tables
@ -732,13 +668,7 @@ class Client:
@acm @acm
async def get_client() -> Client: async def get_client() -> Client:
'''
Load and deliver a `.kraken.api.Client`.
When defined, inject any config delivered from the user's
`brokers.toml` config file.
'''
conf: dict[str, Any] = get_config() conf: dict[str, Any] = get_config()
async with httpx.AsyncClient( async with httpx.AsyncClient(
base_url=_url, base_url=_url,
@ -749,14 +679,13 @@ async def get_client() -> Client:
# connections=4 # connections=4
) as trio_client: ) as trio_client:
if conf: if conf:
api_key_descr: str = conf['key_descr']
client = Client( client = Client(
conf, conf,
httpx_client=trio_client, httpx_client=trio_client,
# TODO: don't break these up and just do internal # TODO: don't break these up and just do internal
# conf lookups instead.. # conf lookups instead..
key_descr=api_key_descr, name=conf['key_descr'],
api_key=conf['api_key'], api_key=conf['api_key'],
secret=conf['secret'] secret=conf['secret']
) )

View File

@ -30,15 +30,12 @@ from typing import (
Any, Any,
AsyncIterator, AsyncIterator,
Iterable, Iterable,
Type,
Union, Union,
) )
from bidict import bidict from bidict import bidict
import trio import trio
import tractor import tractor
from tractor.devx.pformat import ppfmt
# from tractor._exceptions import reg_err_types
from piker.accounting import ( from piker.accounting import (
Position, Position,
@ -48,9 +45,6 @@ from piker.accounting import (
open_trade_ledger, open_trade_ledger,
open_account, open_account,
) )
from piker.config import (
ConfigurationError,
)
from piker.clearing import( from piker.clearing import(
OrderDialogs, OrderDialogs,
) )
@ -73,7 +67,10 @@ from piker.log import (
get_logger, get_logger,
) )
from piker.data import open_symcache from piker.data import open_symcache
from . import api from .api import (
Client,
BrokerError,
)
from .feed import ( from .feed import (
open_autorecon_ws, open_autorecon_ws,
NoBsWs, NoBsWs,
@ -97,7 +94,11 @@ MsgUnion = Union[
] ]
# TODO: make this wrap the `api.Client` and `ws` instances class TooFastEdit(Exception):
'Edit requests faster then api submissions'
# TODO: make this wrap the `Client` and `ws` instances
# and give it methods to submit cancel vs. add vs. edit # and give it methods to submit cancel vs. add vs. edit
# requests? # requests?
class BrokerClient: class BrokerClient:
@ -125,22 +126,23 @@ class BrokerClient:
async def handle_order_requests( async def handle_order_requests(
ws: NoBsWs, ws: NoBsWs,
client: api.Client, client: Client,
ems_order_stream: tractor.MsgStream, ems_order_stream: tractor.MsgStream,
token: str,
apiflows: OrderDialogs, apiflows: OrderDialogs,
ids: bidict[str, int], ids: bidict[str, int],
reqids2txids: dict[int, str], reqids2txids: dict[int, str],
toofastedit: set[int],
) -> None: ) -> None:
''' '''
`trio.Task` which handles order ctl requests from the EMS and Process new order submission requests from the EMS
deliver acks or errors back on that IPC dialog. and deliver acks or errors.
''' '''
# XXX: UGH, let's unify this.. with ``msgspec``!!! # XXX: UGH, let's unify this.. with ``msgspec``!!!
msg: dict|Order msg: dict | Order
async for msg in ems_order_stream: async for msg in ems_order_stream:
log.info(f'Rx order msg:\n{pformat(msg)}') log.info(f'Rx order msg:\n{pformat(msg)}')
match msg: match msg:
@ -154,13 +156,8 @@ async def handle_order_requests(
txid = reqids2txids[reqid] txid = reqids2txids[reqid]
except KeyError: except KeyError:
# XXX: not sure if this block ever gets hit now? # XXX: not sure if this block ever gets hit now?
# SEEMS TO on the race case with the update task?
# - update dark order quickly after
# triggered-submitted and then we have inavlid
# value in `reqids2txids` sent over ws.send()??
log.error('TOO FAST CANCEL/EDIT') log.error('TOO FAST CANCEL/EDIT')
toofastedit.add(reqid) reqids2txids[reqid] = TooFastEdit(reqid)
reqids2txids[reqid] = reqid
await ems_order_stream.send( await ems_order_stream.send(
BrokerdError( BrokerdError(
oid=msg['oid'], oid=msg['oid'],
@ -176,7 +173,7 @@ async def handle_order_requests(
# https://docs.kraken.com/websockets/#message-cancelOrder # https://docs.kraken.com/websockets/#message-cancelOrder
await ws.send_msg({ await ws.send_msg({
'event': 'cancelOrder', 'event': 'cancelOrder',
'token': await client.get_ws_token(), 'token': token,
'reqid': reqid, 'reqid': reqid,
'txid': [txid], # should be txid from submission 'txid': [txid], # should be txid from submission
}) })
@ -188,7 +185,7 @@ async def handle_order_requests(
# validate # validate
order = BrokerdOrder(**msg) order = BrokerdOrder(**msg)
# logic from old `api.Client.submit_limit()` # logic from old `Client.submit_limit()`
if order.oid in ids: if order.oid in ids:
ep: str = 'editOrder' ep: str = 'editOrder'
reqid: int = ids[order.oid] # integer not txid reqid: int = ids[order.oid] # integer not txid
@ -198,15 +195,13 @@ async def handle_order_requests(
# XXX: not sure if this block ever gets hit now? # XXX: not sure if this block ever gets hit now?
log.error('TOO FAST EDIT') log.error('TOO FAST EDIT')
reqids2txids[reqid] = reqid reqids2txids[reqid] = TooFastEdit(reqid)
toofastedit.add(reqid)
await tractor.pause()
await ems_order_stream.send( await ems_order_stream.send(
BrokerdError( BrokerdError(
oid=msg['oid'], oid=msg['oid'],
symbol=msg['symbol'], symbol=msg['symbol'],
reason=( reason=(
f'TooFastEdit reqid: {reqid}, cancelling..' f'TooFastEdit reqid:{reqid}, cancelling..'
), ),
) )
@ -252,7 +247,7 @@ async def handle_order_requests(
# https://docs.kraken.com/websockets/#message-addOrder # https://docs.kraken.com/websockets/#message-addOrder
req = { req = {
'event': ep, 'event': ep,
'token': await client.get_ws_token(), 'token': token,
'reqid': reqid, # remapped-to-int uid from ems 'reqid': reqid, # remapped-to-int uid from ems
# XXX: we set these to the same value since for us # XXX: we set these to the same value since for us
@ -296,15 +291,13 @@ async def handle_order_requests(
symbol=msg['symbol'], symbol=msg['symbol'],
reason=( reason=(
'Invalid request msg:\n{msg}' 'Invalid request msg:\n{msg}'
), ))
)
) )
@acm @acm
async def subscribe( async def subscribe(
ws: NoBsWs, ws: NoBsWs,
client: api.Client,
token: str, token: str,
subs: list[tuple[str, dict]] = [ subs: list[tuple[str, dict]] = [
('ownTrades', { ('ownTrades', {
@ -323,25 +316,12 @@ async def subscribe(
Setup ws api subscriptions: Setup ws api subscriptions:
https://docs.kraken.com/websockets/#message-subscribe https://docs.kraken.com/websockets/#message-subscribe
By default we sign up for trade and order (update) events per By default we sign up for trade and order update events.
`subs`.
''' '''
# more specific logic for this in kraken's sync client: # more specific logic for this in kraken's sync client:
# https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188 # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
latest_token: str = await client.get_ws_token() assert token
if (
token
!=
latest_token
):
log.info(
f'RE-subscribing to WS connection..\n'
f'orig-token: {token!r}\n'
f'latest-token: {latest_token!r}\n'
)
token = latest_token
subnames: set[str] = set() subnames: set[str] = set()
for name, sub_opts in subs: for name, sub_opts in subs:
@ -349,8 +329,7 @@ async def subscribe(
'event': 'subscribe', 'event': 'subscribe',
'subscription': { 'subscription': {
'name': name, 'name': name,
# 'token': await client.get_ws_token(), 'token': token,
'token': latest_token,
**sub_opts, **sub_opts,
} }
} }
@ -365,9 +344,7 @@ async def subscribe(
# wait on subscriptionn acks # wait on subscriptionn acks
with trio.move_on_after(5): with trio.move_on_after(5):
while True: while True:
msg: dict = await ws.recv_msg() match (msg := await ws.recv_msg()):
fmt_msg: str = ppfmt(msg)
match msg:
case { case {
'event': 'subscriptionStatus', 'event': 'subscriptionStatus',
'status': 'subscribed', 'status': 'subscribed',
@ -385,49 +362,10 @@ async def subscribe(
'event': 'subscriptionStatus', 'event': 'subscriptionStatus',
'status': 'error', 'status': 'error',
'errorMessage': errmsg, 'errorMessage': errmsg,
'subscription': sub_opts,
} as msg: } as msg:
if errmsg: raise RuntimeError(
etype_str, _, ev_msg = errmsg.partition(':') f'{errmsg}\n\n'
etype: Type[Exception] = getattr( f'{pformat(msg)}'
api,
etype_str,
RuntimeError,
)
exc = etype(
f'{ev_msg}\n'
f'\n'
f'{fmt_msg}'
)
# !TODO, for `InvalidSession` we should
# attempt retries to resub and ensure all
# sibling (task) `token` holders update
# their refs accoridingly!
match (etype_str, ev_msg):
case (
'ESession',
'Invalid session',
):
# attempt ws-token refresh
token: str = await client.get_ws_token(
force_renewal=True
)
await tractor.pause()
continue
case _:
log.warning(
f'Unhandled subscription-status,\n'
f'{fmt_msg}'
)
raise exc
case _:
log.warning(
f'Unknown ws event rxed?\n'
f'{fmt_msg}'
) )
yield yield
@ -523,27 +461,11 @@ async def open_trade_dialog(
# (much like the web UI let's you set an "account currency") # (much like the web UI let's you set an "account currency")
# such that all positions (nested or flat) will be translated to # such that all positions (nested or flat) will be translated to
# this source currency's terms. # this source currency's terms.
src_fiat = client.conf.get('src_fiat') src_fiat = client.conf['src_fiat']
if not src_fiat:
raise ConfigurationError(
'No `src_fiat: str` field defined in `brokers.toml`'
)
# auth required block # auth required block
conf: dict = client.conf acctid = client._name
accounts: dict = conf.get('accounts') acc_name = 'kraken.' + acctid
acctid: str = client._key_descr
if not accounts.get(acctid):
raise ConfigurationError(
f'No API-key found for account-alias defined as {acctid!r} !\n'
f'\n'
f'Did set a `kraken.accounts.*` entry in your `brokers.toml`?\n'
f'It should look something like,\n'
f'\n'
f'[kraken]\n'
f'accounts.{acctid} = {acctid!r}\n'
)
fqan: str = f'kraken.{acctid}'
# task local msg dialog tracking # task local msg dialog tracking
apiflows = OrderDialogs() apiflows = OrderDialogs()
@ -662,10 +584,7 @@ async def open_trade_dialog(
acctid, acctid,
) )
# sync with EMS delivering pps and accounts # sync with EMS delivering pps and accounts
await ctx.started(( await ctx.started((ppmsgs, [acc_name]))
ppmsgs,
[fqan],
))
# TODO: ideally this blocks the this task # TODO: ideally this blocks the this task
# as little as possible. we need to either do # as little as possible. we need to either do
@ -673,11 +592,14 @@ async def open_trade_dialog(
# async file IO api? # async file IO api?
acnt.write_config() acnt.write_config()
token: str = await client.get_ws_token() # Get websocket token for authenticated data stream
# Assert that a token was actually received.
resp = await client.endpoint('GetWebSocketsToken', {})
if err := resp.get('error'):
raise BrokerError(err)
# XXX tracks EMS orders which are updated too quickly # resp token for ws init
# on the emds side with sync-issues on the kraken side. token: str = resp['result']['token']
toofastedit: set[int] = set()
ws: NoBsWs ws: NoBsWs
async with ( async with (
@ -686,24 +608,23 @@ async def open_trade_dialog(
'wss://ws-auth.kraken.com/', 'wss://ws-auth.kraken.com/',
fixture=partial( fixture=partial(
subscribe, subscribe,
client=client,
token=token, token=token,
), ),
) as ws, ) as ws,
aclosing(stream_messages(ws)) as stream, aclosing(stream_messages(ws)) as stream,
trio.open_nursery() as tn, trio.open_nursery() as nurse,
): ):
# task for processing inbound requests from ems # task for processing inbound requests from ems
tn.start_soon(partial( nurse.start_soon(
handle_order_requests, handle_order_requests,
ws=ws, ws,
client=client, client,
ems_order_stream=ems_stream, ems_stream,
apiflows=apiflows, token,
ids=ids, apiflows,
reqids2txids=reqids2txids, ids,
toofastedit=toofastedit, reqids2txids,
)) )
# enter relay loop # enter relay loop
await handle_order_updates( await handle_order_updates(
@ -714,23 +635,22 @@ async def open_trade_dialog(
apiflows=apiflows, apiflows=apiflows,
ids=ids, ids=ids,
reqids2txids=reqids2txids, reqids2txids=reqids2txids,
toofastedit=toofastedit,
acnt=acnt, acnt=acnt,
ledger=ledger, ledger=ledger,
acctid=acctid, acctid=acctid,
acc_name=fqan, acc_name=acc_name,
token=token,
) )
async def handle_order_updates( async def handle_order_updates(
client: api.Client, # only for pairs table needed in ledger proc client: Client, # only for pairs table needed in ledger proc
ws: NoBsWs, ws: NoBsWs,
ws_stream: AsyncIterator, ws_stream: AsyncIterator,
ems_stream: tractor.MsgStream, ems_stream: tractor.MsgStream,
apiflows: OrderDialogs, apiflows: OrderDialogs,
ids: bidict[str, int], ids: bidict[str, int],
reqids2txids: bidict[int, str], reqids2txids: bidict[int, str],
toofastedit: set[int],
acnt: Account, acnt: Account,
# transaction records which will be updated # transaction records which will be updated
@ -739,6 +659,7 @@ async def handle_order_updates(
# ledger_trans: dict[str, Transaction], # ledger_trans: dict[str, Transaction],
acctid: str, acctid: str,
acc_name: str, acc_name: str,
token: str,
) -> None: ) -> None:
''' '''
@ -868,7 +789,7 @@ async def handle_order_updates(
for order_msg in order_msgs: for order_msg in order_msgs:
log.info( log.info(
f'`openOrders` msg update_{seq}:\n' f'`openOrders` msg update_{seq}:\n'
f'{ppfmt(order_msg)}' f'{pformat(order_msg)}'
) )
txid, update_msg = list(order_msg.items())[0] txid, update_msg = list(order_msg.items())[0]
@ -1038,8 +959,10 @@ async def handle_order_updates(
# <-> ems dialog. # <-> ems dialog.
if ( if (
status == 'open' status == 'open'
and and isinstance(
reqid in toofastedit reqids2txids.get(reqid),
TooFastEdit
)
): ):
# TODO: don't even allow this case # TODO: don't even allow this case
# by not moving the client side line # by not moving the client side line
@ -1054,8 +977,7 @@ async def handle_order_updates(
# https://docs.kraken.com/websockets/#message-cancelOrder # https://docs.kraken.com/websockets/#message-cancelOrder
await ws.send_msg({ await ws.send_msg({
'event': 'cancelOrder', 'event': 'cancelOrder',
# 'token': token, 'token': token,
'token': await client.get_ws_token(),
'reqid': reqid or 0, 'reqid': reqid or 0,
'txid': [txid], 'txid': [txid],
}) })
@ -1201,8 +1123,7 @@ async def handle_order_updates(
txid txid
# we throttle too-fast-requests on the ems side # we throttle too-fast-requests on the ems side
and and not isinstance(txid, TooFastEdit)
reqid in toofastedit
): ):
# client was editting too quickly # client was editting too quickly
# so we instead cancel this order # so we instead cancel this order
@ -1210,8 +1131,7 @@ async def handle_order_updates(
f'Cancelling {reqid}@{txid} due to:\n {event}') f'Cancelling {reqid}@{txid} due to:\n {event}')
await ws.send_msg({ await ws.send_msg({
'event': 'cancelOrder', 'event': 'cancelOrder',
# 'token': token, 'token': token,
'token': await client.get_ws_token(),
'reqid': reqid or 0, 'reqid': reqid or 0,
'txid': [txid], 'txid': [txid],
}) })

View File

@ -19,9 +19,6 @@ Symbology defs and search.
''' '''
from decimal import Decimal from decimal import Decimal
from typing import (
ClassVar,
)
import tractor import tractor
@ -89,14 +86,9 @@ class Pair(Struct):
short_position_limit: float = 0 short_position_limit: float = 0
long_position_limit: float = float('inf') long_position_limit: float = float('inf')
# TODO, add API note when this was added!
execution_venue: str|None = None
# TODO: should we make this a literal NamespacePath ref? # TODO: should we make this a literal NamespacePath ref?
ns_path: str = 'piker.brokers.kraken:Pair' ns_path: str = 'piker.brokers.kraken:Pair'
_api_url: ClassVar[str] = 'https://docs.kraken.com/api/docs/rest-api/get-tradable-asset-pairs'
@property @property
def bs_mktid(self) -> str: def bs_mktid(self) -> str:
''' '''

View File

@ -95,9 +95,6 @@ _time_frames = {
class QuestradeError(Exception): class QuestradeError(Exception):
"Non-200 OK response code" "Non-200 OK response code"
from tractor._exceptions import reg_err_types
reg_err_types([QuestradeError])
class ContractsKey(NamedTuple): class ContractsKey(NamedTuple):
symbol: str symbol: str

View File

@ -26,6 +26,7 @@ from collections import (
from contextlib import asynccontextmanager as acm from contextlib import asynccontextmanager as acm
from decimal import Decimal from decimal import Decimal
from math import isnan from math import isnan
from pprint import pformat
from time import time_ns from time import time_ns
from types import ModuleType from types import ModuleType
from typing import ( from typing import (
@ -42,7 +43,6 @@ import trio
from trio_typing import TaskStatus from trio_typing import TaskStatus
import tractor import tractor
from tractor import trionics from tractor import trionics
from tractor.devx.pformat import ppfmt
from ._util import ( from ._util import (
log, # sub-sys logger log, # sub-sys logger
@ -490,7 +490,7 @@ async def open_brokerd_dialog(
msg = BrokerdPosition(**msg) msg = BrokerdPosition(**msg)
log.info( log.info(
f'loading pp for {brokermod.__name__}:\n' f'loading pp for {brokermod.__name__}:\n'
f'{ppfmt(msg.to_dict())}', f'{pformat(msg.to_dict())}',
) )
# TODO: state any mismatch here? # TODO: state any mismatch here?
@ -840,7 +840,7 @@ async def translate_and_relay_brokerd_events(
brokerd_msg: dict[str, Any] brokerd_msg: dict[str, Any]
async for brokerd_msg in brokerd_trades_stream: async for brokerd_msg in brokerd_trades_stream:
fmsg = ppfmt(brokerd_msg) fmsg = pformat(brokerd_msg)
log.info( log.info(
f'Rx brokerd trade msg:\n' f'Rx brokerd trade msg:\n'
f'{fmsg}' f'{fmsg}'
@ -1039,8 +1039,7 @@ async def translate_and_relay_brokerd_events(
) )
status_msg.reqid = reqid # THIS LINE IS CRITICAL! status_msg.reqid = reqid # THIS LINE IS CRITICAL!
if not status_msg.brokerd_msg: status_msg.brokerd_msg = msg
status_msg.brokerd_msg = msg
status_msg.src = msg.broker_details['name'] status_msg.src = msg.broker_details['name']
if not status_msg.req: if not status_msg.req:
@ -1073,7 +1072,7 @@ async def translate_and_relay_brokerd_events(
else: # open else: # open
# relayed from backend but probably not handled so # relayed from backend but probably not handled so
# just log it # just log it
log.info(f'{broker!r} opened order {msg}') log.info(f'{broker} opened order {msg}')
# BrokerdFill # BrokerdFill
case { case {
@ -1186,7 +1185,7 @@ async def translate_and_relay_brokerd_events(
}: }:
msg = ( msg = (
f'Unhandled broker status for dialog {reqid}:\n' f'Unhandled broker status for dialog {reqid}:\n'
f'{ppfmt(brokerd_msg)}' f'{pformat(brokerd_msg)}'
) )
if ( if (
oid := book._ems2brokerd_ids.inverse.get(reqid) oid := book._ems2brokerd_ids.inverse.get(reqid)
@ -1195,7 +1194,7 @@ async def translate_and_relay_brokerd_events(
# clearable limits.. # clearable limits..
if status_msg := book._active.get(oid): if status_msg := book._active.get(oid):
msg += ( msg += (
f'last status msg: {ppfmt(status_msg)}\n\n' f'last status msg: {pformat(status_msg)}\n\n'
f'this msg:{fmsg}\n' f'this msg:{fmsg}\n'
) )
@ -1234,7 +1233,7 @@ async def process_client_order_cmds(
async for cmd in client_order_stream: async for cmd in client_order_stream:
log.info( log.info(
f'Received order cmd:\n' f'Received order cmd:\n'
f'{ppfmt(cmd)}\n' f'{pformat(cmd)}\n'
) )
# CAWT DAMN we need struct support! # CAWT DAMN we need struct support!
@ -1399,8 +1398,8 @@ async def process_client_order_cmds(
# handle relaying the ems side responses back to # handle relaying the ems side responses back to
# the client/cmd sender from this request # the client/cmd sender from this request
log.info( log.info(
f'Sending live order to {broker!r}:\n' f'Sending live order to {broker}:\n'
f'{ppfmt(msg)}' f'{pformat(msg)}'
) )
await brokerd_order_stream.send(msg) await brokerd_order_stream.send(msg)

View File

@ -27,7 +27,7 @@ from types import ModuleType
import click import click
import trio import trio
import tractor import tractor
from tractor.discovery._multiaddr import parse_maddr from tractor._multiaddr import parse_maddr
from ..log import ( from ..log import (
get_console_log, get_console_log,
@ -345,7 +345,7 @@ def services(
if not ports: if not ports:
ports: list[int] = [_default_registry_port] ports: list[int] = [_default_registry_port]
addr = tractor.discovery._addr.wrap_address( addr = tractor._addr.wrap_address(
addr=(host, ports[0]) addr=(host, ports[0])
) )

View File

@ -36,8 +36,6 @@ except ModuleNotFoundError:
import tomli as tomllib import tomli as tomllib
from tractor._exceptions import reg_err_types
from tractor.devx.pformat import ppfmt
from .log import get_logger from .log import get_logger
log = get_logger('broker-config') log = get_logger('broker-config')
@ -174,12 +172,6 @@ class ConfigurationError(Exception):
class NoSignature(ConfigurationError): class NoSignature(ConfigurationError):
'No credentials setup for broker backend!' 'No credentials setup for broker backend!'
# auto-register for tractor IPC exc-marshalling.
reg_err_types([
ConfigurationError,
*ConfigurationError.__subclasses__(),
])
def _override_config_dir( def _override_config_dir(
path: str path: str
@ -358,56 +350,30 @@ def write(
def load_accounts( def load_accounts(
providers: list[str]|None = None providers: list[str] | None = None
) -> bidict[str, str|None]:
) -> bidict[str, str | None]:
conf, path = load( conf, path = load(
conf_name='brokers', conf_name='brokers',
) )
accounts = bidict({ accounts = bidict()
# XXX, default paper-engine entry; this MUST be set. for provider_name, section in conf.items():
'paper': None, accounts_section = section.get('accounts')
})
msg: str = (
'Loading account(s) from `brokers.toml`,\n'
)
for (
provider_name,
section,
) in conf.items():
accounts_section: dict[str, str] = section.get('accounts')
if accounts_section is None:
msg += f'No accounts declared for {provider_name!r}?\n'
continue
# msg += f'Loaded accounts for {provider_name!r}?\n'
if ( if (
providers is None providers is None or
or ( providers and provider_name in providers
providers
and
provider_name in providers
)
): ):
for ( if accounts_section is None:
label, log.warning(f'No accounts named for {provider_name}?')
value, continue
) in accounts_section.items(): else:
account_alias: str = f'{provider_name}.{label}' for label, value in accounts_section.items():
accounts[account_alias] = value accounts[
msg += f'{account_alias} = {value!r}\n' f'{provider_name}.{label}'
] = value
else: # our default paper engine entry
log.debug( accounts['paper'] = None
f'NOT loading account(s) for entry in `brokers.toml`,\n'
f'The account provider was not requested for loading.\n'
f'requested-providers: {providers!r}\n'
f'this-provider: {provider_name!r}\n'
f'\n'
f'{ppfmt(accounts_section)}\n'
)
# ?TODO? mk this bp work?
# breakpoint()
log.info(msg)
return accounts return accounts

View File

@ -168,7 +168,7 @@ async def _reconnect_forever(
nobsws: NoBsWs, nobsws: NoBsWs,
reset_after: int, # msg recv timeout before reset attempt reset_after: int, # msg recv timeout before reset attempt
fixture: AsyncContextManager|None = None, fixture: AsyncContextManager | None = None,
task_status: TaskStatus = trio.TASK_STATUS_IGNORED, task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
) -> None: ) -> None:
@ -185,7 +185,7 @@ async def _reconnect_forever(
async def proxy_msgs( async def proxy_msgs(
ws: WebSocketConnection, ws: WebSocketConnection,
rent_cs: trio.CancelScope, # parent cancel scope rent_cs: trio.CancelScope, # parent cancel scope
) -> None: ):
''' '''
Receive (under `timeout` deadline) all msgs from from underlying Receive (under `timeout` deadline) all msgs from from underlying
websocket and relay them to (calling) parent task via ``trio`` websocket and relay them to (calling) parent task via ``trio``
@ -206,7 +206,7 @@ async def _reconnect_forever(
except nobsws.recon_errors: except nobsws.recon_errors:
log.exception( log.exception(
f'{src_mod}\n' f'{src_mod}\n'
f'{url!r} connection failed\n' f'{url} connection bail with:'
) )
with trio.CancelScope(shield=True): with trio.CancelScope(shield=True):
await trio.sleep(0.5) await trio.sleep(0.5)
@ -269,7 +269,7 @@ async def _reconnect_forever(
nobsws._ws = ws nobsws._ws = ws
log.info( log.info(
f'{src_mod}\n' f'{src_mod}\n'
f'Connection success: {url!r}' f'Connection success: {url}'
) )
# begin relay loop to forward msgs # begin relay loop to forward msgs
@ -341,7 +341,7 @@ async def _reconnect_forever(
async def open_autorecon_ws( async def open_autorecon_ws(
url: str, url: str,
fixture: AsyncContextManager|None = None, fixture: AsyncContextManager | None = None,
# time in sec between msgs received before # time in sec between msgs received before
# we presume connection might need a reset. # we presume connection might need a reset.
@ -361,7 +361,7 @@ async def open_autorecon_ws(
and restarts the full http(s) handshake on catches of certain and restarts the full http(s) handshake on catches of certain
connetivity errors, or some user defined recv timeout. connetivity errors, or some user defined recv timeout.
You can provide a `fixture` async-context-manager which will be You can provide a ``fixture`` async-context-manager which will be
entered/exitted around each connection reset; eg. for entered/exitted around each connection reset; eg. for
(re)requesting subscriptions without requiring streaming setup (re)requesting subscriptions without requiring streaming setup
code to rerun. code to rerun.
@ -402,8 +402,7 @@ async def open_autorecon_ws(
except NoBsWs.recon_errors as con_err: except NoBsWs.recon_errors as con_err:
log.warning( log.warning(
f'Entire ws-channel disconnect due to,\n' f'Entire ws-channel disconnect due to,\n'
f'\n' f'con_err: {con_err!r}\n'
f'{con_err!r}\n'
) )
@ -425,7 +424,7 @@ class JSONRPCResult(Struct):
async def open_jsonrpc_session( async def open_jsonrpc_session(
url: str, url: str,
start_id: int = 0, start_id: int = 0,
response_type: Type[Struct] = JSONRPCResult, response_type: type = JSONRPCResult,
msg_recv_timeout: float = float('inf'), msg_recv_timeout: float = float('inf'),
# ^NOTE, since only `deribit` is using this jsonrpc stuff atm # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
# and options mkts are generally "slow moving".. # and options mkts are generally "slow moving"..
@ -436,10 +435,7 @@ async def open_jsonrpc_session(
# broken and never restored with wtv init sequence is required to # broken and never restored with wtv init sequence is required to
# re-establish a working req-resp session. # re-establish a working req-resp session.
) -> Callable[ ) -> Callable[[str, dict], dict]:
[str, dict],
dict,
]:
''' '''
Init a json-RPC-over-websocket connection to the provided `url`. Init a json-RPC-over-websocket connection to the provided `url`.
@ -535,18 +531,14 @@ async def open_jsonrpc_session(
'id': mid, 'id': mid,
} if not rpc_results.get(mid): } if not rpc_results.get(mid):
log.warning( log.warning(
f'Unexpected ws msg?\n' f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
f'{json.dumps(msg, indent=4)}'
) )
case { case {
'method': _, 'method': _,
'params': _, 'params': _,
}: }:
log.debug( log.debug(f'Recieved\n{msg}')
f'Recieved\n'
f'{msg!r}'
)
case { case {
'error': error 'error': error
@ -562,15 +554,12 @@ async def open_jsonrpc_session(
result['event'].set() result['event'].set()
log.error( log.error(
f'JSONRPC request failed\n' f'JSONRPC request failed\n'
f'req: {req_msg!r}\n' f'req: {req_msg}\n'
f'resp: {error!r}\n' f'resp: {error}\n'
) )
case _: case _:
log.warning( log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
f'Unhandled JSON-RPC msg!?\n'
f'{msg!r}'
)
tn.start_soon(recv_task) tn.start_soon(recv_task)
yield json_rpc yield json_rpc

View File

@ -77,7 +77,7 @@ from ._sampling import (
if TYPE_CHECKING: if TYPE_CHECKING:
from .flows import Flume from .flows import Flume
from tractor.discovery._addr import Address from tractor._addr import Address
from tractor.msg.types import Aid from tractor.msg.types import Aid
@ -973,6 +973,9 @@ async def open_feed(
# assert flume.mkt.fqme == fqme # assert flume.mkt.fqme == fqme
feed.flumes[fqme] = flume feed.flumes[fqme] = flume
# TODO: do we need this?
flume.feed = feed
# attach and cache shm handles # attach and cache shm handles
rt_shm = flume.rt_shm rt_shm = flume.rt_shm
assert rt_shm assert rt_shm

View File

@ -22,6 +22,9 @@ real-time data processing data-structures.
""" """
from __future__ import annotations from __future__ import annotations
from typing import (
TYPE_CHECKING,
)
import tractor import tractor
import pendulum import pendulum
@ -35,6 +38,9 @@ from tractor.ipc._shm import (
) )
from piker.accounting import MktPair from piker.accounting import MktPair
if TYPE_CHECKING:
from piker.data.feed import Feed
class Flume(Struct): class Flume(Struct):
''' '''
@ -74,6 +80,10 @@ class Flume(Struct):
izero_rt: int = 0 izero_rt: int = 0
throttle_rate: int | None = None throttle_rate: int | None = None
# TODO: do we need this really if we can pull the `Portal` from
# ``tractor``'s internals?
feed: Feed|None = None
@property @property
def rt_shm(self) -> ShmArray: def rt_shm(self) -> ShmArray:
@ -146,6 +156,7 @@ class Flume(Struct):
# will get instead some kind of msg-compat version # will get instead some kind of msg-compat version
# that it can load. # that it can load.
msg.pop('stream') msg.pop('stream')
msg.pop('feed')
msg.pop('_rt_shm') msg.pop('_rt_shm')
msg.pop('_hist_shm') msg.pop('_hist_shm')

View File

@ -28,7 +28,6 @@ from typing import (
) )
from msgspec import field from msgspec import field
from tractor._exceptions import reg_err_types
from piker.types import Struct from piker.types import Struct
from piker.accounting import ( from piker.accounting import (
@ -44,8 +43,6 @@ class FeedInitializationError(ValueError):
''' '''
reg_err_types([FeedInitializationError])
class FeedInit(Struct, frozen=True): class FeedInit(Struct, frozen=True):
''' '''

View File

@ -91,7 +91,7 @@ async def open_piker_runtime(
try: try:
actor = tractor.current_actor() actor = tractor.current_actor()
except tractor._exceptions.NoRuntime: except tractor._exceptions.NoRuntime:
tractor.runtime._state._runtime_vars[ tractor._state._runtime_vars[
'piker_vars' 'piker_vars'
] = tractor_runtime_overrides ] = tractor_runtime_overrides
@ -264,7 +264,7 @@ async def maybe_open_pikerd(
**kwargs, **kwargs,
) -> ( ) -> (
tractor.Portal tractor._portal.Portal
|ClassVar[Services] |ClassVar[Services]
): ):
''' '''

View File

@ -49,7 +49,6 @@ from requests.exceptions import (
ReadTimeout, ReadTimeout,
) )
from tractor._exceptions import reg_err_types
from piker.log import ( from piker.log import (
get_console_log, get_console_log,
get_logger, get_logger,
@ -67,11 +66,6 @@ class DockerNotStarted(Exception):
class ApplicationLogError(Exception): class ApplicationLogError(Exception):
'App in container reported an error in logs' 'App in container reported an error in logs'
reg_err_types([
DockerNotStarted,
ApplicationLogError,
])
@acm @acm
async def open_docker( async def open_docker(

View File

@ -79,17 +79,10 @@ async def maybe_spawn_daemon(
lock = Services.locks[service_name] lock = Services.locks[service_name]
await lock.acquire() await lock.acquire()
if not pikerd_kwargs:
# XXX NOTE, pin to apprope `tractor` branch!
rtvs: dict = tractor.get_runtime_vars()
registry_addrs: list[tuple] = list(
map(tuple, rtvs['_registry_addrs'])
)
try: try:
async with find_service( async with find_service(
service_name, service_name,
registry_addrs=registry_addrs, registry_addrs=[('127.0.0.1', 6116)],
) as portal: ) as portal:
if portal is not None: if portal is not None:
lock.release() lock.release()
@ -106,7 +99,6 @@ async def maybe_spawn_daemon(
# process tree # process tree
async with maybe_open_pikerd( async with maybe_open_pikerd(
loglevel=loglevel, loglevel=loglevel,
registry_addrs=registry_addrs,
**pikerd_kwargs, **pikerd_kwargs,
) as pikerd_portal: ) as pikerd_portal:
@ -150,65 +142,7 @@ async def maybe_spawn_daemon(
async with tractor.wait_for_actor(service_name) as portal: async with tractor.wait_for_actor(service_name) as portal:
lock.release() lock.release()
yield portal yield portal
# --- ---- --- await portal.cancel_actor()
# XXX NOTE XXX
# --- ---- ---
# DO NOT PUT A `portal.cancel_actor()` here (as was prior)!
#
# Doing so will cause an "out-of-band" ctxc
# (`tractor.ContextCancelled`) to be raised inside the
# `ServiceMngr.open_context_in_task()`'s call to
# `ctx.wait_for_result()` AND the internal self-ctxc
# "graceful capture" WILL NOT CATCH IT!
#
# This can cause certain types of operations to raise
# that ctxc BEFORE THEY `return`, resulting in
# a "false-negative" ctxc being raised when really
# nothing actually failed, other then our semantic
# "failure" to suppress an expected, graceful,
# self-cancel scenario..
#
# bUt wHy duZ It WorK lIKe dis..
# ------------------------------
# from the perspective of the `tractor.Context` this
# cancel request was conducted "out of band" since
# `Context.cancel()` was never called and thus the
# `._cancel_called: bool` was never set. Despite the
# remote `.canceller` being set to `pikerd` (i.e. the
# same `Actor.uid` of the raising service-mngr task) the
# service-task's ctx itself was never marked as having
# requested cancellation and thus still raises the ctxc
# bc it was unaware of any such request.
#
# How to make grokin these cases easier tho?
# ------------------------------------------
# Because `Portal.cancel_actor()` was called it requests
# "full-`Actor`-runtime-cancellation" of it's peer
# process which IS NOT THE SAME as a single inter-actor
# RPC task cancelling its local context with a remote
# peer `Task` in that same peer process.
#
# ?TODO? It might be better if we do one (or all) of the
# following:
#
# -[ ] at least set a special message for the
# `ContextCancelled` when raised locally by the
# unaware ctx task such that we check for the
# `.canceller` being *our `Actor`* and in the case
# where `Context._cancel_called == False` we specially
# note that this is likely an "out-of-band"
# runtime-cancel request triggered by some call to
# `Portal.cancel_actor()`, possibly even reporting the
# exact LOC of that caller by tracking it inside our
# portal-type?
# -[ ] possibly add another field `ContextCancelled` like
# maybe a,
# `.request_type: Literal['os', 'proc', 'actor',
# 'ctx']` type thing which would allow immediately
# being able to tell what kind of cancellation caused
# the unexpected ctxc?
# -[ ] REMOVE THIS COMMENT, once we've settled on how to
# better augment `tractor` to be more explicit on this!
except BaseException as _err: except BaseException as _err:
err = _err err = _err
@ -218,13 +152,11 @@ async def maybe_spawn_daemon(
lock.statistics().owner is current_task() lock.statistics().owner is current_task()
): ):
log.exception( log.exception(
f'Releasing stale lock after crash..?\n' f'Releasing stale lock after crash..?'
f'\n'
f'{err!r}\n' f'{err!r}\n'
) )
lock.release() lock.release()
raise err
raise
async def spawn_emsd( async def spawn_emsd(

View File

@ -48,7 +48,7 @@ log = get_logger(name=__name__)
# new actors and supervises them to completion? # new actors and supervises them to completion?
class Services: class Services:
actor_n: tractor.ActorNursery actor_n: tractor._supervise.ActorNursery
service_n: trio.Nursery service_n: trio.Nursery
debug_mode: bool # tractor sub-actor debug mode flag debug_mode: bool # tractor sub-actor debug mode flag
service_tasks: dict[ service_tasks: dict[

View File

@ -42,7 +42,6 @@ from msgspec.msgpack import (
# import pyqtgraph as pg # import pyqtgraph as pg
import numpy as np import numpy as np
import tractor import tractor
from tractor._exceptions import reg_err_types
from trio_websocket import open_websocket_url from trio_websocket import open_websocket_url
from anyio_marketstore import ( # noqa from anyio_marketstore import ( # noqa
open_marketstore_client, open_marketstore_client,
@ -383,8 +382,6 @@ def quote_to_marketstore_structarray(
class MarketStoreError(Exception): class MarketStoreError(Exception):
"Generic marketstore client error" "Generic marketstore client error"
reg_err_types([MarketStoreError])
# def err_on_resp(response: dict) -> None: # def err_on_resp(response: dict) -> None:
# """Raise any errors found in responses from client request. # """Raise any errors found in responses from client request.

View File

@ -42,17 +42,15 @@ from typing import (
) )
import numpy as np import numpy as np
from tractor._exceptions import reg_err_types
from piker import config from .. import config
from piker.log import ( from ..service import (
check_for_service,
)
from ..log import (
get_logger, get_logger,
get_console_log, get_console_log,
) )
from piker.service import (
check_for_service,
)
subsys: str = 'piker.storage' subsys: str = 'piker.storage'
log = get_logger(subsys) log = get_logger(subsys)
@ -153,12 +151,6 @@ class StorageConnectionError(ConnectionError):
''' '''
reg_err_types([
TimeseriesNotFound,
StorageConnectionError,
])
def get_storagemod( def get_storagemod(
name: str, name: str,

View File

@ -292,11 +292,6 @@ def ldshm(
f'Something is wrong with time period for {shm}:\n{times}' f'Something is wrong with time period for {shm}:\n{times}'
) )
period_s: float = float(max(d1, d2, med)) period_s: float = float(max(d1, d2, med))
log.info(
f'Processing shm buffer:\n'
f' file: {shmfile.name}\n'
f' period: {period_s}s\n'
)
null_segs: tuple = tsp.get_null_segs( null_segs: tuple = tsp.get_null_segs(
frame=shm.array, frame=shm.array,
@ -306,7 +301,7 @@ def ldshm(
# TODO: call null-seg fixer somehow? # TODO: call null-seg fixer somehow?
if null_segs: if null_segs:
if tractor.runtime._state.is_debug_mode(): if tractor._state.is_debug_mode():
await tractor.pause() await tractor.pause()
# async with ( # async with (
# trio.open_nursery() as tn, # trio.open_nursery() as tn,

View File

@ -276,41 +276,14 @@ def get_null_segs(
absi_zdiff: np.ndarray = np.diff(absi_zeros) absi_zdiff: np.ndarray = np.diff(absi_zeros)
if zero_t.size < 2: if zero_t.size < 2:
idx: int = zero_t['index'][0] try:
idx_before: int = idx - 1 breakpoint()
idx_after: int = idx + 1 except RuntimeError:
index = frame['index'] # XXX, if greenback not active from
before_cond = idx_before <= index # piker store ldshm cmd..
after_cond = index <= idx_after log.exception(
bars: np.ndarray = frame[ "Can't debug single-sample null!\n"
before_cond )
&
after_cond
]
time: np.ndarray = bars['time']
from pendulum import (
from_timestamp,
Interval,
)
gap: Interval = (
from_timestamp(time[-1])
-
from_timestamp(time[0])
)
log.warning(
f'Single OHLCV-bar null-segment detected??\n'
f'gap -> {gap}\n'
)
# ^^XXX, if you want to debug the above bar-gap^^
# try:
# breakpoint()
# except RuntimeError:
# # XXX, if greenback not active from
# # piker store ldshm cmd..
# log.exception(
# "Can't debug single-sample null!\n"
# )
return None return None

View File

@ -30,11 +30,6 @@ import tractor
from piker.data._formatters import BGM from piker.data._formatters import BGM
from piker.storage import log from piker.storage import log
from piker.toolz.profile import (
Profiler,
pg_profile_enabled,
ms_slower_then,
)
from piker.ui._style import get_fonts from piker.ui._style import get_fonts
if TYPE_CHECKING: if TYPE_CHECKING:
@ -97,22 +92,12 @@ async def markup_gaps(
# gap's duration. # gap's duration.
show_txt: bool = False, show_txt: bool = False,
# A/B comparison: render individual arrows alongside batch
# for visual comparison
show_individual_arrows: bool = False,
) -> dict[int, dict]: ) -> dict[int, dict]:
''' '''
Remote annotate time-gaps in a dt-fielded ts (normally OHLC) Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
with rectangles. with rectangles.
''' '''
profiler = Profiler(
msg=f'markup_gaps() for {gaps.height} gaps',
disabled=False,
ms_threshold=0.0,
)
# XXX: force chart redraw FIRST to ensure PlotItem coordinate # XXX: force chart redraw FIRST to ensure PlotItem coordinate
# system is properly initialized before we position annotations! # system is properly initialized before we position annotations!
# Without this, annotations may be misaligned on first creation # Without this, annotations may be misaligned on first creation
@ -121,19 +106,6 @@ async def markup_gaps(
fqme=fqme, fqme=fqme,
timeframe=timeframe, timeframe=timeframe,
) )
profiler('first `.redraw()` before annot creation')
log.info(
f'markup_gaps() called:\n'
f' fqme: {fqme}\n'
f' timeframe: {timeframe}s\n'
f' gaps.height: {gaps.height}\n'
)
# collect all annotation specs for batch submission
rect_specs: list[dict] = []
arrow_specs: list[dict] = []
text_specs: list[dict] = []
aids: dict[int] = {} aids: dict[int] = {}
for i in range(gaps.height): for i in range(gaps.height):
@ -196,7 +168,7 @@ async def markup_gaps(
prev_r: pl.DataFrame = prev_row_by_i prev_r: pl.DataFrame = prev_row_by_i
# debug any missing pre-row # debug any missing pre-row
if tractor.runtime._state.is_debug_mode(): if tractor._state.is_debug_mode():
await tractor.pause() await tractor.pause()
istart: int = prev_r['index'][0] istart: int = prev_r['index'][0]
@ -245,38 +217,56 @@ async def markup_gaps(
# 1: 'wine', # down-gap # 1: 'wine', # down-gap
# }[sgn] # }[sgn]
# collect rect spec (no fqme/timeframe, added by batch rect_kwargs: dict[str, Any] = dict(
# API) fqme=fqme,
rect_spec: dict[str, Any] = dict( timeframe=timeframe,
meth='set_view_pos',
start_pos=lc, start_pos=lc,
end_pos=ro, end_pos=ro,
color=color, color=color,
update_label=False,
start_time=start_time, start_time=start_time,
end_time=end_time, end_time=end_time,
) )
rect_specs.append(rect_spec)
# add up/down rects
aid: int|None = await actl.add_rect(**rect_kwargs)
if aid is None:
log.error(
f'Failed to add rect for,\n'
f'{rect_kwargs!r}\n'
f'\n'
f'Skipping to next gap!\n'
)
continue
assert aid
aids[aid] = rect_kwargs
direction: str = ( direction: str = (
'down' if down_gap 'down' if down_gap
else 'up' else 'up'
) )
# TODO! mk this a `msgspec.Struct` which we deserialize
# collect arrow spec # on the server side!
# XXX: send timestamp for server-side index lookup
# to ensure alignment with current shm state
gap_time: float = row['time'][0] gap_time: float = row['time'][0]
arrow_spec: dict[str, Any] = dict( arrow_kwargs: dict[str, Any] = dict(
fqme=fqme,
timeframe=timeframe,
x=iend, # fallback if timestamp lookup fails x=iend, # fallback if timestamp lookup fails
y=cls, y=cls,
time=gap_time, # for server-side index lookup time=gap_time, # for server-side index lookup
color=color, color=color,
alpha=169, alpha=169,
pointing=direction, pointing=direction,
# TODO: expose these as params to markup_gaps()?
headLen=10, headLen=10,
headWidth=2.222, headWidth=2.222,
pxMode=True, pxMode=True,
) )
arrow_specs.append(arrow_spec)
aid: int = await actl.add_arrow(
**arrow_kwargs
)
# add duration label to RHS of arrow # add duration label to RHS of arrow
if up_gap: if up_gap:
@ -288,12 +278,15 @@ async def markup_gaps(
assert flat assert flat
anchor = (0, 0) # up from bottom anchor = (0, 0) # up from bottom
# collect text spec if enabled # use a slightly smaller font for gap label txt.
if show_txt: font, small_font = get_fonts()
font, small_font = get_fonts() font_size: int = small_font.px_size - 1
font_size: int = small_font.px_size - 1 assert isinstance(font_size, int)
text_spec: dict[str, Any] = dict( if show_txt:
text_aid: int = await actl.add_text(
fqme=fqme,
timeframe=timeframe,
text=gap_label, text=gap_label,
x=iend + 1, # fallback if timestamp lookup fails x=iend + 1, # fallback if timestamp lookup fails
y=cls, y=cls,
@ -302,46 +295,12 @@ async def markup_gaps(
anchor=anchor, anchor=anchor,
font_size=font_size, font_size=font_size,
) )
text_specs.append(text_spec) aids[text_aid] = {'text': gap_label}
# submit all annotations in single batch IPC msg # tell chart to redraw all its
log.info( # graphics view layers Bo
f'Submitting batch annotations:\n'
f' rects: {len(rect_specs)}\n'
f' arrows: {len(arrow_specs)}\n'
f' texts: {len(text_specs)}\n'
)
profiler('built all annotation specs')
result: dict[str, list[int]] = await actl.add_batch(
fqme=fqme,
timeframe=timeframe,
rects=rect_specs,
arrows=arrow_specs,
texts=text_specs,
show_individual_arrows=show_individual_arrows,
)
profiler('batch `.add_batch()` IPC call complete')
# build aids dict from batch results
for aid in result['rects']:
aids[aid] = {'type': 'rect'}
for aid in result['arrows']:
aids[aid] = {'type': 'arrow'}
for aid in result['texts']:
aids[aid] = {'type': 'text'}
log.info(
f'Batch submission complete: {len(aids)} annotation(s) '
f'created'
)
profiler('built aids result dict')
# tell chart to redraw all its graphics view layers
await actl.redraw( await actl.redraw(
fqme=fqme, fqme=fqme,
timeframe=timeframe, timeframe=timeframe,
) )
profiler('final `.redraw()` after annot creation')
return aids return aids

View File

@ -738,21 +738,12 @@ async def start_backfill(
# including the dst[/src] source asset token. SO, # including the dst[/src] source asset token. SO,
# 'tsla.nasdaq.ib' over 'tsla/usd.nasdaq.ib' for # 'tsla.nasdaq.ib' over 'tsla/usd.nasdaq.ib' for
# historical reasons ONLY. # historical reasons ONLY.
if ( if mkt.dst.atype not in {
mkt.dst.atype not in { 'crypto',
'crypto', 'crypto_currency',
'crypto_currency', 'fiat', # a "forex pair"
'fiat', # a "forex pair" 'perpetual_future', # stupid "perps" from cex land
'perpetual_future', # stupid "perps" from cex land }:
}
and not (
mkt.src.atype == 'crypto_currency'
and
mkt.dst.atype in {
'future',
}
)
):
col_sym_key: str = mkt.get_fqme( col_sym_key: str = mkt.get_fqme(
delim_char='', delim_char='',
without_src=True, without_src=True,

View File

@ -24,11 +24,8 @@ from pyqtgraph import (
Point, Point,
functions as fn, functions as fn,
Color, Color,
GraphicsObject,
) )
from pyqtgraph.Qt import internals
import numpy as np import numpy as np
import pyqtgraph as pg
from piker.ui.qt import ( from piker.ui.qt import (
QtCore, QtCore,
@ -38,10 +35,6 @@ from piker.ui.qt import (
QRectF, QRectF,
QGraphicsPathItem, QGraphicsPathItem,
) )
from piker.ui._style import hcolor
from piker.log import get_logger
log = get_logger(__name__)
def mk_marker_path( def mk_marker_path(
@ -111,7 +104,7 @@ def mk_marker_path(
class LevelMarker(QGraphicsPathItem): class LevelMarker(QGraphicsPathItem):
''' '''
An arrow marker path graphic which redraws itself An arrow marker path graphich which redraws itself
to the specified view coordinate level on each paint cycle. to the specified view coordinate level on each paint cycle.
''' '''
@ -258,9 +251,9 @@ def qgo_draw_markers(
) -> float: ) -> float:
''' '''
Paint markers in ``pg.GraphicsItem`` style by first removing the Paint markers in ``pg.GraphicsItem`` style by first
view transform for the painter, drawing the markers in scene removing the view transform for the painter, drawing the markers
coords, then restoring the view coords. in scene coords, then restoring the view coords.
''' '''
# paint markers in native coordinate system # paint markers in native coordinate system
@ -302,449 +295,3 @@ def qgo_draw_markers(
p.setTransform(orig_tr) p.setTransform(orig_tr)
return max(sizes) return max(sizes)
class GapAnnotations(GraphicsObject):
    '''
    Batch-rendered gap annotations using Qt's efficient drawing
    APIs.

    Instead of creating individual `QGraphicsItem` instances per
    gap (which is very slow for 1000+ gaps), this class stores all
    gap rectangles and arrows in numpy-backed arrays and renders
    them in single batch paint calls.

    Performance: ~1000x faster than individual items for large gap
    counts.

    Based on patterns from:
    - `pyqtgraph.BarGraphItem` (batch rect rendering)
    - `pyqtgraph.ScatterPlotItem` (fragment rendering)
    - `piker.ui._curve.FlowGraphic` (single path pattern)

    '''
    def __init__(
        self,
        gap_specs: list[dict],
        array: np.ndarray|None = None,
        color: str = 'dad_blue',
        alpha: int = 169,
        arrow_size: float = 10.0,
        fqme: str|None = None,
        timeframe: float|None = None,

    ) -> None:
        '''
        gap_specs: list of dicts with keys:
          - start_pos: (x, y) tuple for left corner of rect
          - end_pos: (x, y) tuple for right corner of rect
          - arrow_x: (optional) x position for arrow
          - arrow_y: (optional) y position for arrow
          - pointing: 'up' or 'down' for arrow direction
          - start_time: (optional) timestamp for repositioning
          - end_time: (optional) timestamp for repositioning

        array: optional OHLC numpy array for repositioning on
            backfill updates (when abs-index changes)

        fqme: symbol name for these gaps (for logging/debugging)

        timeframe: period in seconds that these gaps were
            detected on (used to skip reposition when
            called with wrong timeframe's array)

        '''
        super().__init__()
        self._gap_specs = gap_specs
        self._array = array
        self._fqme = fqme
        self._timeframe = timeframe
        n_gaps = len(gap_specs)

        # shared pen/brush matching original SelectRect/ArrowItem style
        base_color = pg.mkColor(hcolor(color))

        # rect pen: base color, fully opaque for outline
        self._rect_pen = pg.mkPen(base_color, width=1)

        # rect brush: base color with alpha=66 (SelectRect default)
        rect_fill = pg.mkColor(hcolor(color))
        rect_fill.setAlpha(66)
        self._rect_brush = pg.functions.mkBrush(rect_fill)

        # arrow pen: same as rects
        self._arrow_pen = pg.mkPen(base_color, width=1)

        # arrow brush: base color with user-specified alpha (default 169)
        arrow_fill = pg.mkColor(hcolor(color))
        arrow_fill.setAlpha(alpha)
        self._arrow_brush = pg.functions.mkBrush(arrow_fill)

        # allocate rect array using Qt's efficient storage
        self._rectarray = internals.PrimitiveArray(
            QtCore.QRectF,
            4,
        )
        self._rectarray.resize(n_gaps)
        rect_memory = self._rectarray.ndarray()

        # fill rect array from gap specs
        for (
            i,
            spec,
        ) in enumerate(gap_specs):
            (
                start_x,
                start_y,
            ) = spec['start_pos']
            (
                end_x,
                end_y,
            ) = spec['end_pos']

            # QRectF expects (x, y, width, height)
            rect_memory[i, 0] = start_x
            rect_memory[i, 1] = min(start_y, end_y)
            rect_memory[i, 2] = end_x - start_x
            rect_memory[i, 3] = abs(end_y - start_y)

        # build single QPainterPath for all arrows
        self._arrow_path = QtGui.QPainterPath()
        self._arrow_size = arrow_size

        for spec in gap_specs:
            # XXX: match `.paint()` which treats the arrow fields as
            # optional; without this guard a rect-only spec raises
            # `KeyError` during construction.
            if 'arrow_x' not in spec:
                continue

            arrow_x = spec['arrow_x']
            arrow_y = spec['arrow_y']
            pointing = spec['pointing']

            # create arrow polygon
            if pointing == 'down':
                # arrow points downward
                arrow_poly = QtGui.QPolygonF([
                    QPointF(arrow_x, arrow_y),  # tip
                    QPointF(
                        arrow_x - arrow_size/2,
                        arrow_y - arrow_size,
                    ),  # left
                    QPointF(
                        arrow_x + arrow_size/2,
                        arrow_y - arrow_size,
                    ),  # right
                ])
            else:  # up
                # arrow points upward
                arrow_poly = QtGui.QPolygonF([
                    QPointF(arrow_x, arrow_y),  # tip
                    QPointF(
                        arrow_x - arrow_size/2,
                        arrow_y + arrow_size,
                    ),  # left
                    QPointF(
                        arrow_x + arrow_size/2,
                        arrow_y + arrow_size,
                    ),  # right
                ])

            self._arrow_path.addPolygon(arrow_poly)
            self._arrow_path.closeSubpath()

        # cache bounding rect (invalidated by `.reposition()`)
        self._br: QRectF|None = None

    def boundingRect(self) -> QRectF:
        '''
        Compute bounding rect from rect array and arrow path.

        '''
        if self._br is not None:
            return self._br

        # get rect bounds
        rect_memory = self._rectarray.ndarray()
        if len(rect_memory) == 0:
            self._br = QRectF()
            return self._br

        x_min = rect_memory[:, 0].min()
        y_min = rect_memory[:, 1].min()
        x_max = (rect_memory[:, 0] + rect_memory[:, 2]).max()
        y_max = (rect_memory[:, 1] + rect_memory[:, 3]).max()

        # expand for arrow path
        arrow_br = self._arrow_path.boundingRect()
        x_min = min(x_min, arrow_br.left())
        y_min = min(y_min, arrow_br.top())
        x_max = max(x_max, arrow_br.right())
        y_max = max(y_max, arrow_br.bottom())

        self._br = QRectF(
            x_min,
            y_min,
            x_max - x_min,
            y_max - y_min,
        )
        return self._br

    def paint(
        self,
        p: QtGui.QPainter,
        opt: QtWidgets.QStyleOptionGraphicsItem,
        w: QtWidgets.QWidget,

    ) -> None:
        '''
        Batch render all rects and arrows in minimal paint calls.

        '''
        # draw all rects in single batch call (data coordinates)
        p.setPen(self._rect_pen)
        p.setBrush(self._rect_brush)
        drawargs = self._rectarray.drawargs()
        p.drawRects(*drawargs)

        # draw arrows in scene/pixel coordinates so they maintain
        # size regardless of zoom level
        orig_tr = p.transform()
        p.resetTransform()

        # rebuild arrow path in scene coordinates
        arrow_path_scene = QtGui.QPainterPath()

        # arrow geometry matching pg.ArrowItem defaults
        # headLen=10, headWidth=2.222
        # headWidth is the half-width (center to edge distance)
        head_len = self._arrow_size
        head_width = head_len * 0.2222  # 2.222 at size=10

        for spec in self._gap_specs:
            if 'arrow_x' not in spec:
                continue

            arrow_x = spec['arrow_x']
            arrow_y = spec['arrow_y']
            pointing = spec['pointing']

            # transform data coords to scene coords
            scene_pt = orig_tr.map(QPointF(arrow_x, arrow_y))
            sx = scene_pt.x()
            sy = scene_pt.y()

            # create arrow polygon in scene/pixel coords
            # matching pg.ArrowItem geometry but rotated for up/down
            if pointing == 'down':
                # tip points downward (negative y direction)
                arrow_poly = QtGui.QPolygonF([
                    QPointF(sx, sy),  # tip
                    QPointF(
                        sx - head_width,
                        sy - head_len,
                    ),  # left base
                    QPointF(
                        sx + head_width,
                        sy - head_len,
                    ),  # right base
                ])
            else:  # up
                # tip points upward (positive y direction)
                arrow_poly = QtGui.QPolygonF([
                    QPointF(sx, sy),  # tip
                    QPointF(
                        sx - head_width,
                        sy + head_len,
                    ),  # left base
                    QPointF(
                        sx + head_width,
                        sy + head_len,
                    ),  # right base
                ])

            arrow_path_scene.addPolygon(arrow_poly)
            arrow_path_scene.closeSubpath()

        p.setPen(self._arrow_pen)
        p.setBrush(self._arrow_brush)
        p.drawPath(arrow_path_scene)

        # restore original transform
        p.setTransform(orig_tr)

    def reposition(
        self,
        array: np.ndarray|None = None,
        fqme: str|None = None,
        timeframe: float|None = None,

    ) -> None:
        '''
        Reposition all annotations based on timestamps.

        Used when viz is updated (eg during backfill) and abs-index
        range changes - we need to lookup new indices from timestamps.

        '''
        # skip reposition if timeframe doesn't match
        # (e.g., 1s gaps being repositioned with 60s array)
        if (
            timeframe is not None
            and
            self._timeframe is not None
            and
            timeframe != self._timeframe
        ):
            log.debug(
                f'Skipping reposition for {self._fqme} gaps:\n'
                f'  gap timeframe: {self._timeframe}s\n'
                f'  array timeframe: {timeframe}s\n'
            )
            return

        if array is None:
            array = self._array

        if array is None:
            log.warning(
                'GapAnnotations.reposition() called but no array '
                'provided'
            )
            return

        # collect all unique timestamps we need to lookup
        timestamps: set[float] = set()
        for spec in self._gap_specs:
            if spec.get('start_time') is not None:
                timestamps.add(spec['start_time'])
            if spec.get('end_time') is not None:
                timestamps.add(spec['end_time'])
            if spec.get('time') is not None:
                timestamps.add(spec['time'])

        # vectorized timestamp -> row lookup using binary search
        # NOTE: `np` is imported at module scope; the prior
        # function-local re-import was redundant.
        time_to_row: dict[float, dict] = {}
        if timestamps:
            time_arr = array['time']
            ts_array = np.array(list(timestamps))
            search_indices = np.searchsorted(
                time_arr,
                ts_array,
            )

            # vectorized bounds check and exact match verification.
            # XXX: `np.searchsorted()` can return `len(time_arr)` for
            # timestamps past the end of the array, so clip BEFORE
            # fancy-indexing; `&` does not short-circuit and the
            # unclipped gather would raise `IndexError`.
            clipped_indices = np.minimum(
                search_indices,
                len(array) - 1,
            )
            valid_mask = (
                (search_indices < len(array))
                & (time_arr[clipped_indices] == ts_array)
            )
            valid_indices = search_indices[valid_mask]
            valid_timestamps = ts_array[valid_mask]
            matched_rows = array[valid_indices]

            time_to_row = {
                float(ts): {
                    'index': float(row['index']),
                    'open': float(row['open']),
                    'close': float(row['close']),
                }
                for ts, row in zip(
                    valid_timestamps,
                    matched_rows,
                )
            }

        # rebuild rect array from gap specs with new indices
        rect_memory = self._rectarray.ndarray()
        for (
            i,
            spec,
        ) in enumerate(self._gap_specs):
            start_time = spec.get('start_time')
            end_time = spec.get('end_time')
            if (
                start_time is None
                or end_time is None
            ):
                continue

            start_row = time_to_row.get(start_time)
            end_row = time_to_row.get(end_time)
            if (
                start_row is None
                or end_row is None
            ):
                log.warning(
                    f'Timestamp lookup failed for gap[{i}] during '
                    f'reposition:\n'
                    f'  fqme: {fqme}\n'
                    f'  timeframe: {timeframe}s\n'
                    f'  start_time: {start_time}\n'
                    f'  end_time: {end_time}\n'
                    f'  array time range: '
                    f'{array["time"][0]} -> {array["time"][-1]}\n'
                )
                continue

            start_idx = start_row['index']
            end_idx = end_row['index']
            start_close = start_row['close']
            end_open = end_row['open']

            from_idx: float = 0.16 - 0.06
            start_x = start_idx + 1 - from_idx
            end_x = end_idx + from_idx

            # update rect in array
            rect_memory[i, 0] = start_x
            rect_memory[i, 1] = min(start_close, end_open)
            rect_memory[i, 2] = end_x - start_x
            rect_memory[i, 3] = abs(end_open - start_close)

        # rebuild arrow path with new indices
        self._arrow_path.clear()
        for spec in self._gap_specs:
            time_val = spec.get('time')
            if time_val is None:
                continue

            arrow_row = time_to_row.get(time_val)
            if arrow_row is None:
                continue

            arrow_x = arrow_row['index']
            arrow_y = arrow_row['close']
            pointing = spec['pointing']

            # create arrow polygon
            if pointing == 'down':
                arrow_poly = QtGui.QPolygonF([
                    QPointF(arrow_x, arrow_y),
                    QPointF(
                        arrow_x - self._arrow_size/2,
                        arrow_y - self._arrow_size,
                    ),
                    QPointF(
                        arrow_x + self._arrow_size/2,
                        arrow_y - self._arrow_size,
                    ),
                ])
            else:  # up
                arrow_poly = QtGui.QPolygonF([
                    QPointF(arrow_x, arrow_y),
                    QPointF(
                        arrow_x - self._arrow_size/2,
                        arrow_y + self._arrow_size,
                    ),
                    QPointF(
                        arrow_x + self._arrow_size/2,
                        arrow_y + self._arrow_size,
                    ),
                ])

            self._arrow_path.addPolygon(arrow_poly)
            self._arrow_path.closeSubpath()

        # invalidate bounding rect cache
        self._br = None
        self.prepareGeometryChange()
        self.update()

View File

@ -131,11 +131,11 @@ async def _async_main(
async with ( async with (
tractor.trionics.collapse_eg(), tractor.trionics.collapse_eg(),
trio.open_nursery() as tn, trio.open_nursery() as root_n,
): ):
# set root nursery and task stack for spawning other charts/feeds # set root nursery and task stack for spawning other charts/feeds
# that run cached in the bg # that run cached in the bg
godwidget._root_n = tn godwidget._root_n = root_n
# setup search widget and focus main chart view at startup # setup search widget and focus main chart view at startup
# search widget is a singleton alongside the godwidget # search widget is a singleton alongside the godwidget
@ -165,7 +165,7 @@ async def _async_main(
# load other providers into search **after** # load other providers into search **after**
# the chart's select cache # the chart's select cache
for brokername, mod in needed_brokermods.items(): for brokername, mod in needed_brokermods.items():
tn.start_soon( root_n.start_soon(
load_provider_search, load_provider_search,
mod, mod,
loglevel, loglevel,

View File

@ -20,9 +20,8 @@ Chart axes graphics and behavior.
""" """
from __future__ import annotations from __future__ import annotations
from functools import lru_cache from functools import lru_cache
from math import floor
import platform
from typing import Callable from typing import Callable
from math import floor
import polars as pl import polars as pl
import pyqtgraph as pg import pyqtgraph as pg
@ -43,7 +42,6 @@ from ._style import DpiAwareFont, hcolor, _font
from ._interaction import ChartView from ._interaction import ChartView
from ._dataviz import Viz from ._dataviz import Viz
_friggin_macos: bool = platform.system() == 'Darwin'
_axis_pen = pg.mkPen(hcolor('bracket')) _axis_pen = pg.mkPen(hcolor('bracket'))
@ -77,9 +75,6 @@ class Axis(pg.AxisItem):
self.pi = plotitem self.pi = plotitem
self._dpi_font = _font self._dpi_font = _font
# store for later recalculation on zoom
self._typical_max_str = typical_max_str
self.setTickFont(_font.font) self.setTickFont(_font.font)
font_size = self._dpi_font.font.pixelSize() font_size = self._dpi_font.font.pixelSize()
@ -161,42 +156,6 @@ class Axis(pg.AxisItem):
def size_to_values(self) -> None: def size_to_values(self) -> None:
pass pass
def update_fonts(self, font: DpiAwareFont) -> None:
    '''
    Update font and recalculate axis sizing after zoom change.

    Re-applies tick fonts, tick-text offsets and the cached
    "typical" bounding rect, then pokes pyqtgraph's internal
    text-size tracking so the axis auto-expands to fit.

    '''
    # IMPORTANT: tell Qt we're about to change geometry
    self.prepareGeometryChange()

    self._dpi_font = font
    self.setTickFont(font.font)
    font_size = font.font.pixelSize()

    # recalculate text offset based on new font size
    text_offset = None
    if self.orientation in ('bottom',):
        text_offset = floor(0.25 * font_size)
    elif self.orientation in ('left', 'right'):
        text_offset = floor(font_size / 2)

    if text_offset:
        self.setStyle(tickTextOffset=text_offset)

    # recalculate bounding rect with new font
    # Note: typical_max_str should be stored from init
    if not hasattr(self, '_typical_max_str'):
        self._typical_max_str = '100 000.000 '  # fallback default

    self.typical_br = font._qfm.boundingRect(self._typical_max_str)

    # Update PyQtGraph's internal text size tracking.
    # This is critical - PyQtGraph uses these internally for
    # auto-expand of the axis label area.
    if self.orientation in ['left', 'right']:
        self.textWidth = self.typical_br.width()
    else:
        self.textHeight = self.typical_br.height()

    # resize axis to fit new font - this triggers PyQtGraph's
    # auto-expand machinery
    self.size_to_values()
def txt_offsets(self) -> tuple[int, int]: def txt_offsets(self) -> tuple[int, int]:
return tuple(self.style['tickTextOffset']) return tuple(self.style['tickTextOffset'])
@ -296,22 +255,8 @@ class PriceAxis(Axis):
) -> None: ) -> None:
self._min_tick = size self._min_tick = size
if _friggin_macos: def size_to_values(self) -> None:
def size_to_values(self) -> None: self.setWidth(self.typical_br.width())
# Call PyQtGraph's internal width update mechanism
# This respects autoExpandTextSpace and updates min/max constraints
self._updateWidth()
# tell Qt our preferred size changed so layout recalculates
self.updateGeometry()
# force parent plot item to recalculate its layout
if self.pi and hasattr(self.pi, 'updateGeometry'):
self.pi.updateGeometry()
else:
def size_to_values(self) -> None:
# XXX, old code!
self.setWidth(self.typical_br.width())
# XXX: drop for now since it just eats up h space # XXX: drop for now since it just eats up h space
@ -354,21 +299,8 @@ class DynamicDateAxis(Axis):
1: '%H:%M:%S', 1: '%H:%M:%S',
} }
if _friggin_macos: def size_to_values(self) -> None:
def size_to_values(self) -> None: self.setHeight(self.typical_br.height() + 1)
# Call PyQtGraph's internal height update mechanism
# This respects autoExpandTextSpace and updates min/max constraints
self._updateHeight()
# tell Qt our preferred size changed so layout recalculates
self.updateGeometry()
# force parent plot item to recalculate its layout
if self.pi and hasattr(self.pi, 'updateGeometry'):
self.pi.updateGeometry()
else:
def size_to_values(self) -> None:
# XXX, old code!
self.setHeight(self.typical_br.height() + 1)
def _indexes_to_timestrs( def _indexes_to_timestrs(
self, self,

View File

@ -1346,6 +1346,7 @@ async def display_symbol_data(
fqmes, fqmes,
loglevel=loglevel, loglevel=loglevel,
tick_throttle=cycles_per_feed, tick_throttle=cycles_per_feed,
) as feed, ) as feed,
): ):
@ -1460,7 +1461,7 @@ async def display_symbol_data(
async with ( async with (
tractor.trionics.collapse_eg(), tractor.trionics.collapse_eg(),
trio.open_nursery() as tn, trio.open_nursery() as ln,
): ):
# if available load volume related built-in display(s) # if available load volume related built-in display(s)
vlm_charts: dict[ vlm_charts: dict[
@ -1471,7 +1472,7 @@ async def display_symbol_data(
flume.has_vlm() flume.has_vlm()
and vlm_chart is None and vlm_chart is None
): ):
vlm_chart = vlm_charts[fqme] = await tn.start( vlm_chart = vlm_charts[fqme] = await ln.start(
open_vlm_displays, open_vlm_displays,
rt_linked, rt_linked,
flume, flume,
@ -1479,7 +1480,7 @@ async def display_symbol_data(
# load (user's) FSP set (otherwise known as "indicators") # load (user's) FSP set (otherwise known as "indicators")
# from an input config. # from an input config.
tn.start_soon( ln.start_soon(
start_fsp_displays, start_fsp_displays,
rt_linked, rt_linked,
flume, flume,
@ -1603,11 +1604,11 @@ async def display_symbol_data(
# start update loop task # start update loop task
dss: dict[str, DisplayState] = {} dss: dict[str, DisplayState] = {}
tn.start_soon( ln.start_soon(
partial( partial(
graphics_update_loop, graphics_update_loop,
dss=dss, dss=dss,
nurse=tn, nurse=ln,
godwidget=godwidget, godwidget=godwidget,
feed=feed, feed=feed,
# min_istream, # min_istream,
@ -1622,6 +1623,7 @@ async def display_symbol_data(
order_ctl_fqme: str = fqmes[0] order_ctl_fqme: str = fqmes[0]
mode: OrderMode mode: OrderMode
async with ( async with (
open_order_mode( open_order_mode(
feed, feed,
godwidget, godwidget,

View File

@ -168,7 +168,7 @@ class ArrowEditor(Struct):
''' '''
uid: str = arrow._uid uid: str = arrow._uid
arrows: list[pg.ArrowItem] = self._arrows[uid] arrows: list[pg.ArrowItem] = self._arrows[uid]
log.debug( log.info(
f'Removing arrow from views\n' f'Removing arrow from views\n'
f'uid: {uid!r}\n' f'uid: {uid!r}\n'
f'{arrow!r}\n' f'{arrow!r}\n'
@ -286,9 +286,7 @@ class LineEditor(Struct):
for line in lines: for line in lines:
line.show_labels() line.show_labels()
line.hide_markers() line.hide_markers()
log.debug( log.debug(f'Level active for level: {line.value()}')
f'Line active @ level: {line.value()!r}'
)
# TODO: other flashy things to indicate the order is active # TODO: other flashy things to indicate the order is active
return lines return lines
@ -331,11 +329,7 @@ class LineEditor(Struct):
if line in hovered: if line in hovered:
hovered.remove(line) hovered.remove(line)
log.debug( log.debug(f'deleting {line} with oid: {uuid}')
f'Deleting level-line\n'
f'line: {line!r}\n'
f'oid: {uuid!r}\n'
)
line.delete() line.delete()
# make sure the xhair doesn't get left off # make sure the xhair doesn't get left off
@ -343,11 +337,7 @@ class LineEditor(Struct):
cursor.show_xhair() cursor.show_xhair()
else: else:
log.warning( log.warning(f'Could not find line for {line}')
f'Could not find line for removal ??\n'
f'\n'
f'{line!r}\n'
)
return lines return lines
@ -579,11 +569,11 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
if update_label: if update_label:
self.init_label(view_rect) self.init_label(view_rect)
log.debug( print(
f'SelectRect modify,\n' 'SelectRect modify:\n'
f'QRectF: {view_rect}\n' f'QRectF: {view_rect}\n'
f'start_pos: {start_pos!r}\n' f'start_pos: {start_pos}\n'
f'end_pos: {end_pos!r}\n' f'end_pos: {end_pos}\n'
) )
self.show() self.show()
@ -650,11 +640,8 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
dmn=dmn, dmn=dmn,
)) ))
# tracing # print(f'x2, y2: {(x2, y2)}')
# log.info( # print(f'xmn, ymn: {(xmn, ymx)}')
# f'x2, y2: {(x2, y2)}\n'
# f'xmn, ymn: {(xmn, ymx)}\n'
# )
label_anchor = Point( label_anchor = Point(
xmx + 2, xmx + 2,

View File

@ -203,9 +203,6 @@ def run_qtractor(
if is_windows: if is_windows:
window.configure_to_desktop() window.configure_to_desktop()
# install global keyboard shortcuts for UI zoom
window.install_global_zoom_filter()
# actually render to screen # actually render to screen
window.show() window.show()
app.exec_() app.exec_()

View File

@ -124,13 +124,6 @@ class Edit(QLineEdit):
self.sizeHint() self.sizeHint()
self.update() self.update()
def update_fonts(self, font: DpiAwareFont) -> None:
    '''
    Install a new dpi-aware font and trigger a size-hint refresh.

    '''
    self.dpi_font = font
    qfont = font.font
    self.setFont(qfont)
    # size hints depend on font metrics, so ask Qt to re-layout
    self.updateGeometry()
def focus(self) -> None: def focus(self) -> None:
self.selectAll() self.selectAll()
self.show() self.show()
@ -248,14 +241,6 @@ class Selection(QComboBox):
icon_size = round(h * 0.75) icon_size = round(h * 0.75)
self.setIconSize(QSize(icon_size, icon_size)) self.setIconSize(QSize(icon_size, icon_size))
def update_fonts(self, font: DpiAwareFont) -> None:
    '''
    Apply a new font then re-derive the widget's height and
    icon sizing from the fresh metrics.

    '''
    self.setFont(font.font)
    # `.resize()` re-computes heights from the new font metrics
    self.resize()
    # size hint changed -> request a parent re-layout
    self.updateGeometry()
def set_items( def set_items(
self, self,
keys: list[str], keys: list[str],
@ -446,39 +431,6 @@ class FieldsForm(QWidget):
self.fields[key] = select self.fields[key] = select
return select return select
def update_fonts(self) -> None:
    '''
    Update font sizes after zoom change.

    Walks all child labels and input fields, preferring each
    widget's own `update_fonts()`/`update_font()` hook and falling
    back to a regenerated stylesheet.

    '''
    from ._style import _font, _font_small

    # update stored font size
    self._font_size = _font_small.px_size - 2

    # update all labels
    for name, label in self.labels.items():
        if hasattr(label, 'update_font'):
            label.update_font(_font.font, self._font_size - 1)

    # update all fields (edits, selects)
    for key, field in self.fields.items():
        # first check for our custom update_fonts method (Edit, Selection)
        if hasattr(field, 'update_fonts'):
            field.update_fonts(_font)

        # then handle stylesheet updates for those without custom methods
        elif hasattr(field, 'setStyleSheet'):
            # regenerate stylesheet with new font size
            field.setStyleSheet(
                f"""QLineEdit {{
                    color : {hcolor('gunmetal')};
                    font-size : {self._font_size}px;
                }}
                """
            )
            field.setFont(_font.font)

        # for Selection widgets that need style updates
        # NOTE(review): applies to any field exposing `set_style`;
        # confirm intended branch nesting against upstream history.
        if hasattr(field, 'set_style'):
            field.set_style(color='gunmetal', font_size=self._font_size)
async def handle_field_input( async def handle_field_input(
@ -681,37 +633,6 @@ class FillStatusBar(QProgressBar):
self.setRange(0, int(slots)) self.setRange(0, int(slots))
self.setValue(value) self.setValue(value)
def update_fonts(self, font_size: int) -> None:
    '''
    Update font size after zoom change.

    Regenerates the progress-bar stylesheet with the new size and
    re-applies the small UI font.

    '''
    from ._style import _font_small

    self.font_size = font_size

    # regenerate stylesheet with new font size
    self.setStyleSheet(
        f"""
        QProgressBar {{
            text-align: center;
            font-size : {self.font_size - 2}px;
            background-color: {hcolor('papas_special')};
            color : {hcolor('papas_special')};
            border: {self.border_px}px solid {hcolor('default_light')};
            border-radius: 2px;
        }}
        QProgressBar::chunk {{
            background-color: {hcolor('default_spotlight')};
            color: {hcolor('bracket')};
            border-radius: 2px;
        }}
        """
    )
    self.setFont(_font_small.font)
def mk_fill_status_bar( def mk_fill_status_bar(

View File

@ -334,19 +334,3 @@ class FormatLabel(QLabel):
out = self.fmt_str.format(**fields) out = self.fmt_str.format(**fields)
self.setText(out) self.setText(out)
return out return out
def update_font(
    self,
    font: QtGui.QFont,
    font_size: int,
    font_color: str = 'default_lightest',
) -> None:
    '''
    Update font after zoom change.

    Regenerates the label stylesheet (color + pixel size) then
    applies the passed `QFont`.

    '''
    self.setStyleSheet(
        f"""QLabel {{
            color : {hcolor(font_color)};
            font-size : {font_size}px;
        }}
        """
    )
    self.setFont(font)

View File

@ -38,6 +38,7 @@ from piker.ui.qt import (
QtGui, QtGui,
QGraphicsPathItem, QGraphicsPathItem,
QStyleOptionGraphicsItem, QStyleOptionGraphicsItem,
QGraphicsItem,
QGraphicsScene, QGraphicsScene,
QWidget, QWidget,
QPointF, QPointF,

View File

@ -178,26 +178,6 @@ class SettingsPane:
# encompasing high level namespace # encompasing high level namespace
order_mode: OrderMode | None = None # typing: ignore # noqa order_mode: OrderMode | None = None # typing: ignore # noqa
def update_fonts(self) -> None:
    '''
    Re-apply (possibly resized) fonts to all pane widgets after
    a UI zoom change.

    '''
    from ._style import _font_small

    # the fields-form and fill-bar know how to resize themselves
    form = self.form
    if form and hasattr(form, 'update_fonts'):
        form.update_fonts()

    bar = self.fill_bar
    if bar and hasattr(bar, 'update_fonts'):
        bar.update_fonts(_font_small.px_size)

    # plain labels just need the new QFont applied
    for label in (
        self.step_label,
        self.pnl_label,
        self.limit_label,
    ):
        if label:
            label.setFont(_font_small.font)
def set_accounts( def set_accounts(
self, self,
names: list[str], names: list[str],

View File

@ -22,7 +22,6 @@ a chart from some other actor.
from __future__ import annotations from __future__ import annotations
from contextlib import ( from contextlib import (
asynccontextmanager as acm, asynccontextmanager as acm,
contextmanager as cm,
AsyncExitStack, AsyncExitStack,
) )
from functools import partial from functools import partial
@ -47,7 +46,6 @@ from piker.log import get_logger
from piker.types import Struct from piker.types import Struct
from piker.service import find_service from piker.service import find_service
from piker.brokers import SymbolNotFound from piker.brokers import SymbolNotFound
from piker.toolz import Profiler
from piker.ui.qt import ( from piker.ui.qt import (
QGraphicsItem, QGraphicsItem,
) )
@ -100,8 +98,6 @@ def rm_annot(
annot: ArrowEditor|SelectRect|pg.TextItem annot: ArrowEditor|SelectRect|pg.TextItem
) -> bool: ) -> bool:
global _editors global _editors
from piker.ui._annotate import GapAnnotations
match annot: match annot:
case pg.ArrowItem(): case pg.ArrowItem():
editor = _editors[annot._uid] editor = _editors[annot._uid]
@ -126,35 +122,9 @@ def rm_annot(
scene.removeItem(annot) scene.removeItem(annot)
return True return True
case GapAnnotations():
scene = annot.scene()
if scene:
scene.removeItem(annot)
return True
return False return False
@cm
def no_qt_updates(*items):
    '''
    Suspend Qt repainting for every passed widget/graphics-item
    for the duration of the `with` block, re-enabling on exit so
    batched render ops coalesce into a single repaint.

    Items lacking a `setUpdatesEnabled()` method (i.e. neither
    `QWidget` nor `QGraphicsItem`-like) are silently skipped.

    '''
    for obj in items:
        toggle = getattr(obj, 'setUpdatesEnabled', None)
        if toggle is not None:
            toggle(False)
    try:
        yield
    finally:
        # always restore, even if the body raised
        for obj in items:
            toggle = getattr(obj, 'setUpdatesEnabled', None)
            if toggle is not None:
                toggle(True)
async def serve_rc_annots( async def serve_rc_annots(
ipc_key: str, ipc_key: str,
annot_req_stream: MsgStream, annot_req_stream: MsgStream,
@ -459,333 +429,6 @@ async def serve_rc_annots(
aids.add(aid) aids.add(aid)
await annot_req_stream.send(aid) await annot_req_stream.send(aid)
case {
'cmd': 'batch',
'fqme': fqme,
'timeframe': timeframe,
'rects': list(rect_specs),
'arrows': list(arrow_specs),
'texts': list(text_specs),
'show_individual_arrows': bool(show_individual_arrows),
}:
# batch submission handler - process multiple
# annotations in single IPC round-trip
ds: DisplayState = _dss[fqme]
try:
chart: ChartPlotWidget = {
60: ds.hist_chart,
1: ds.chart,
}[timeframe]
except KeyError:
msg: str = (
f'No chart for timeframe={timeframe}s, '
f'skipping batch annotation'
)
log.error(msg)
await annot_req_stream.send({'error': msg})
continue
cv: ChartView = chart.cv
viz: Viz = chart.get_viz(fqme)
shm = viz.shm
arr = shm.array
result: dict[str, list[int]] = {
'rects': [],
'arrows': [],
'texts': [],
}
profiler = Profiler(
msg=(
f'Batch annotate {len(rect_specs)} gaps '
f'on {fqme}@{timeframe}s'
),
disabled=False,
delayed=False,
)
aids_set: set[int] = ctxs[ipc_key][1]
# build unified gap_specs for GapAnnotations class
from piker.ui._annotate import GapAnnotations
gap_specs: list[dict] = []
n_gaps: int = max(
len(rect_specs),
len(arrow_specs),
)
profiler('setup batch annot creation')
# collect all unique timestamps for vectorized lookup
timestamps: list[float] = []
for rect_spec in rect_specs:
if start_time := rect_spec.get('start_time'):
timestamps.append(start_time)
if end_time := rect_spec.get('end_time'):
timestamps.append(end_time)
for arrow_spec in arrow_specs:
if time_val := arrow_spec.get('time'):
timestamps.append(time_val)
profiler('collect `timestamps: list` complet!')
# build timestamp -> row mapping using binary search
# O(m log n) instead of O(n*m) with np.isin
time_to_row: dict[float, dict] = {}
if timestamps:
import numpy as np
time_arr = arr['time']
ts_array = np.array(timestamps)
# binary search for each timestamp in sorted time array
search_indices = np.searchsorted(
time_arr,
ts_array,
)
profiler('`np.searchsorted()` complete!')
# vectorized bounds check and exact match verification
valid_mask = (
(search_indices < len(arr))
& (time_arr[search_indices] == ts_array)
)
# get all valid indices and timestamps
valid_indices = search_indices[valid_mask]
valid_timestamps = ts_array[valid_mask]
# use fancy indexing to get all rows at once
matched_rows = arr[valid_indices]
# extract fields to plain arrays BEFORE dict building
indices_arr = matched_rows['index'].astype(float)
opens_arr = matched_rows['open'].astype(float)
closes_arr = matched_rows['close'].astype(float)
profiler('extracted field arrays')
# build dict from plain arrays (much faster)
time_to_row: dict[float, dict] = {
float(ts): {
'index': idx,
'open': opn,
'close': cls,
}
for (
ts,
idx,
opn,
cls,
) in zip(
valid_timestamps,
indices_arr,
opens_arr,
closes_arr,
)
}
profiler('`time_to_row` creation complete!')
profiler(f'built timestamp lookup for {len(timestamps)} times')
# build gap_specs from rect+arrow specs
for i in range(n_gaps):
gap_spec: dict = {}
# get rect spec for this gap
if i < len(rect_specs):
rect_spec: dict = rect_specs[i].copy()
start_time = rect_spec.get('start_time')
end_time = rect_spec.get('end_time')
if (
start_time is not None
and end_time is not None
):
# lookup from pre-built mapping
start_row = time_to_row.get(start_time)
end_row = time_to_row.get(end_time)
if (
start_row is None
or end_row is None
):
log.warning(
f'Timestamp lookup failed for '
f'gap[{i}], skipping'
)
continue
start_idx = start_row['index']
end_idx = end_row['index']
start_close = start_row['close']
end_open = end_row['open']
from_idx: float = 0.16 - 0.06
gap_spec['start_pos'] = (
start_idx + 1 - from_idx,
start_close,
)
gap_spec['end_pos'] = (
end_idx + from_idx,
end_open,
)
gap_spec['start_time'] = start_time
gap_spec['end_time'] = end_time
gap_spec['color'] = rect_spec.get(
'color',
'dad_blue',
)
# get arrow spec for this gap
if i < len(arrow_specs):
arrow_spec: dict = arrow_specs[i].copy()
x: float = float(arrow_spec.get('x', 0))
y: float = float(arrow_spec.get('y', 0))
time_val: float|None = arrow_spec.get('time')
# timestamp-based index lookup (only for x, NOT y!)
# y is already set to the PREVIOUS bar's close
if time_val is not None:
arrow_row = time_to_row.get(time_val)
if arrow_row is not None:
x = arrow_row['index']
# NOTE: do NOT update y! it's the
# previous bar's close, not current
else:
log.warning(
f'Arrow timestamp {time_val} not '
f'found for gap[{i}], using x={x}'
)
gap_spec['arrow_x'] = x
gap_spec['arrow_y'] = y
gap_spec['time'] = time_val
gap_spec['pointing'] = arrow_spec.get(
'pointing',
'down',
)
gap_spec['alpha'] = arrow_spec.get('alpha', 169)
gap_specs.append(gap_spec)
profiler(f'built {len(gap_specs)} gap_specs')
# create single GapAnnotations item for all gaps
if gap_specs:
gaps_item = GapAnnotations(
gap_specs=gap_specs,
array=arr,
color=gap_specs[0].get('color', 'dad_blue'),
alpha=gap_specs[0].get('alpha', 169),
arrow_size=10.0,
fqme=fqme,
timeframe=timeframe,
)
chart.plotItem.addItem(gaps_item)
# register single item for repositioning
aid: int = id(gaps_item)
annots[aid] = gaps_item
aids_set.add(aid)
result['rects'].append(aid)
profiler(
f'created GapAnnotations item for {len(gap_specs)} '
f'gaps'
)
# A/B comparison: optionally create individual arrows
# alongside batch for visual comparison
if show_individual_arrows:
godw = chart.linked.godwidget
arrows: ArrowEditor = ArrowEditor(godw=godw)
for i, spec in enumerate(gap_specs):
if 'arrow_x' not in spec:
continue
aid_str: str = str(uuid4())
arrow: pg.ArrowItem = arrows.add(
plot=chart.plotItem,
uid=aid_str,
x=spec['arrow_x'],
y=spec['arrow_y'],
pointing=spec['pointing'],
color='bracket', # different color
alpha=spec.get('alpha', 169),
headLen=10.0,
headWidth=2.222,
pxMode=True,
)
arrow._abs_x = spec['arrow_x']
arrow._abs_y = spec['arrow_y']
annots[aid_str] = arrow
_editors[aid_str] = arrows
aids_set.add(aid_str)
result['arrows'].append(aid_str)
profiler(
f'created {len(gap_specs)} individual arrows '
f'for comparison'
)
# handle text items separately (less common, keep
# individual items)
n_texts: int = 0
for text_spec in text_specs:
kwargs: dict = text_spec.copy()
text: str = kwargs.pop('text')
x: float = float(kwargs.pop('x'))
y: float = float(kwargs.pop('y'))
time_val: float|None = kwargs.pop('time', None)
# timestamp-based index lookup
if time_val is not None:
matches = arr[arr['time'] == time_val]
if len(matches) > 0:
x = float(matches[0]['index'])
y = float(matches[0]['close'])
color = kwargs.pop('color', 'dad_blue')
anchor = kwargs.pop('anchor', (0, 1))
font_size = kwargs.pop('font_size', None)
text_item: pg.TextItem = pg.TextItem(
text,
color=hcolor(color),
anchor=anchor,
)
if font_size is None:
from ._style import get_fonts
font, font_small = get_fonts()
font_size = font_small.px_size - 1
qfont: QFont = text_item.textItem.font()
qfont.setPixelSize(font_size)
text_item.setFont(qfont)
text_item.setPos(float(x), float(y))
chart.plotItem.addItem(text_item)
text_item._abs_x = float(x)
text_item._abs_y = float(y)
aid: str = str(uuid4())
annots[aid] = text_item
aids_set.add(aid)
result['texts'].append(aid)
n_texts += 1
profiler(
f'created text annotations: {n_texts} texts'
)
profiler.finish()
await annot_req_stream.send(result)
case { case {
'cmd': 'remove', 'cmd': 'remove',
'aid': int(aid)|str(aid), 'aid': int(aid)|str(aid),
@ -828,26 +471,10 @@ async def serve_rc_annots(
# XXX: reposition all annotations to ensure they # XXX: reposition all annotations to ensure they
# stay aligned with viz data after reset (eg during # stay aligned with viz data after reset (eg during
# backfill when abs-index range changes) # backfill when abs-index range changes)
chart: ChartPlotWidget = {
60: ds.hist_chart,
1: ds.chart,
}[timeframe]
viz: Viz = chart.get_viz(fqme)
arr = viz.shm.array
n_repositioned: int = 0 n_repositioned: int = 0
for aid, annot in annots.items(): for aid, annot in annots.items():
# GapAnnotations batch items have .reposition()
if hasattr(annot, 'reposition'):
annot.reposition(
array=arr,
fqme=fqme,
timeframe=timeframe,
)
n_repositioned += 1
# arrows and text items use abs x,y coords # arrows and text items use abs x,y coords
elif ( if (
hasattr(annot, '_abs_x') hasattr(annot, '_abs_x')
and and
hasattr(annot, '_abs_y') hasattr(annot, '_abs_y')
@ -912,21 +539,12 @@ async def remote_annotate(
finally: finally:
# ensure all annots for this connection are deleted # ensure all annots for this connection are deleted
# on any final teardown # on any final teardown
profiler = Profiler(
msg=f'Annotation teardown for ctx {ctx.cid}',
disabled=False,
ms_threshold=0.0,
)
(_ctx, aids) = _ctxs[ctx.cid] (_ctx, aids) = _ctxs[ctx.cid]
assert _ctx is ctx assert _ctx is ctx
profiler(f'got {len(aids)} aids to remove')
for aid in aids: for aid in aids:
annot: QGraphicsItem = _annots[aid] annot: QGraphicsItem = _annots[aid]
assert rm_annot(annot) assert rm_annot(annot)
profiler(f'removed all {len(aids)} annotations')
class AnnotCtl(Struct): class AnnotCtl(Struct):
''' '''
@ -1128,64 +746,6 @@ class AnnotCtl(Struct):
) )
return aid return aid
async def add_batch(
self,
fqme: str,
timeframe: float,
rects: list[dict]|None = None,
arrows: list[dict]|None = None,
texts: list[dict]|None = None,
show_individual_arrows: bool = False,
from_acm: bool = False,
) -> dict[str, list[int]]:
'''
Batch submit multiple annotations in single IPC msg for
much faster remote annotation vs. per-annot round-trips.
Returns dict of annotation IDs:
{
'rects': [aid1, aid2, ...],
'arrows': [aid3, aid4, ...],
'texts': [aid5, aid6, ...],
}
'''
ipc: MsgStream = self._get_ipc(fqme)
with trio.fail_after(10):
await ipc.send({
'fqme': fqme,
'cmd': 'batch',
'timeframe': timeframe,
'rects': rects or [],
'arrows': arrows or [],
'texts': texts or [],
'show_individual_arrows': show_individual_arrows,
})
result: dict = await ipc.receive()
match result:
case {'error': str(msg)}:
log.error(msg)
return {
'rects': [],
'arrows': [],
'texts': [],
}
# register all AIDs with their IPC streams
for aid_list in result.values():
for aid in aid_list:
self._ipcs[aid] = ipc
if not from_acm:
self._annot_stack.push_async_callback(
partial(
self.remove,
aid,
)
)
return result
async def add_text( async def add_text(
self, self,
fqme: str, fqme: str,
@ -1321,14 +881,3 @@ async def open_annot_ctl(
_annot_stack=annots_stack, _annot_stack=annots_stack,
) )
yield client yield client
# client exited, measure teardown time
teardown_profiler = Profiler(
msg='Client AnnotCtl teardown',
disabled=False,
ms_threshold=0.0,
)
teardown_profiler('exiting annots_stack')
teardown_profiler('annots_stack exited')
teardown_profiler('exiting gather_contexts')

View File

@ -174,13 +174,6 @@ class CompleterView(QTreeView):
self.setStyleSheet(f"font: {size}px") self.setStyleSheet(f"font: {size}px")
def update_fonts(self) -> None:
'''Update font sizes after zoom change.'''
self.set_font_size(_font.px_size)
self.setIndentation(_font.px_size)
self.setFont(_font.font)
self.updateGeometry()
def resize_to_results( def resize_to_results(
self, self,
w: float | None = 0, w: float | None = 0,
@ -637,29 +630,6 @@ class SearchWidget(QtWidgets.QWidget):
| align_flag.AlignLeft, | align_flag.AlignLeft,
) )
def update_fonts(self) -> None:
'''Update font sizes after zoom change.'''
# regenerate label stylesheet with new font size
self.label.setStyleSheet(
f"""QLabel {{
color : {hcolor('default_lightest')};
font-size : {_font.px_size - 2}px;
}}
"""
)
self.label.setFont(_font.font)
# update search bar and view fonts
if hasattr(self.bar, 'update_fonts'):
self.bar.update_fonts(_font)
elif hasattr(self.bar, 'setFont'):
self.bar.setFont(_font.font)
if hasattr(self.view, 'update_fonts'):
self.view.update_fonts()
self.updateGeometry()
def focus(self) -> None: def focus(self) -> None:
self.show() self.show()
self.bar.focus() self.bar.focus()

View File

@ -79,13 +79,9 @@ class DpiAwareFont:
self._font_inches: float = None self._font_inches: float = None
self._screen = None self._screen = None
def _set_qfont_px_size( def _set_qfont_px_size(self, px_size: int) -> None:
self, self._qfont.setPixelSize(px_size)
px_size: int,
) -> int:
self._qfont.setPixelSize(int(px_size))
self._qfm = QtGui.QFontMetrics(self._qfont) self._qfm = QtGui.QFontMetrics(self._qfont)
return self.px_size
@property @property
def screen(self) -> QtGui.QScreen: def screen(self) -> QtGui.QScreen:
@ -128,22 +124,17 @@ class DpiAwareFont:
return size return size
def configure_to_dpi( def configure_to_dpi(self, screen: QtGui.QScreen | None = None):
self,
screen: QtGui.QScreen | None = None,
zoom_level: float = 1.0,
) -> int:
''' '''
Set an appropriately sized font size depending on the screen DPI Set an appropriately sized font size depending on the screen DPI.
or scale the size according to `zoom_level`.
If we end up needing to generalize this more here there are If we end up needing to generalize this more here there are resources
resources listed in the script in listed in the script in ``snippets/qt_screen_info.py``.
``snippets/qt_screen_info.py``.
''' '''
if self._font_size is not None: if self._font_size is not None:
return self._set_qfont_px_size(self._font_size * zoom_level) self._set_qfont_px_size(self._font_size)
return
# NOTE: if no font size set either in the [ui] section of the # NOTE: if no font size set either in the [ui] section of the
# config or not yet computed from our magic scaling calcs, # config or not yet computed from our magic scaling calcs,
@ -162,7 +153,7 @@ class DpiAwareFont:
ldpi = pdpi ldpi = pdpi
mx_dpi = max(pdpi, ldpi) mx_dpi = max(pdpi, ldpi)
# mn_dpi = min(pdpi, ldpi) mn_dpi = min(pdpi, ldpi)
scale = round(ldpi/pdpi, ndigits=2) scale = round(ldpi/pdpi, ndigits=2)
if mx_dpi <= 97: # for low dpi use larger font sizes if mx_dpi <= 97: # for low dpi use larger font sizes
@ -171,7 +162,7 @@ class DpiAwareFont:
else: # hidpi use smaller font sizes else: # hidpi use smaller font sizes
inches = _font_sizes['hi'][self._font_size_calc_key] inches = _font_sizes['hi'][self._font_size_calc_key]
# dpi = mn_dpi dpi = mn_dpi
mult = 1.0 mult = 1.0
@ -206,25 +197,24 @@ class DpiAwareFont:
# always going to hit that error in range mapping from inches: # always going to hit that error in range mapping from inches:
# float to px size: int. # float to px size: int.
self._font_inches = inches self._font_inches = inches
font_size = math.floor(inches * pdpi) font_size = math.floor(inches * dpi)
# apply zoom level multiplier
font_size = int(font_size * zoom_level)
log.debug( log.debug(
f"screen:{screen.name()}\n" f"screen:{screen.name()}\n"
f"pDPI: {pdpi}, lDPI: {ldpi}, scale: {scale}\n" f"pDPI: {pdpi}, lDPI: {ldpi}, scale: {scale}\n"
f"zoom_level: {zoom_level}\n"
f"\nOur best guess font size is {font_size}\n" f"\nOur best guess font size is {font_size}\n"
) )
# apply the size # apply the size
return self._set_qfont_px_size(font_size) self._set_qfont_px_size(font_size)
def boundingRect(self, value: str) -> QtCore.QRectF: def boundingRect(self, value: str) -> QtCore.QRectF:
if self.screen is None:
screen = self.screen
if screen is None:
raise RuntimeError("You must call .configure_to_dpi() first!") raise RuntimeError("You must call .configure_to_dpi() first!")
unscaled_br: QtCore.QRectF = self._qfm.boundingRect(value) unscaled_br = self._qfm.boundingRect(value)
return QtCore.QRectF( return QtCore.QRectF(
0, 0,
0, 0,
@ -238,22 +228,12 @@ _font = DpiAwareFont()
_font_small = DpiAwareFont(_font_size_key='small') _font_small = DpiAwareFont(_font_size_key='small')
def _config_fonts_to_screen( def _config_fonts_to_screen() -> None:
zoom_level: float = 1.0 'configure global DPI aware font sizes'
) -> int:
'''
Configure global DPI aware font size(s).
If `zoom_level` is provided we apply it to auto-calculated
DPI-aware font.
Return the new `DpiAwareFont.px_size`.
'''
global _font, _font_small global _font, _font_small
_font.configure_to_dpi(zoom_level=zoom_level) _font.configure_to_dpi()
_font_small.configure_to_dpi(zoom_level=zoom_level) _font_small.configure_to_dpi()
return _font.px_size
def get_fonts() -> tuple[ def get_fonts() -> tuple[

View File

@ -18,7 +18,6 @@
Qt main window singletons and stuff. Qt main window singletons and stuff.
""" """
from __future__ import annotations
import os import os
import signal import signal
import time import time
@ -39,107 +38,15 @@ from piker.ui.qt import (
QScreen, QScreen,
QCloseEvent, QCloseEvent,
QSettings, QSettings,
QEvent,
QObject,
) )
from ..log import get_logger from ..log import get_logger
from . import _style from ._style import _font_small, hcolor
from ._style import (
_font_small,
hcolor,
)
from ._widget import GodWidget from ._widget import GodWidget
log = get_logger(__name__) log = get_logger(__name__)
class GlobalZoomEventFilter(QObject):
'''
Application-level event filter for global UI zoom shortcuts.
This filter intercepts keyboard events BEFORE they reach widgets,
allowing us to implement global UI zoom shortcuts that take precedence
over widget-specific shortcuts.
Shortcuts:
- Ctrl+Shift+Plus/Equal: Zoom in
- Ctrl+Shift+Minus: Zoom out
- Ctrl+Shift+0: Reset zoom
'''
def __init__(self, main_window: MainWindow):
super().__init__()
self.main_window = main_window
def eventFilter(self, obj: QObject, event: QEvent) -> bool:
'''
Filter keyboard events for global zoom shortcuts.
Returns True to filter out (consume) the event, False to pass through.
'''
if event.type() == QEvent.Type.KeyPress:
key = event.key()
mods = event.modifiers()
# Mask out the KeypadModifier which Qt sometimes adds
mods = mods & ~Qt.KeyboardModifier.KeypadModifier
# Check if we have Ctrl+Shift (both required)
has_ctrl = bool(
mods
&
Qt.KeyboardModifier.ControlModifier
)
_has_shift = bool(
mods
&
Qt.KeyboardModifier.ShiftModifier
)
# Only handle UI zoom if BOTH Ctrl and Shift are pressed
# For Plus key: user presses Cmd+Shift+Equal (which makes Plus)
# For Minus key: user presses Cmd+Shift+Minus
if (
has_ctrl
# and
# has_shift
):
# Zoom in: Ctrl+Shift+Plus
# Note: Plus key usually comes as Key_Equal with Shift modifier
if key in (
Qt.Key.Key_Plus,
Qt.Key.Key_Equal,
):
self.main_window.zoom_in()
return True # consume event
# Zoom out: Ctrl+Shift+Minus
# Note: On some keyboards Shift+Minus produces '_' (Underscore)
elif key in (
Qt.Key.Key_Minus,
Qt.Key.Key_Underscore,
):
self.main_window.zoom_out()
return True # consume event
# Reset zoom: Ctrl+Shift+0
# Note: On some keyboards Shift+0 produces ')' (ParenRight)
elif key in (
Qt.Key.Key_0,
Qt.Key.Key_ParenRight,
):
self.main_window.reset_zoom()
return True # consume event
# Pass through if only Ctrl (no Shift) - this goes to chart zoom
# Pass through all other events too
return False
return False
class MultiStatus: class MultiStatus:
bar: QStatusBar bar: QStatusBar
@ -282,24 +189,6 @@ class MainWindow(QMainWindow):
self.restoreGeometry(geometry) self.restoreGeometry(geometry)
log.debug('Restored window geometry from previous session') log.debug('Restored window geometry from previous session')
# zoom level for UI scaling (1.0 = 100%, 1.5 = 150%, etc)
# Change this value to set the default startup zoom level
self._zoom_level: float = 1.0 # Start at 100% (normal)
self._min_zoom: float = 0.5
self._max_zoom: float = 3.0 # Reduced from 10.0 to prevent extreme cropping
self._zoom_step: float = 0.2 # 20% per keypress
# event filter for global zoom shortcuts
self._zoom_filter: GlobalZoomEventFilter | None = None
def install_global_zoom_filter(self) -> None:
'''Install application-level event filter for global UI zoom shortcuts.'''
if self._zoom_filter is None:
self._zoom_filter = GlobalZoomEventFilter(self)
app = QApplication.instance()
app.installEventFilter(self._zoom_filter)
log.info('Installed global zoom shortcuts: Ctrl+Shift+Plus/Minus/0')
@property @property
def mode_label(self) -> QLabel: def mode_label(self) -> QLabel:
@ -342,10 +231,7 @@ class MainWindow(QMainWindow):
log.debug('Saved window geometry for next session') log.debug('Saved window geometry for next session')
# raising KBI seems to get intercepted by by Qt so just use the system. # raising KBI seems to get intercepted by by Qt so just use the system.
os.kill( os.kill(os.getpid(), signal.SIGINT)
os.getpid(),
signal.SIGINT,
)
@property @property
def status_bar(self) -> QStatusBar: def status_bar(self) -> QStatusBar:
@ -471,262 +357,14 @@ class MainWindow(QMainWindow):
self.godwidget.on_win_resize(event) self.godwidget.on_win_resize(event)
event.accept() event.accept()
def zoom_in(self) -> None:
'''
Increase overall UI-widgets zoom level by scaling it the
global font sizes.
'''
new_zoom: float = min(
self._zoom_level + self._zoom_step,
self._max_zoom,
)
if new_zoom != self._zoom_level:
self._zoom_level = new_zoom
font_size: int = self._apply_zoom()
log.info(
f'Zoomed in UI\n'
f'zoom_step: {self._zoom_step!r}\n'
f'zoom_level(%): {self._zoom_level:.1%}\n'
f'font_size: {font_size!r}'
)
def zoom_out(self) -> float:
'''
Decrease UI zoom level.
'''
new_zoom: float = max(self._zoom_level - self._zoom_step, self._min_zoom)
if new_zoom != self._zoom_level:
self._zoom_level = new_zoom
font_size: int = self._apply_zoom()
log.info(
f'Zoomed out UI\n'
f'zoom_step: {self._zoom_step!r}\n'
f'zoom_level(%): {self._zoom_level:.1%}\n'
f'font_size: {font_size!r}'
)
return new_zoom
def reset_zoom(self) -> None:
'''
Reset UI zoom to 100%.
'''
if self._zoom_level != 1.0:
self._zoom_level = 1.0
font_size: int = self._apply_zoom()
log.info(
f'Reset zoom level\n'
f'zoom_step: {self._zoom_step!r}\n'
f'zoom_level(%): {self._zoom_level:.1%}\n'
f'font_size: {font_size!r}'
)
return self._zoom_level
def _apply_zoom(self) -> int:
'''
Apply current zoom level to all UI elements.
'''
# reconfigure fonts with zoom multiplier
font_size: int = _style._config_fonts_to_screen(
zoom_level=self._zoom_level
)
# update status bar styling with new font size
if self._status_bar:
sb = self.statusBar()
sb.setStyleSheet((
f"color : {hcolor('gunmetal')};"
f"background : {hcolor('default_dark')};"
f"font-size : {_style._font_small.px_size}px;"
"padding : 0px;"
))
# force update of mode label if it exists
if self._status_label:
self._status_label.setFont(_style._font_small.font)
# update godwidget and its children
if self.godwidget:
# update search bg if it exists
if search := getattr(self.godwidget, 'search', None):
search.update_fonts()
# update order mode panes in all chart views
self._update_chart_order_panes()
# recursively update all other widgets with stylesheets
self._refresh_widget_fonts(self.godwidget)
self.godwidget.update()
return font_size
def _update_chart_order_panes(self) -> None:
'''
Update order entry panels in all charts.
'''
if not self.godwidget:
return
# iterate through all linked splits (hist and rt)
for splits_name in [
'hist_linked',
'rt_linked',
]:
splits = getattr(self.godwidget, splits_name, None)
if not splits:
continue
# get main chart
chart = getattr(splits, 'chart', None)
if chart:
# update axes
self._update_chart_axes(chart)
# update order pane
if (
(view := getattr(chart, 'view', None))
and
(order_mode := getattr(view, 'order_mode', None))
and
(pane := getattr(order_mode, 'pane', None))
):
pane.update_fonts()
# also check subplots
subplots = getattr(splits, 'subplots', {})
for name, subplot_chart in subplots.items():
# update subplot axes
self._update_chart_axes(subplot_chart)
# update subplot order pane
if (
(view := getattr(subplot_chart, 'view', None))
and
(order_mode := getattr(
view, 'order_mode', None,
))
and
(pane := getattr(order_mode, 'pane', None))
):
pane.update_fonts()
# resize all sidepanes to match the
# main chart's sidepane width; ensures
# volume/subplot sidepanes match.
if (
splits
and
(resize := getattr(
splits, 'resize_sidepanes', None,
))
):
resize()
def _update_chart_axes(self, chart) -> None:
'''
Update axis fonts and sizing for a chart.
'''
from . import _style
# update price axis (right side)
if plot_item := getattr(chart, 'pi', None):
# get all axes from plot item
for axis_name in [
'left',
'right',
'bottom',
'top',
]:
if (
(axis := plot_item.getAxis(axis_name))
and
(update_fonts := getattr(
axis, 'update_fonts', None,
))
):
update_fonts(_style._font)
# force plot item to recalculate
# its entire layout
plot_item.updateGeometry()
# force chart widget to update
if update_geom := getattr(chart, 'updateGeometry', None):
update_geom()
# trigger a full scene update
if update := getattr(chart, 'update', None):
update()
def _refresh_widget_fonts(
self,
widget: QWidget,
) -> None:
'''
Recursively update font sizes in all
child widgets.
Handles widgets that have font-size
hardcoded in their stylesheets.
'''
from . import _style
# recursively process all children
for child in widget.findChildren(QWidget):
# skip widgets that have custom update
# methods; handled separately below.
if getattr(child, 'update_fonts', None):
log.debug(
f'Skipping sub-widget with'
f' custom font-update meth..\n'
f'{child!r}\n'
)
continue
# update child's stylesheet if it has font-size
child_stylesheet = child.styleSheet()
if child_stylesheet and 'font-size' in child_stylesheet:
# for labels and simple widgets, regenerate stylesheet
# this is a heuristic - may need refinement
try:
child.setFont(_style._font.font)
except (
AttributeError,
RuntimeError,
):
log.exception(
'Failed to update sub-widget font?\n'
)
# update child's font
try:
child.setFont(_style._font.font)
except (
AttributeError,
RuntimeError,
):
log.exception(
'Failed to update sub-widget font?\n'
)
# singleton app per actor # singleton app per actor
_qt_win: QMainWindow|None = None _qt_win: QMainWindow = None
def main_window() -> MainWindow: def main_window() -> MainWindow:
''' 'Return the actor-global Qt window.'
Return the actor-global Qt window.
'''
global _qt_win global _qt_win
assert _qt_win assert _qt_win
return _qt_win return _qt_win

View File

@ -167,7 +167,7 @@ async def stream_symbol_selection():
async def _async_main( async def _async_main(
name: str, name: str,
portal: tractor.Portal, portal: tractor._portal.Portal,
symbols: List[str], symbols: List[str],
brokermod: ModuleType, brokermod: ModuleType,
loglevel: str = 'info', loglevel: str = 'info',

View File

@ -436,7 +436,7 @@ class OptionChain(object):
async def new_chain_ui( async def new_chain_ui(
portal: tractor.Portal, portal: tractor._portal.Portal,
symbol: str, symbol: str,
brokermod: types.ModuleType, brokermod: types.ModuleType,
rate: int = 1, rate: int = 1,

View File

@ -1022,22 +1022,13 @@ async def open_order_mode(
started.set() started.set()
for oid, msg in ems_dialog_msgs.items(): for oid, msg in ems_dialog_msgs.items():
# HACK ALERT: ensure a resp field is filled out since # HACK ALERT: ensure a resp field is filled out since
# techincally the call below expects a ``Status``. TODO: # techincally the call below expects a ``Status``. TODO:
# parse into proper ``Status`` equivalents ems-side? # parse into proper ``Status`` equivalents ems-side?
# msg.setdefault('resp', msg['broker_details']['resp']) # msg.setdefault('resp', msg['broker_details']['resp'])
# msg.setdefault('oid', msg['broker_details']['oid']) # msg.setdefault('oid', msg['broker_details']['oid'])
ya_msg: dict = msg.setdefault( msg['brokerd_msg'] = msg
'brokerd_msg',
msg,
)
if msg is not ya_msg:
log.warning(
f'A `.brokerd_msg` was already set for ems-dialog msg?\n'
f'oid: {oid!r}\n'
f'ya_msg: {ya_msg!r}\n'
f'msg: {ya_msg!r}\n'
)
await process_trade_msg( await process_trade_msg(
mode, mode,

View File

@ -42,7 +42,6 @@ from PyQt6.QtCore import (
QSize, QSize,
QModelIndex, QModelIndex,
QItemSelectionModel, QItemSelectionModel,
QObject,
pyqtBoundSignal, pyqtBoundSignal,
pyqtRemoveInputHook, pyqtRemoveInputHook,
QSettings, QSettings,

1263
poetry.lock generated 100644

File diff suppressed because it is too large Load Diff

View File

@ -106,7 +106,7 @@ default-groups = [
[dependency-groups] [dependency-groups]
uis = [ uis = [
"pyqtgraph >= 0.14.0", "pyqtgraph",
"qdarkstyle >=3.0.2, <4.0.0", "qdarkstyle >=3.0.2, <4.0.0",
"pyqt6 >=6.7.0, <7.0.0", "pyqt6 >=6.7.0, <7.0.0",
@ -193,12 +193,9 @@ include = ["piker"]
[tool.uv.sources] [tool.uv.sources]
pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" } tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
pyvnc = { git = "https://github.com/regulad/pyvnc.git" } pyvnc = { git = "https://github.com/regulad/pyvnc.git" }
# pyqtgraph = { git = "https://github.com/pyqtgraph/pyqtgraph.git", branch = 'master' }
# pyqtgraph = { path = '../pyqtgraph', editable = true }
# ?TODO, resync our fork?
# pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
# to get fancy next-cmd/suggestion feats prior to 0.22.2 B) # to get fancy next-cmd/suggestion feats prior to 0.22.2 B)
# https://github.com/xonsh/xonsh/pull/6037 # https://github.com/xonsh/xonsh/pull/6037
@ -206,8 +203,8 @@ pyvnc = { git = "https://github.com/regulad/pyvnc.git" }
# xonsh = { git = 'https://github.com/xonsh/xonsh.git', branch = 'main' } # xonsh = { git = 'https://github.com/xonsh/xonsh.git', branch = 'main' }
# XXX since, we're like, always hacking new shite all-the-time. Bp # XXX since, we're like, always hacking new shite all-the-time. Bp
# tractor = { git = "https://github.com/goodboy/tractor.git", branch ="main" } tractor = { git = "https://github.com/goodboy/tractor.git", branch ="main" }
# tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" } # tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" }
# ------ goodboy ------ # ------ goodboy ------
# hackin dev-envs, usually there's something new he's hackin in.. # hackin dev-envs, usually there's something new he's hackin in..
tractor = { path = "../tractor", editable = true } # tractor = { path = "../tractor", editable = true }

View File

@ -1,64 +0,0 @@
#!env xonsh
'''
Compute the pxs-per-inch (PPI) naively for the local DE.
NOTE, currently this only supports the `sway`-TWM on wayland.
!TODO!
- [ ] support Xorg (and possibly other OSs as well?
- [ ] conver this to pure py code, dropping the `.xsh` specifics
instead for `subprocess` API calls?
- [ ] possibly unify all this with `./qt_screen_info.py` as part of
a "PPI config wizard" or something, but more then likely we'll
have lib-ified version inside modden/piker by then?
'''
import math
import json
# XXX, xonsh part using "subprocess mode"
disp_infos: list[dict] = json.loads($(wlr-randr --json))
lappy: dict = disp_infos[0]
dims: dict[str, int] = lappy['physical_size']
w_cm: int = dims['width']
h_cm: int = dims['height']
# cm per inch
cpi: float = 25.4
# compute "diagonal" size (aka hypot)
diag_inches: float = math.sqrt((h_cm/cpi)**2 + (w_cm/cpi)**2)
# compute reso-hypot / inches-hypot
hi_res: dict[str, float|bool] = lappy['modes'][0]
w_px: int = hi_res['width']
h_px: int = hi_res['height']
diag_pxs: float = math.sqrt(h_px**2 + w_px**2)
unscaled_ppi: float = diag_pxs/diag_inches
# retrieve TWM info on the display (including scaling info)
sway_disp_info: dict = json.loads($(swaymsg -r -t get_outputs))[0]
scale: float = sway_disp_info['scale']
print(
f'output: {sway_disp_info["name"]!r}\n'
f'--- DIMENSIONS ---\n'
f'w_cm: {w_cm!r}\n'
f'h_cm: {h_cm!r}\n'
f'w_px: {w_px!r}\n'
f'h_cm: {h_px!r}\n'
f'\n'
f'--- DIAGONALS ---\n'
f'diag_inches: {diag_inches!r}\n'
f'diag_pxs: {diag_pxs!r}\n'
f'\n'
f'--- PPI-related-info ---\n'
f'(DE reported) scale: {scale!r}\n'
f'unscaled PPI: {unscaled_ppi!r}\n'
f'|_ =sqrt(h_px**2 + w_px**2) / sqrt(h_in**2 + w_in**2)\n'
f'scaled PPI: {unscaled_ppi/scale!r}\n'
f'|_ =unscaled_ppi/scale\n'
)

View File

@ -31,8 +31,8 @@ Resource list for mucking with DPIs on multiple screens:
- https://doc.qt.io/qt-5/qguiapplication.html#screenAt - https://doc.qt.io/qt-5/qguiapplication.html#screenAt
''' '''
import os
from pyqtgraph import QtGui
from PyQt6 import ( from PyQt6 import (
QtCore, QtCore,
QtWidgets, QtWidgets,
@ -43,11 +43,6 @@ from PyQt6.QtCore import (
QSize, QSize,
QRect, QRect,
) )
from pyqtgraph import QtGui
# https://doc.qt.io/qt-6/highdpi.html#environment-variable-reference
os.environ['QT_USE_PHYSICAL_DPI'] = '1'
# Proper high DPI scaling is available in Qt >= 5.6.0. This attibute # Proper high DPI scaling is available in Qt >= 5.6.0. This attibute
# must be set before creating the application # must be set before creating the application
@ -63,22 +58,13 @@ if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
True, True,
) )
# NOTE, inherits `QGuiApplication`
# https://doc.qt.io/qt-6/qapplication.html
# https://doc.qt.io/qt-6/qguiapplication.html
app = QtWidgets.QApplication([]) app = QtWidgets.QApplication([])
#
# ^TODO? various global DPI settings?
# [ ] DPI rounding policy,
# - https://doc.qt.io/qt-6/qt.html#HighDpiScaleFactorRoundingPolicy-enum
# - https://doc.qt.io/qt-6/qguiapplication.html#setHighDpiScaleFactorRoundingPolicy
window = QtWidgets.QMainWindow() window = QtWidgets.QMainWindow()
main_widget = QtWidgets.QWidget() main_widget = QtWidgets.QWidget()
window.setCentralWidget(main_widget) window.setCentralWidget(main_widget)
window.show() window.show()
_main_pxr: float = main_widget.devicePixelRatioF() pxr: float = main_widget.devicePixelRatioF()
# explicitly get main widget and primary displays # explicitly get main widget and primary displays
current_screen: QtGui.QScreen = app.screenAt( current_screen: QtGui.QScreen = app.screenAt(
@ -91,13 +77,7 @@ for screen in app.screens():
name: str = screen.name() name: str = screen.name()
model: str = screen.model().rstrip() model: str = screen.model().rstrip()
size: QSize = screen.size() size: QSize = screen.size()
geo: QRect = screen.geometry() geo: QRect = screen.availableGeometry()
# device-pixel-ratio
# https://doc.qt.io/qt-6/highdpi.html
pxr: float = screen.devicePixelRatio()
unscaled_size: QSize = pxr * size
phydpi: float = screen.physicalDotsPerInch() phydpi: float = screen.physicalDotsPerInch()
logdpi: float = screen.logicalDotsPerInch() logdpi: float = screen.logicalDotsPerInch()
is_primary: bool = screen is primary_screen is_primary: bool = screen is primary_screen
@ -108,12 +88,11 @@ for screen in app.screens():
f'|_primary: {is_primary}\n' f'|_primary: {is_primary}\n'
f' _current: {is_current}\n' f' _current: {is_current}\n'
f' _model: {model}\n' f' _model: {model}\n'
f' _size: {size}\n' f' _screen size: {size}\n'
f' _geometry: {geo}\n' f' _screen geometry: {geo}\n'
f' _devicePixelRatio(): {pxr}\n' f' _devicePixelRationF(): {pxr}\n'
f' _unscaled-size: {unscaled_size!r}\n' f' _physical dpi: {phydpi}\n'
f' _physical-dpi: {phydpi}\n' f' _logical dpi: {logdpi}\n'
f' _logical-dpi: {logdpi}\n'
) )
# app-wide font info # app-wide font info
@ -131,8 +110,8 @@ str_w: int = str_br.width()
print( print(
f'------ global font settings ------\n' f'------ global font settings ------\n'
f'font dpi: {fontdpi!r}\n' f'font dpi: {fontdpi}\n'
f'font height: {font_h!r}\n' f'font height: {font_h}\n'
f'string bounding rect: {str_br!r}\n' f'string bounding rect: {str_br}\n'
f'string width : {str_w!r}\n' f'string width : {str_w}\n'
) )

View File

@ -92,7 +92,8 @@ def log(
@acm @acm
async def _open_test_pikerd( async def _open_test_pikerd(
tmpconfdir: str, tmpconfdir: str,
reg_addr: tuple[str, int|str],
reg_addr: tuple[str, int] | None = None,
loglevel: str = 'warning', loglevel: str = 'warning',
debug_mode: bool = False, debug_mode: bool = False,
@ -112,10 +113,16 @@ async def _open_test_pikerd(
to boot the root actor / tractor runtime. to boot the root actor / tractor runtime.
''' '''
import random
from piker.service import maybe_open_pikerd from piker.service import maybe_open_pikerd
if reg_addr is None:
port = random.randint(6e3, 7e3)
reg_addr = ('127.0.0.1', port)
async with ( async with (
maybe_open_pikerd( maybe_open_pikerd(
registry_addrs=[reg_addr], registry_addr=reg_addr,
loglevel=loglevel, loglevel=loglevel,
tractor_runtime_overrides={ tractor_runtime_overrides={
@ -132,14 +139,13 @@ async def _open_test_pikerd(
async with tractor.wait_for_actor( async with tractor.wait_for_actor(
'pikerd', 'pikerd',
registry_addr=reg_addr, arbiter_sockaddr=reg_addr,
) as portal: ) as portal:
raddr = portal.chan.raddr raddr = portal.channel.raddr
uw_raddr: tuple = raddr.unwrap() assert raddr == reg_addr
assert uw_raddr == reg_addr
yield ( yield (
raddr._host, raddr[0],
raddr._port, raddr[1],
portal, portal,
service_manager, service_manager,
) )
@ -196,10 +202,7 @@ def open_test_pikerd(
request: pytest.FixtureRequest, request: pytest.FixtureRequest,
tmp_path: Path, tmp_path: Path,
tmpconfdir: Path, tmpconfdir: Path,
# XXX from `tractor._testing.pytest` plugin
loglevel: str, loglevel: str,
reg_addr: tuple,
): ):
tmpconfdir_str: str = str(tmpconfdir) tmpconfdir_str: str = str(tmpconfdir)
@ -233,13 +236,10 @@ def open_test_pikerd(
# bwitout clobbering each other's config state. # bwitout clobbering each other's config state.
tmpconfdir=tmpconfdir_str, tmpconfdir=tmpconfdir_str,
# NOTE these come verbatim from `tractor`'s builtin plugin! # bind in level from fixture, which is itself set by
# # `--ll <value>` cli flag.
# per-tpt compat registrar address.
reg_addr=reg_addr,
# bind in level from fixture.
# (can be set with `--ll <value>` flag to `pytest`).
loglevel=loglevel, loglevel=loglevel,
debug_mode=debug_mode, debug_mode=debug_mode,
) )

View File

@ -1,36 +0,0 @@
import pytest
from piker.ui._style import DpiAwareFont
class MockScreen:
    '''
    Minimal stand-in for a Qt ``QScreen`` exposing only the DPI
    accessors exercised by the font-sizing test below.

    Original structure was whitespace-mangled (flat, no indentation);
    this restores a syntactically valid class body with the same
    methods and values.

    '''
    def __init__(self, pdpi, ldpi, name="MockScreen"):
        # physical and logical dots-per-inch values to report
        self._pdpi = pdpi
        self._ldpi = ldpi
        self._name = name

    def physicalDotsPerInch(self):
        return self._pdpi

    def logicalDotsPerInch(self):
        return self._ldpi

    def name(self):
        return self._name
@pytest.mark.parametrize(
    "pdpi, ldpi, expected_px",
    [
        (96, 96, 9),    # normal DPI
        (169, 96, 15),  # HiDPI
        (120, 96, 10),  # mid-DPI
    ]
)
def test_font_px_size(pdpi, ldpi, expected_px):
    '''
    Verify `DpiAwareFont` maps a screen's (physical, logical) DPI
    pair to the expected computed pixel size.

    Original structure was whitespace-mangled (flat, no indentation);
    this restores a syntactically valid test body with identical
    statements and parametrization.

    '''
    font = DpiAwareFont()
    # feed a fake screen reporting the parametrized DPI values
    font.configure_to_dpi(screen=MockScreen(pdpi, ldpi))
    px = font.px_size
    print(f"{pdpi}x{ldpi} DPI -> Computed pixel size: {px}")
    assert px == expected_px

View File

@ -23,35 +23,13 @@ from piker.accounting import (
'fqmes', 'fqmes',
[ [
# binance # binance
(100, { (100, {'btcusdt.binance', 'ethusdt.binance'}, False),
# !TODO, write a suite which validates raising against
# bad/legacy fqmes such as this!
# 'btcusdt.binance',
'btcusdt.spot.binance',
'ethusdt.spot.binance',
}, False),
# kraken # kraken
(20, { (20, {'ethusdt.kraken', 'xbtusd.kraken'}, True),
# !TODO, write a suite which validates raising against
# bad/legacy fqmes such as this!
# 'ethusdt.kraken',
# 'xbtusd.kraken',
'ethusdt.spot.kraken',
'xbtusd.spot.kraken',
}, True),
# binance + kraken # binance + kraken
(100, { (100, {'btcusdt.binance', 'xbtusd.kraken'}, False),
# !TODO, write a suite which validates raising against
# bad/legacy fqmes such as this!
# 'btcusdt.binance',
# 'xbtusd.kraken',
'btcusdt.spot.binance',
'xbtusd.spot.kraken',
}, False),
], ],
ids=lambda param: f'quotes={param[0]}@fqmes={param[1]}', ids=lambda param: f'quotes={param[0]}@fqmes={param[1]}',
) )
@ -70,17 +48,12 @@ def test_multi_fqsn_feed(
if ( if (
ci_env ci_env
and and not run_in_ci
not run_in_ci
): ):
pytest.skip( pytest.skip('Skipping CI disabled test due to feed restrictions')
'CI-disabled-test due to live-feed restrictions'
)
brokers = set() brokers = set()
for fqme in fqmes: for fqme in fqmes:
# ?TODO, add this unpack + normalize check to a symbology
# helper fn?
brokername, *_ = unpack_fqme(fqme) brokername, *_ = unpack_fqme(fqme)
brokers.add(brokername) brokers.add(brokername)

58
uv.lock
View File

@ -1034,7 +1034,7 @@ requires-dist = [
{ name = "tomli", specifier = ">=2.0.1,<3.0.0" }, { name = "tomli", specifier = ">=2.0.1,<3.0.0" },
{ name = "tomli-w", specifier = ">=1.0.0,<2.0.0" }, { name = "tomli-w", specifier = ">=1.0.0,<2.0.0" },
{ name = "tomlkit", git = "https://github.com/pikers/tomlkit.git?branch=piker_pin" }, { name = "tomlkit", git = "https://github.com/pikers/tomlkit.git?branch=piker_pin" },
{ name = "tractor", editable = "../tractor" }, { name = "tractor", git = "https://github.com/goodboy/tractor.git?branch=main" },
{ name = "trio", specifier = ">=0.27" }, { name = "trio", specifier = ">=0.27" },
{ name = "trio-typing", specifier = ">=0.10.0" }, { name = "trio-typing", specifier = ">=0.10.0" },
{ name = "trio-util", specifier = ">=0.7.0,<0.8.0" }, { name = "trio-util", specifier = ">=0.7.0,<0.8.0" },
@ -1055,7 +1055,7 @@ dev = [
{ name = "prompt-toolkit", specifier = "==3.0.40" }, { name = "prompt-toolkit", specifier = "==3.0.40" },
{ name = "pyperclip", specifier = ">=1.9.0" }, { name = "pyperclip", specifier = ">=1.9.0" },
{ name = "pyqt6", specifier = ">=6.7.0,<7.0.0" }, { name = "pyqt6", specifier = ">=6.7.0,<7.0.0" },
{ name = "pyqtgraph", specifier = ">=0.14.0" }, { name = "pyqtgraph", git = "https://github.com/pikers/pyqtgraph.git" },
{ name = "pytest" }, { name = "pytest" },
{ name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" }, { name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" },
{ name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" }, { name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" },
@ -1073,7 +1073,7 @@ repl = [
testing = [{ name = "pytest" }] testing = [{ name = "pytest" }]
uis = [ uis = [
{ name = "pyqt6", specifier = ">=6.7.0,<7.0.0" }, { name = "pyqt6", specifier = ">=6.7.0,<7.0.0" },
{ name = "pyqtgraph", specifier = ">=0.14.0" }, { name = "pyqtgraph", git = "https://github.com/pikers/pyqtgraph.git" },
{ name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" }, { name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" },
{ name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" }, { name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" },
] ]
@ -1365,15 +1365,11 @@ wheels = [
[[package]] [[package]]
name = "pyqtgraph" name = "pyqtgraph"
version = "0.14.0" version = "0.12.3"
source = { registry = "https://pypi.org/simple" } source = { git = "https://github.com/pikers/pyqtgraph.git#373f9561ea8ec4fef9b4e8bdcdd4bbf372dd6512" }
dependencies = [ dependencies = [
{ name = "colorama" },
{ name = "numpy" }, { name = "numpy" },
] ]
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/36/4c242f81fdcbfa4fb62a5645f6af79191f4097a0577bd5460c24f19cc4ef/pyqtgraph-0.14.0-py3-none-any.whl", hash = "sha256:7abb7c3e17362add64f8711b474dffac5e7b0e9245abdf992e9a44119b7aa4f5", size = 1924755, upload-time = "2025-11-16T19:43:22.251Z" },
]
[[package]] [[package]]
name = "pyreadline3" name = "pyreadline3"
@ -1680,7 +1676,7 @@ wheels = [
[[package]] [[package]]
name = "tractor" name = "tractor"
version = "0.1.0a6.dev0" version = "0.1.0a6.dev0"
source = { editable = "../tractor" } source = { git = "https://github.com/goodboy/tractor.git?branch=main#e77198bb64f0467a50e251ed140daee439752354" }
dependencies = [ dependencies = [
{ name = "bidict" }, { name = "bidict" },
{ name = "cffi" }, { name = "cffi" },
@ -1693,48 +1689,6 @@ dependencies = [
{ name = "wrapt" }, { name = "wrapt" },
] ]
[package.metadata]
requires-dist = [
{ name = "bidict", specifier = ">=0.23.1" },
{ name = "cffi", specifier = ">=1.17.1" },
{ name = "colorlog", specifier = ">=6.8.2,<7" },
{ name = "msgspec", specifier = ">=0.19.0" },
{ name = "pdbp", specifier = ">=1.8.2,<2" },
{ name = "platformdirs", specifier = ">=4.4.0" },
{ name = "tricycle", specifier = ">=0.4.1,<0.5" },
{ name = "trio", specifier = ">0.27" },
{ name = "wrapt", specifier = ">=1.16.0,<2" },
]
[package.metadata.requires-dev]
dev = [
{ name = "greenback", specifier = ">=1.2.1,<2" },
{ name = "pexpect", specifier = ">=4.9.0,<5" },
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
{ name = "psutil", specifier = ">=7.0.0" },
{ name = "pyperclip", specifier = ">=1.9.0" },
{ name = "pytest", specifier = ">=8.3.5" },
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
{ name = "typing-extensions", specifier = ">=4.14.1" },
{ name = "xonsh", specifier = ">=0.22.2" },
]
devx = [
{ name = "greenback", specifier = ">=1.2.1,<2" },
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
{ name = "typing-extensions", specifier = ">=4.14.1" },
]
lint = [{ name = "ruff", specifier = ">=0.9.6" }]
repl = [
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
{ name = "psutil", specifier = ">=7.0.0" },
{ name = "pyperclip", specifier = ">=1.9.0" },
{ name = "xonsh", specifier = ">=0.22.2" },
]
testing = [
{ name = "pexpect", specifier = ">=4.9.0,<5" },
{ name = "pytest", specifier = ">=8.3.5" },
]
[[package]] [[package]]
name = "tricycle" name = "tricycle"
version = "0.4.1" version = "0.4.1"