Compare commits

No commits in common. "310_plus" and "ib_dedicated_data_client" have entirely different histories.

310_plus ... ib_dedicated_data_client
@@ -26,13 +26,6 @@ ports = [
    7497,  # tws
]

# XXX: for a paper account the flex web query service
# is not supported so you have to manually download
# an XML report and put it in a location that can be
# accessed by the ``brokerd.ib`` backend code for parsing.
flex_token = '666666666666666666666666'
flex_trades_query_id = '666666'  # live account

# when clients are being scanned this determines
# which clients are preferred to be used for data
# feeds based on the order of account names, if
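The comments above describe a manual flow for paper accounts: download the flex XML report yourself and point the parser at it. A minimal sketch of both flows, assuming the `load_flex_trades()` helper from the deleted `broker.py` further down and its package path (the local file path below is a made-up placeholder):

    from piker.brokers.ib.broker import load_flex_trades

    # live account: token/query id are read from ``brokers.toml``
    trades = load_flex_trades()

    # paper account: pass the XML report you downloaded by hand
    trades = load_flex_trades(path='~/Downloads/flex_report.xml')
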
@@ -35,7 +35,7 @@ log = get_logger(__name__)

_root_dname = 'pikerd'

_registry_addr = ('127.0.0.1', 6116)
_registry_addr = ('127.0.0.1', 1616)
_tractor_kwargs: dict[str, Any] = {
    # use a different registry addr than tractor's default
    'arbiter_addr': _registry_addr
@@ -426,19 +426,9 @@ async def spawn_brokerd(

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    modpath = brokermod.__name__
    broker_enable = [modpath]
    for submodname in getattr(
        brokermod,
        '__enable_modules__',
        [],
    ):
        subpath = f'{modpath}.{submodname}'
        broker_enable.append(subpath)

    portal = await _services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + broker_enable,
        enable_modules=_data_mods + [brokermod.__name__],
        loglevel=loglevel,
        debug_mode=_services.debug_mode,
        **tractor_kwargs
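To make the removed loop concrete: it expands a backend's `__enable_modules__` list (declared in the package's `__init__.py`, shown deleted below) into fully qualified module paths for `tractor`'s RPC layer. The same expansion in isolation:

    # given a backend package 'piker.brokers.ib' which declares
    # __enable_modules__ = ['api', 'feed', 'broker'], the loop builds:
    modpath = 'piker.brokers.ib'
    broker_enable = [modpath] + [
        f'{modpath}.{sub}' for sub in ['api', 'feed', 'broker']
    ]
    assert broker_enable == [
        'piker.brokers.ib',
        'piker.brokers.ib.api',
        'piker.brokers.ib.feed',
        'piker.brokers.ib.broker',
    ]
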
File diff suppressed because it is too large
@@ -1,67 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Interactive Brokers API backend.

Sub-modules within break down the core functionalities:

- ``broker.py`` for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints

- ``client.py`` for the core API machinery which is ``trio``-ized
  wrapping around ``ib_insync``.

- ``report.py`` for the hackery to build manual pp calcs
  to avoid ib's absolute bullshit FIFO style position
  tracking..

"""
from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import trades_dialogue

__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
File diff suppressed because it is too large
@@ -1,590 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Order and trades endpoints for use with ``piker``'s EMS.

"""
from __future__ import annotations
from dataclasses import asdict
from functools import partial
from pprint import pformat
import time
from typing import (
    Any,
    Optional,
    AsyncIterator,
)

import trio
from trio_typing import TaskStatus
import tractor
from ib_insync.contract import (
    Contract,
    Option,
)
from ib_insync.order import (
    Trade,
    OrderStatus,
)
from ib_insync.objects import (
    Fill,
    Execution,
)
from ib_insync.objects import Position

from piker import config
from piker.log import get_console_log
from piker.clearing._messages import (
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdPosition,
    BrokerdCancel,
    BrokerdFill,
    BrokerdError,
)
from .api import (
    _accounts2clients,
    _adhoc_futes_set,
    log,
    get_config,
    open_client_proxies,
    Client,
)

def pack_position(
    pos: Position

) -> dict[str, Any]:
    con = pos.contract

    if isinstance(con, Option):
        # TODO: option symbol parsing and sane display:
        symbol = con.localSymbol.replace(' ', '')

    else:
        # TODO: lookup fqsn even for derivs.
        symbol = con.symbol.lower()

    exch = (con.primaryExchange or con.exchange).lower()
    symkey = '.'.join((symbol, exch))
    if not exch:
        # attempt to lookup the symbol from our
        # hacked set..
        for sym in _adhoc_futes_set:
            if symbol in sym:
                symkey = sym
                break

    expiry = con.lastTradeDateOrContractMonth
    if expiry:
        symkey += f'.{expiry}'

    # TODO: options contracts into a sane format..

    return BrokerdPosition(
        broker='ib',
        account=pos.account,
        symbol=symkey,
        currency=con.currency,
        size=float(pos.position),
        avg_price=float(pos.avgCost) / float(con.multiplier or 1.0),
    )

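A worked example of the `avg_price` normalization above: IB reports `avgCost` per contract with the multiplier baked in, so dividing it back out yields a per-unit price; for stocks, where `multiplier` is unset, the divisor falls back to `1.0` and `avgCost` passes through unchanged. With hypothetical, made-up values:

    # hypothetical futures position: avgCost includes the 50x multiplier
    avg_cost, multiplier = 225000.0, '50'
    avg_price = avg_cost / float(multiplier or 1.0)
    assert avg_price == 4500.0
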
async def handle_order_requests(

    ems_order_stream: tractor.MsgStream,
    accounts_def: dict[str, str],

) -> None:

    request_msg: dict
    async for request_msg in ems_order_stream:
        log.info(f'Received order request {request_msg}')

        action = request_msg['action']
        account = request_msg['account']

        acct_number = accounts_def.get(account)
        if not acct_number:
            log.error(
                f'An IB account number for name {account} is not found?\n'
                'Make sure you have all TWS and GW instances running.'
            )
            await ems_order_stream.send(BrokerdError(
                oid=request_msg['oid'],
                symbol=request_msg['symbol'],
                reason=f'No account found: `{account}` ?',
            ).dict())
            continue

        client = _accounts2clients.get(account)
        if not client:
            log.error(
                f'An IB client for account name {account} is not found.\n'
                'Make sure you have all TWS and GW instances running.'
            )
            await ems_order_stream.send(BrokerdError(
                oid=request_msg['oid'],
                symbol=request_msg['symbol'],
                reason=f'No api client loaded for account: `{account}` ?',
            ).dict())
            continue

        if action in {'buy', 'sell'}:
            # validate
            order = BrokerdOrder(**request_msg)

            # call our client api to submit the order
            reqid = client.submit_limit(
                oid=order.oid,
                symbol=order.symbol,
                price=order.price,
                action=order.action,
                size=order.size,
                account=acct_number,

                # XXX: by default 0 tells ``ib_insync`` methods that
                # there is no existing order so ask the client to create
                # a new one (which it seems to do by allocating an int
                # counter - collision prone..)
                reqid=order.reqid,
            )
            if reqid is None:
                await ems_order_stream.send(BrokerdError(
                    oid=request_msg['oid'],
                    symbol=request_msg['symbol'],
                    reason='Order already active?',
                ).dict())

            # deliver ack that order has been submitted to broker routing
            await ems_order_stream.send(
                BrokerdOrderAck(
                    # ems order request id
                    oid=order.oid,
                    # broker specific request id
                    reqid=reqid,
                    time_ns=time.time_ns(),
                    account=account,
                ).dict()
            )

        elif action == 'cancel':
            msg = BrokerdCancel(**request_msg)
            client.submit_cancel(reqid=msg.reqid)

        else:
            log.error(f'Unknown order command: {request_msg}')

async def recv_trade_updates(

    client: Client,
    to_trio: trio.abc.SendChannel,

) -> None:
    """Stream order, fill and position events to the trio side.
    """
    client.inline_errors(to_trio)

    # sync with trio task
    to_trio.send_nowait(None)

    def push_tradesies(eventkit_obj, obj, fill=None):
        """Push events to trio task.

        """
        if fill is not None:
            # execution details event
            item = ('fill', (obj, fill))

        elif eventkit_obj.name() == 'positionEvent':
            item = ('position', obj)

        else:
            item = ('status', obj)

        log.info(f'eventkit event ->\n{pformat(item)}')

        try:
            to_trio.send_nowait(item)
        except trio.BrokenResourceError:
            log.exception(f'Disconnected from {eventkit_obj} updates')
            eventkit_obj.disconnect(push_tradesies)

    # hook up to the weird eventkit object - event stream api
    for ev_name in [
        'orderStatusEvent',  # all order updates
        'execDetailsEvent',  # all "fill" updates
        'positionEvent',  # avg price updates per symbol per account

        # 'commissionReportEvent',
        # XXX: ugh, it is a separate event from IB and it's
        # emitted as follows:
        # self.ib.commissionReportEvent.emit(trade, fill, report)

        # XXX: not sure yet if we need these
        # 'updatePortfolioEvent',

        # XXX: these all seem to be weird ib_insync internal
        # events that we probably don't care that much about
        # given the internal design is wonky af..
        # 'newOrderEvent',
        # 'orderModifyEvent',
        # 'cancelOrderEvent',
        # 'openOrderEvent',
    ]:
        eventkit_obj = getattr(client.ib, ev_name)
        handler = partial(push_tradesies, eventkit_obj)
        eventkit_obj.connect(handler)

    # let the engine run and stream
    await client.ib.disconnectedEvent

@tractor.context
async def trades_dialogue(

    ctx: tractor.Context,
    loglevel: str = None,

) -> AsyncIterator[dict[str, Any]]:

    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    accounts_def = config.load_accounts(['ib'])

    global _client_cache

    # deliver positions to subscriber before anything else
    all_positions = []
    accounts = set()
    clients: list[tuple[Client, trio.MemoryReceiveChannel]] = []

    async with (
        trio.open_nursery() as nurse,
        open_client_proxies() as (proxies, aioclients),
    ):
        for account, proxy in proxies.items():

            client = aioclients[account]

            async def open_stream(
                task_status: TaskStatus[
                    trio.abc.ReceiveChannel
                ] = trio.TASK_STATUS_IGNORED,
            ):
                # each api client has a unique event stream
                async with tractor.to_asyncio.open_channel_from(
                    recv_trade_updates,
                    client=client,
                ) as (first, trade_event_stream):

                    task_status.started(trade_event_stream)
                    await trio.sleep_forever()

            trade_event_stream = await nurse.start(open_stream)

            clients.append((client, trade_event_stream))

            assert account in accounts_def
            accounts.add(account)

        for client in aioclients.values():
            for pos in client.positions():

                msg = pack_position(pos)
                msg.account = accounts_def.inverse[msg.account]

                assert msg.account in accounts, (
                    f'Position for unknown account: {msg.account}')

                all_positions.append(msg.dict())

        trades: list[dict] = []
        for proxy in proxies.values():
            trades.append(await proxy.trades())

        log.info(f'Loaded {len(trades)} trades from this session')
        # TODO: write trades to local ``trades.toml``
        # - use above per-session trades data and write to local file
        # - get the "flex reports" working and pull historical data and
        #   also save locally.

        await ctx.started((
            all_positions,
            tuple(name for name in accounts_def if name in accounts),
        ))

        async with (
            ctx.open_stream() as ems_stream,
            trio.open_nursery() as n,
        ):
            # start order request handler **before** local trades event loop
            n.start_soon(handle_order_requests, ems_stream, accounts_def)

            # allocate event relay tasks for each client connection
            for client, stream in clients:
                n.start_soon(
                    deliver_trade_events,
                    stream,
                    ems_stream,
                    accounts_def
                )

            # block until cancelled
            await trio.sleep_forever()

async def deliver_trade_events(

    trade_event_stream: trio.MemoryReceiveChannel,
    ems_stream: tractor.MsgStream,
    accounts_def: dict[str, str],

) -> None:
    '''Format and relay all trade events for a given client to the EMS.

    '''
    action_map = {'BOT': 'buy', 'SLD': 'sell'}

    # TODO: for some reason we can receive a ``None`` here when the
    # ib-gw goes down? Not sure exactly how that's happening looking
    # at the eventkit code above but we should probably handle it...
    async for event_name, item in trade_event_stream:

        log.info(f'ib sending {event_name}:\n{pformat(item)}')

        # TODO: templating the ib statuses in comparison with other
        # brokers is likely the way to go:
        # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313
        # short list:
        # - PendingSubmit
        # - PendingCancel
        # - PreSubmitted (simulated orders)
        # - ApiCancelled (cancelled by client before submission
        #   to routing)
        # - Cancelled
        # - Filled
        # - Inactive (reject or cancelled but not by trader)

        # XXX: here's some other sucky cases from the api
        # - short-sale but securities haven't been located, in this
        #   case we should probably keep the order in some kind of
        #   weird state or cancel it outright?

        # status='PendingSubmit', message=''),
        # status='Cancelled', message='Error 404,
        #   reqId 1550: Order held while securities are located.'),
        # status='PreSubmitted', message='')],

        if event_name == 'status':

            # XXX: begin normalization of nonsense ib_insync internal
            # object-state tracking representations...

            # unwrap needed data from ib_insync internal types
            trade: Trade = item
            status: OrderStatus = trade.orderStatus

            # skip duplicate filled updates - we get the deats
            # from the execution details event
            msg = BrokerdStatus(

                reqid=trade.order.orderId,
                time_ns=time.time_ns(),  # cuz why not
                account=accounts_def.inverse[trade.order.account],

                # everyone doin camel case..
                status=status.status.lower(),  # force lower case

                filled=status.filled,
                reason=status.whyHeld,

                # this seems to not be necessarily up to date in the
                # execDetails event.. so we have to send it here I guess?
                remaining=status.remaining,

                broker_details={'name': 'ib'},
            )

        elif event_name == 'fill':

            # for wtv reason this is a separate event type
            # from IB, not sure why it's needed other than for extra
            # complexity and over-engineering :eyeroll:.
            # we may just end up dropping these events (or
            # translating them to ``Status`` msgs) if we can
            # show the equivalent status events are no more latent.

            # unpack ib_insync types
            # pep-0526 style:
            # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations
            trade: Trade
            fill: Fill
            trade, fill = item
            execu: Execution = fill.execution

            # TODO: normalize out commissions details?
            details = {
                'contract': asdict(fill.contract),
                'execution': asdict(fill.execution),
                'commissions': asdict(fill.commissionReport),
                'broker_time': execu.time,  # supposedly server fill time
                'name': 'ib',
            }

            msg = BrokerdFill(
                # should match the value returned from `.submit_limit()`
                reqid=execu.orderId,
                time_ns=time.time_ns(),  # cuz why not

                action=action_map[execu.side],
                size=execu.shares,
                price=execu.price,

                broker_details=details,
                # XXX: required by order mode currently
                broker_time=details['broker_time'],

            )

        elif event_name == 'error':

            err: dict = item

            # f$#$% gawd dammit insync..
            con = err['contract']
            if isinstance(con, Contract):
                err['contract'] = asdict(con)

            if err['reqid'] == -1:
                log.error(f'TWS external order error:\n{pformat(err)}')

            # TODO: what schema for this msg if we're going to make it
            # portable across all backends?
            # msg = BrokerdError(**err)
            continue

        elif event_name == 'position':
            msg = pack_position(item)
            msg.account = accounts_def.inverse[msg.account]

        elif event_name == 'event':

            # it's either a general system status event or an external
            # trade event?
            log.info(f"TWS system status: \n{pformat(item)}")

            # TODO: support this again but needs parsing at the callback
            # level...
            # reqid = item.get('reqid', 0)
            # if getattr(msg, 'reqid', 0) < -1:
            #     log.info(f"TWS triggered trade\n{pformat(msg.dict())}")

            continue

            # msg.reqid = 'tws-' + str(-1 * reqid)

            # mark msg as from "external system"
            # TODO: probably something better than this.. and start
            # considering multiplayer/group trades tracking
            # msg.broker_details['external_src'] = 'tws'

        # XXX: we always serialize to a dict for msgpack
        # translations, ideally we can move to an msgspec (or other)
        # encoder that can be enabled in ``tractor`` ahead of
        # time so we can pass through the message types directly.
        await ems_stream.send(msg.dict())

def load_flex_trades(
    path: Optional[str] = None,

) -> dict[str, str]:

    from pprint import pprint
    from ib_insync import flexreport, util

    conf = get_config()

    if not path:
        # load ``brokers.toml`` and try to get the flex
        # token and query id that must be previously defined
        # by the user.
        token = conf.get('flex_token')
        if not token:
            raise ValueError(
                'You must specify a ``flex_token`` field in your '
                '``brokers.toml`` in order to load your trade log, see our '
                'instructions for how to set this up here:\n'
                'PUT LINK HERE!'
            )

        qid = conf['flex_trades_query_id']

        # TODO: hack this into our logging
        # system like we do with the API client..
        util.logToConsole()

        # TODO: rewrite the query part of this with async..httpx?
        report = flexreport.FlexReport(
            token=token,
            queryId=qid,
        )

    else:
        # XXX: another project we could potentially look at,
        # https://pypi.org/project/ibflex/
        report = flexreport.FlexReport(path=path)

    trade_entries = report.extract('Trade')
    trades = {
        # XXX: LOL apparently ``toml`` has a bug
        # where a section key error will show up in the write
        # if you leave this as an ``int``?
        str(t.__dict__['tradeID']): t.__dict__
        for t in trade_entries
    }

    ln = len(trades)
    log.info(f'Loaded {ln} trades from flex query')

    trades_by_account = {}
    for tid, trade in trades.items():
        trades_by_account.setdefault(
            # oddly for some so-called "BookTrade" entries
            # this field seems to be blank, no cuckin clue.
            # trade['ibExecID']
            str(trade['accountId']), {}
        )[tid] = trade

    section = {'ib': trades_by_account}
    pprint(section)

    # TODO: load the config first and append in
    # the new trades loaded here..
    try:
        config.write(section, 'trades')
    except KeyError:
        import pdbpp; pdbpp.set_trace()  # noqa


if __name__ == '__main__':
    load_flex_trades()
@@ -1,938 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``.

"""
from __future__ import annotations
import asyncio
from contextlib import asynccontextmanager as acm
from dataclasses import asdict
from datetime import datetime
from math import isnan
import time
from typing import (
    Callable,
    Optional,
    Awaitable,
)

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
import tractor
import trio
from trio_typing import TaskStatus

from piker.data._sharedmem import ShmArray
from .._util import SymbolNotFound, NoData
from .api import (
    _adhoc_futes_set,
    log,
    load_aio_clients,
    ibis,
    MethodProxy,
    open_client_proxies,
    get_preferred_data_client,
    Ticker,
    RequestError,
    Contract,
)

# https://interactivebrokers.github.io/tws-api/tick_types.html
tick_types = {
    77: 'trade',

    # a "utrade" aka an off exchange "unreportable" (dark) vlm:
    # https://interactivebrokers.github.io/tws-api/tick_types.html#rt_volume
    48: 'dark_trade',

    # standard L1 ticks
    0: 'bsize',
    1: 'bid',
    2: 'ask',
    3: 'asize',
    4: 'last',
    5: 'size',
    8: 'volume',

    # ``ib_insync`` already packs these into
    # quotes under the following fields.
    # 55: 'trades_per_min',  # `'tradeRate'`
    # 56: 'vlm_per_min',  # `'volumeRate'`
    # 89: 'shortable',  # `'shortableShares'`
}

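A quick illustration of how this table is consumed (mirroring the lookup done in `normalize()` further down): raw `tickType` integers from `ib_insync` map to piker's tick names, with unknown codes falling back to `'n/a'`:

    raw_ticks = [{'tickType': 48}, {'tickType': 1}, {'tickType': 999}]
    for td in raw_ticks:
        td['type'] = tick_types.get(td['tickType'], 'n/a')
    # -> 'dark_trade', 'bid', 'n/a'
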
@acm
async def open_data_client() -> MethodProxy:
    '''
    Open the first found preferred "data client" as defined in the
    user's ``brokers.toml`` in the ``ib.prefer_data_account`` variable
    and deliver that client wrapped in a ``MethodProxy``.

    '''
    async with (
        open_client_proxies() as (proxies, clients),
    ):
        account_name, client = get_preferred_data_client(clients)
        proxy = proxies.get(f'ib.{account_name}')
        if not proxy:
            raise ValueError(
                f'No preferred data client could be found for {account_name}!'
            )

        yield proxy

@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:
    '''
    History retrieval endpoint - delivers a historical frame callable
    that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.

    '''
    async with open_data_client() as proxy:

        async def get_hist(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[np.ndarray, str]:

            out, fails = await get_bars(proxy, symbol, end_dt=end_dt)

            # TODO: add logic here to handle tradable hours and only grab
            # valid bars in the range
            if out is None:
                # could be trying to retrieve bars over weekend
                log.error(f"Can't grab bars starting at {end_dt}!?!?")
                raise NoData(
                    f'{end_dt}',
                    frame_size=2000,
                )

            bars, bars_array, first_dt, last_dt = out

            # volume cleaning since there's -ve entries,
            # wood luv to know what crookery that is..
            vlm = bars_array['volume']
            vlm[vlm < 0] = 0

            return bars_array, first_dt, last_dt

        # TODO: it seems like we can do async queries for ohlc
        # but getting the order right still isn't working and I'm not
        # quite sure why.. needs some tinkering and probably
        # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
        # we have to do the batch queries on the `asyncio` side?
        yield get_hist, {'erlangs': 1, 'rate': 6}

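A minimal usage sketch of the endpoint above, assuming it is entered from a `trio` task with a valid data client configured (the symbol is a made-up placeholder):

    async with open_history_client('mnq.globex') as (get_hist, rates):
        # most-recent frame first; pass ``end_dt`` to page further back
        bars_array, first_dt, last_dt = await get_hist()
        print(f'{len(bars_array)} bars spanning {first_dt} -> {last_dt}')
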
_pacing: str = (
    'Historical Market Data Service error '
    'message:Historical data request pacing violation'
)

async def get_bars(

    proxy: MethodProxy,
    fqsn: str,

    # blank to start which tells ib to look up the latest datum
    end_dt: str = '',

) -> (dict, np.ndarray):
    '''
    Retrieve historical data from a ``trio``-side task using
    a ``MethodProxy``.

    '''
    fails = 0
    bars: Optional[list] = None
    first_dt: datetime = None
    last_dt: datetime = None

    if end_dt:
        last_dt = pendulum.from_timestamp(end_dt.timestamp())

    for _ in range(10):
        try:
            out = await proxy.bars(
                fqsn=fqsn,
                end_dt=end_dt,
            )
            if out:
                bars, bars_array = out

            else:
                await tractor.breakpoint()

            if bars_array is None:
                raise SymbolNotFound(fqsn)

            first_dt = pendulum.from_timestamp(
                bars[0].date.timestamp())

            last_dt = pendulum.from_timestamp(
                bars[-1].date.timestamp())

            time = bars_array['time']
            assert time[-1] == last_dt.timestamp()
            assert time[0] == first_dt.timestamp()
            log.info(
                f'{len(bars)} bars retrieved for {first_dt} -> {last_dt}'
            )

            return (bars, bars_array, first_dt, last_dt), fails

        except RequestError as err:
            msg = err.message
            # why do we always need to rebind this?
            # _err = err

            if 'No market data permissions for' in msg:
                # TODO: signalling for no permissions searches
                raise NoData(
                    f'Symbol: {fqsn}',
                )

            elif (
                err.code == 162
                and 'HMDS query returned no data' in err.message
            ):
                # XXX: this is now done in the storage mgmt layer
                # and we shouldn't implicitly decrement the frame dt
                # index since the upper layer may be doing so
                # concurrently and we don't want to be delivering frames
                # that weren't asked for.
                log.warning(
                    f'NO DATA found ending @ {end_dt}\n'
                )

                # try to decrement start point and look further back
                # end_dt = last_dt = last_dt.subtract(seconds=2000)

                raise NoData(
                    f'Symbol: {fqsn}',
                    frame_size=2000,
                )

            elif _pacing in msg:

                log.warning(
                    'History throttle rate reached!\n'
                    'Resetting farms with `ctrl-alt-f` hack\n'
                )
                # TODO: we might have to put a task lock around this
                # method..
                hist_ev = proxy.status_event(
                    'HMDS data farm connection is OK:ushmds'
                )

                # XXX: other event messages we might want to try and
                # wait for but i wasn't able to get any of this
                # reliable..
                # reconnect_start = proxy.status_event(
                #     'Market data farm is connecting:usfuture'
                # )
                # live_ev = proxy.status_event(
                #     'Market data farm connection is OK:usfuture'
                # )

                # try to wait on the reset event(s) to arrive, a timeout
                # will trigger a retry up to 6 times (for now).
                tries: int = 2
                timeout: float = 10

                # try 3 times with a data reset then fail over to
                # a connection reset.
                for i in range(1, tries):

                    log.warning('Sending DATA RESET request')
                    await data_reset_hack(reset_type='data')

                    with trio.move_on_after(timeout) as cs:
                        for name, ev in [
                            # TODO: not sure if waiting on other events
                            # is all that useful here or not. in theory
                            # you could wait on one of the ones above
                            # first to verify the reset request was
                            # sent?
                            ('history', hist_ev),
                        ]:
                            await ev.wait()
                            log.info(f"{name} DATA RESET")
                            break

                    if cs.cancelled_caught:
                        fails += 1
                        log.warning(
                            f'Data reset {name} timeout, retrying {i}.'
                        )

                        continue
                else:

                    log.warning('Sending CONNECTION RESET')
                    await data_reset_hack(reset_type='connection')

                    with trio.move_on_after(timeout) as cs:
                        for name, ev in [
                            # TODO: not sure if waiting on other events
                            # is all that useful here or not. in theory
                            # you could wait on one of the ones above
                            # first to verify the reset request was
                            # sent?
                            ('history', hist_ev),
                        ]:
                            await ev.wait()
                            log.info(f"{name} DATA RESET")

                    if cs.cancelled_caught:
                        fails += 1
                        log.warning('Data CONNECTION RESET timeout!?')

            else:
                raise

    return None, None
    # else:  # throttle wasn't fixed so error out immediately
    #     raise _err

async def backfill_bars(

    fqsn: str,
    shm: ShmArray,  # type: ignore # noqa

    # TODO: we want to avoid overrunning the underlying shm array buffer
    # and we should probably calc the number of calls to make depending
    # on that until we have the `marketstore` daemon in place in which
    # case the shm size will be driven by user config and available sys
    # memory.
    count: int = 16,

    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.

    TODO: avoid pacing constraints:
    https://github.com/pikers/piker/issues/128

    '''
    # last_dt1 = None
    last_dt = None

    with trio.CancelScope() as cs:

        async with open_data_client() as proxy:

            out, fails = await get_bars(proxy, fqsn)

            if out is None:
                raise RuntimeError("Could not pull current history?!")

            (first_bars, bars_array, first_dt, last_dt) = out
            vlm = bars_array['volume']
            vlm[vlm < 0] = 0
            last_dt = first_dt

            # write historical data to buffer
            shm.push(bars_array)

            task_status.started(cs)

            i = 0
            while i < count:

                out, fails = await get_bars(proxy, fqsn, end_dt=first_dt)

                if out is None:
                    # could be trying to retrieve bars over weekend
                    # TODO: add logic here to handle tradable hours and
                    # only grab valid bars in the range
                    log.error(f"Can't grab bars starting at {first_dt}!?!?")

                    # XXX: get_bars() should internally decrement dt by
                    # 2k seconds and try again.
                    continue

                (first_bars, bars_array, first_dt, last_dt) = out
                # last_dt1 = last_dt
                # last_dt = first_dt

                # volume cleaning since there's -ve entries,
                # wood luv to know what crookery that is..
                vlm = bars_array['volume']
                vlm[vlm < 0] = 0

                # TODO we should probably dig into forums to see what peeps
                # think this data "means" and then use it as an indicator of
                # sorts? dinkus has mentioned that $vlms for the day don't
                # match other platforms nor the summary stat tws shows in
                # the monitor - it's probably worth investigating.

                shm.push(bars_array, prepend=True)
                i += 1

asset_type_map = {
    'STK': 'stock',
    'OPT': 'option',
    'FUT': 'future',
    'CONTFUT': 'continuous_future',
    'CASH': 'forex',
    'IND': 'index',
    'CFD': 'cfd',
    'BOND': 'bond',
    'CMDTY': 'commodity',
    'FOP': 'futures_option',
    'FUND': 'mutual_fund',
    'WAR': 'warrant',
    'IOPT': 'warrant',
    'BAG': 'bag',
    # 'NEWS': 'news',
}


_quote_streams: dict[str, trio.abc.ReceiveStream] = {}

async def _setup_quote_stream(

    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,

    symbol: str,
    opts: tuple[int] = (
        '375',  # RT trade volume (excludes utrades)
        '233',  # RT trade volume (includes utrades)
        '236',  # Shortable shares

        # these all appear to only be updated every 25s thus
        # making them mostly useless and explains why the scanner
        # is always slow XD
        # '293',  # Trade count for day
        '294',  # Trade rate / minute
        '295',  # Vlm rate / minute
    ),
    contract: Optional[Contract] = None,

) -> trio.abc.ReceiveChannel:
    '''
    Stream a ticker using the std L1 api.

    This task is ``asyncio``-side and must be called from
    ``tractor.to_asyncio.open_channel_from()``.

    '''
    global _quote_streams

    to_trio.send_nowait(None)

    async with load_aio_clients() as accts2clients:
        caccount_name, client = get_preferred_data_client(accts2clients)
        contract = contract or (await client.find_contract(symbol))
        ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))

        # NOTE: it's batch-wise and slow af but I guess could
        # be good for backchecking? Seems to be every 5s maybe?
        # ticker: Ticker = client.ib.reqTickByTickData(
        #     contract, 'Last',
        # )

        # # define a simple queue push routine that streams quote packets
        # # to trio over the ``to_trio`` memory channel.
        # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
        def teardown():
            ticker.updateEvent.disconnect(push)
            log.error(f"Disconnected stream for `{symbol}`")
            client.ib.cancelMktData(contract)

            # decouple broadcast mem chan
            _quote_streams.pop(symbol, None)

        def push(t: Ticker) -> None:
            """
            Push quotes to trio task.

            """
            # log.debug(t)
            try:
                to_trio.send_nowait(t)

            except (
                trio.BrokenResourceError,

                # XXX: HACK, not sure why this gets left stale (probably
                # due to our terrible ``tractor.to_asyncio``
                # implementation for streams.. but if the mem chan
                # gets left here and starts blocking just kill the feed?
                # trio.WouldBlock,
            ):
                # XXX: eventkit's ``Event.emit()`` for whatever redic
                # reason will catch and ignore regular exceptions
                # resulting in tracebacks spammed to console..
                # Manually do the dereg ourselves.
                teardown()
            except trio.WouldBlock:
                log.warning(
                    f'channel is blocking symbol feed for {symbol}?'
                    f'\n{to_trio.statistics}'
                )

            # except trio.WouldBlock:
            #     # for slow debugging purposes to avoid clobbering prompt
            #     # with log msgs
            #     pass

        ticker.updateEvent.connect(push)
        try:
            await asyncio.sleep(float('inf'))
        finally:
            teardown()

        # return from_aio

@acm
async def open_aio_quote_stream(

    symbol: str,
    contract: Optional[Contract] = None,

) -> trio.abc.ReceiveStream:

    from tractor.trionics import broadcast_receiver
    global _quote_streams

    from_aio = _quote_streams.get(symbol)
    if from_aio:

        # if we already have a cached feed deliver a rx side clone
        # to consumer
        async with broadcast_receiver(
            from_aio,
            2**6,
        ) as from_aio:
            yield from_aio
            return

    async with tractor.to_asyncio.open_channel_from(
        _setup_quote_stream,
        symbol=symbol,
        contract=contract,

    ) as (first, from_aio):

        # cache feed for later consumers
        _quote_streams[symbol] = from_aio

        yield from_aio

# TODO: cython/mypyc/numba this!
def normalize(
    ticker: Ticker,
    calc_price: bool = False

) -> dict:

    # should be real volume for this contract by default
    calc_price = False

    # check for special contract types
    con = ticker.contract
    if type(con) in (
        ibis.Commodity,
        ibis.Forex,
    ):
        # commodities and forex don't have an exchange name and
        # no real volume so we have to calculate the price
        suffix = con.secType
        # no real volume on this contract
        calc_price = True

    else:
        suffix = con.primaryExchange
        if not suffix:
            suffix = con.exchange

    # append a `.<suffix>` to the returned symbol
    # key for derivatives that normally is the expiry
    # date key.
    expiry = con.lastTradeDateOrContractMonth
    if expiry:
        suffix += f'.{expiry}'

    # convert named tuples to dicts so we send usable keys
    new_ticks = []
    for tick in ticker.ticks:
        if tick and not isinstance(tick, dict):
            td = tick._asdict()
            td['type'] = tick_types.get(
                td['tickType'],
                'n/a',
            )

            new_ticks.append(td)

    tbt = ticker.tickByTicks
    if tbt:
        print(f'tickbyticks:\n {ticker.tickByTicks}')

    ticker.ticks = new_ticks

    # some contracts don't have volume so we may want to calculate
    # a midpoint price based on data we can acquire (such as bid / ask)
    if calc_price:
        ticker.ticks.append(
            {'type': 'trade', 'price': ticker.marketPrice()}
        )

    # serialize for transport
    data = asdict(ticker)

    # generate fqsn with possible specialized suffix
    # for derivatives, note the lowercase.
    data['symbol'] = data['fqsn'] = '.'.join(
        (con.symbol, suffix)
    ).lower()

    # convert named tuples to dicts for transport
    tbts = data.get('tickByTicks')
    if tbts:
        data['tickByTicks'] = [tbt._asdict() for tbt in tbts]

    # add time stamps for downstream latency measurements
    data['brokerd_ts'] = time.time()

    # stupid stupid shit...don't even care any more..
    # leave it until we do a proper latency study
    # if ticker.rtTime is not None:
    #     data['broker_ts'] = data['rtTime_s'] = float(
    #         ticker.rtTime.timestamp) / 1000.
    data.pop('rtTime')

    return data

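To illustrate the fqsn ("fully qualified symbol name") composition above with made-up contract values: a stock keys as `<symbol>.<exchange>` while a derivative also gets its expiry appended, all lowercased:

    # hypothetical examples of the '.'-joined, lowercased key:
    # stock:   con.symbol='AAPL', primaryExchange='NASDAQ'
    #          -> 'aapl.nasdaq'
    # future:  con.symbol='MNQ', exchange='GLOBEX', expiry='20220617'
    #          -> 'mnq.globex.20220617'
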
async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Stream symbol quotes.

    This is a ``trio`` callable routine meant to be invoked
    once the brokerd is up.

    '''
    # TODO: support multiple subscriptions
    sym = symbols[0]
    log.info(f'request for real-time quotes: {sym}')

    async with open_data_client() as proxy:

        con, first_ticker, details = await proxy.get_sym_details(symbol=sym)
        first_quote = normalize(first_ticker)
        # print(f'first quote: {first_quote}')

        def mk_init_msgs() -> dict[str, dict]:
            '''
            Collect a bunch of meta-data useful for feed startup and
            pack in a `dict`-msg.

            '''
            # pass back some symbol info like min_tick, trading_hours, etc.
            syminfo = asdict(details)
            syminfo.update(syminfo['contract'])

            # nested dataclass we probably don't need and that won't IPC
            # serialize
            syminfo.pop('secIdList')

            # TODO: more consistent field translation
            atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]

            # for stocks it seems TWS reports too small a tick size
            # such that you can't submit orders with that granularity?
            min_tick = 0.01 if atype == 'stock' else 0

            syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)

            # for "traditional" assets, volume is normally discrete, not
            # a float
            syminfo['lot_tick_size'] = 0.0

            ibclient = proxy._aio_ns.ib.client
            host, port = ibclient.host, ibclient.port

            # TODO: for loop through all symbols passed in
            init_msgs = {
                # pass back token, and bool, signalling if we're the writer
                # and that history has been written
                sym: {
                    'symbol_info': syminfo,
                    'fqsn': first_quote['fqsn'],
                },
                'status': {
                    'data_ep': f'{host}:{port}',
                },

            }
            return init_msgs

        init_msgs = mk_init_msgs()

        # TODO: we should instead spawn a task that waits on a feed to start
        # and let it wait indefinitely..instead of this hard coded stuff.
        with trio.move_on_after(1):
            contract, first_ticker, details = await proxy.get_quote(symbol=sym)

        # it might be outside regular trading hours so see if we can at
        # least grab history.
        if isnan(first_ticker.last):
            task_status.started((init_msgs, first_quote))

            # it's not really live but this will unblock
            # the brokerd feed task to tell the ui to update?
            feed_is_live.set()

            # block and let data history backfill code run.
            await trio.sleep_forever()
            return  # we never expect feed to come up?

        async with open_aio_quote_stream(
            symbol=sym,
            contract=con,
        ) as stream:

            # ugh, clear ticks since we've consumed them
            # (ahem, ib_insync is stateful trash)
            first_ticker.ticks = []

            task_status.started((init_msgs, first_quote))

            async with aclosing(stream):
                if type(first_ticker.contract) not in (
                    ibis.Commodity,
                    ibis.Forex
                ):
                    # wait for real volume on feed (trading might be closed)
                    while True:
                        ticker = await stream.receive()

                        # for a real volume contract we wait for the first
                        # "real" trade to take place
                        if (
                            # not calc_price
                            # and not ticker.rtTime
                            not ticker.rtTime
                        ):
                            # spin consuming tickers until we get a real
                            # market datum
                            log.debug(f"New unsent ticker: {ticker}")
                            continue
                        else:
                            log.debug("Received first real volume tick")
                            # ugh, clear ticks since we've consumed them
                            # (ahem, ib_insync is truly stateful trash)
                            ticker.ticks = []

                            # XXX: this works because we don't use
                            # ``aclosing()`` above?
                            break

                quote = normalize(ticker)
                log.debug(f"First ticker received {quote}")

                # tell caller quotes are now coming in live
                feed_is_live.set()

                # last = time.time()
                async for ticker in stream:
                    quote = normalize(ticker)
                    await send_chan.send({quote['fqsn']: quote})

                    # ugh, clear ticks since we've consumed them
                    ticker.ticks = []
                    # last = time.time()

async def data_reset_hack(
    reset_type: str = 'data',

) -> None:
    '''
    Run key combos for resetting data feeds and yield back to caller
    when complete.

    This is a linux-only hack around:

    https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations

    TODOs:
        - a return type that hopefully determines if the hack was
          successful.
        - other OS support?
        - integration with ``ib-gw`` run in docker + Xorg?

    '''

    async def vnc_click_hack(
        reset_type: str = 'data'
    ) -> None:
        '''
        Reset the data or network connection for the VNC attached
        ib gateway using magic combos.

        '''
        key = {'data': 'f', 'connection': 'r'}[reset_type]

        import asyncvnc

        async with asyncvnc.connect(
            'localhost',
            port=3003,
            # password='ibcansmbz',
        ) as client:

            # move to middle of screen
            # 640x1800
            client.mouse.move(
                x=500,
                y=500,
            )
            client.mouse.click()
            client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked

    await tractor.to_asyncio.run_task(vnc_click_hack)

    # we don't really need the ``xdotool`` approach any more B)
    return True

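For reference, the two reset flavors used by `get_bars()` above map to the gateway's Ctrl-Alt-F (data farm) and Ctrl-Alt-R (connection) combos per the `key` table in `vnc_click_hack()`; a caller just awaits one or the other:

    # after a pacing violation, first try the softer data-farm reset:
    await data_reset_hack(reset_type='data')        # sends Ctrl-Alt-F
    # and if history events never resume, reset the whole connection:
    await data_reset_hack(reset_type='connection')  # sends Ctrl-Alt-R
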
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> None:

    # TODO: load user defined symbol set locally for fast search?
    await ctx.started({})

    async with open_data_client() as proxy:
        async with ctx.open_stream() as stream:

            last = time.time()

            async for pattern in stream:
                log.debug(f'received {pattern}')
                now = time.time()

                assert pattern, 'IB can not accept blank search pattern'

                # throttle search requests to no faster than 1Hz
                diff = now - last
                if diff < 1.0:
                    log.debug('throttle sleeping')
                    await trio.sleep(diff)
                    try:
                        pattern = stream.receive_nowait()
                    except trio.WouldBlock:
                        pass

                if not pattern or pattern.isspace():
                    log.warning('empty pattern received, skipping..')

                    # TODO: *BUG* if nothing is returned here the client
                    # side will cache a null set result and not show
                    # anything to the user on re-searches when this query
                    # timed out. We probably need a special "timeout" msg
                    # or something...

                    # XXX: this unblocks the far end search task which may
                    # hold up a multi-search nursery block
                    await stream.send({})

                    continue

                log.debug(f'searching for {pattern}')

                last = time.time()

                # async batch search using api stocks endpoint and module
                # defined adhoc symbol set.
                stock_results = []

                async def stash_results(target: Awaitable[list]):
                    stock_results.extend(await target)

                async with trio.open_nursery() as sn:
                    sn.start_soon(
                        stash_results,
                        proxy.search_symbols(
                            pattern=pattern,
                            upto=5,
                        ),
                    )

                    # trigger async request
                    await trio.sleep(0)

                    # match against our ad-hoc set immediately
                    adhoc_matches = fuzzy.extractBests(
                        pattern,
                        list(_adhoc_futes_set),
                        score_cutoff=90,
                    )
                    log.info(f'fuzzy matched adhocs: {adhoc_matches}')
                    adhoc_match_results = {}
                    if adhoc_matches:
                        # TODO: do we need to pull contract details?
                        adhoc_match_results = {i[0]: {} for i in adhoc_matches}

                log.debug(f'fuzzy matching stocks {stock_results}')
                stock_matches = fuzzy.extractBests(
                    pattern,
                    stock_results,
                    score_cutoff=50,
                )

                matches = adhoc_match_results | {
                    item[0]: {} for item in stock_matches
                }
                # TODO: we used to deliver contract details
                # {item[2]: item[0] for item in stock_matches}

                log.debug(f"sending matches: {matches.keys()}")
                await stream.send(matches)
@@ -21,7 +21,6 @@ Kraken backend.
from contextlib import asynccontextmanager as acm
from dataclasses import asdict, field
from datetime import datetime
from pprint import pformat
from typing import Any, Optional, AsyncIterator, Callable, Union
import time

@@ -570,10 +569,7 @@ async def handle_order_requests(
    order: BrokerdOrder

    async for request_msg in ems_order_stream:
        log.info(
            'Received order request:\n'
            f'{pformat(request_msg)}'
        )
        log.info(f'Received order request {request_msg}')

        action = request_msg['action']

@@ -632,7 +628,6 @@ async def handle_order_requests(
            # update the internal pairing of oid to krakens
            # txid with the new txid that is returned on edit
            reqid = resp['result']['txid']

            # deliver ack that order has been submitted to broker routing
            await ems_order_stream.send(
                BrokerdOrderAck(
@@ -793,10 +788,7 @@ async def trades_dialogue(
    # Get websocket token for authenticated data stream
    # Assert that a token was actually received.
    resp = await client.endpoint('GetWebSocketsToken', {})

    # lol wtf is this..
    assert resp['error'] == []

    token = resp['result']['token']

    async with (

@@ -80,9 +80,7 @@ def mk_check(

        return check_lt

    raise ValueError(
        f'trigger: {trigger_price}, last: {known_last}'
    )
    raise ValueError('trigger: {trigger_price}, last: {known_last}')


@dataclass

@@ -563,10 +561,7 @@ async def translate_and_relay_brokerd_events(

        name = brokerd_msg['name']

        log.info(
            f'Received broker trade event:\n'
            f'{pformat(brokerd_msg)}'
        )
        log.info(f'Received broker trade event:\n{pformat(brokerd_msg)}')

        if name == 'position':

@@ -618,28 +613,19 @@ async def translate_and_relay_brokerd_events(
                # packed at submission since we already know it ahead of
                # time
                paper = brokerd_msg['broker_details'].get('paper_info')
                ext = brokerd_msg['broker_details'].get('external')
                if paper:
                    # paperboi keeps the ems id up front
                    oid = paper['oid']

                elif ext:
                else:
                    # may be an order msg specified as "external" to the
                    # piker ems flow (i.e. generated by some other
                    # external broker backend client (like tws for ib)
                    log.error(f"External trade event {ext}")
                    ext = brokerd_msg['broker_details'].get('external')
                    if ext:
                        log.error(f"External trade event {ext}")

                    continue

                else:
                    # something is out of order, we don't have an oid for
                    # this broker-side message.
                    log.error(
                        'Unknown oid:{oid} for msg:\n'
                        f'{pformat(brokerd_msg)}'
                        'Unable to relay message to client side!?'
                    )

            else:
                # check for existing live flow entry
                entry = book._ems_entries.get(oid)

@@ -837,9 +823,7 @@ async def process_client_order_cmds(
            if reqid:

                # send cancel to brokerd immediately!
                log.info(
                    f'Submitting cancel for live order {reqid}'
                )
                log.info("Submitting cancel for live order {reqid}")

                await brokerd_order_stream.send(msg.dict())

@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -16,7 +16,6 @@

"""
Broker configuration mgmt.

"""
import platform
import sys

@@ -51,7 +50,7 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):

@@ -82,8 +81,7 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
        folder = os.path.expanduser("~")
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.join(
            os.path.expanduser("~/.{}".format(_posixify(app_name))))
        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
    if sys.platform == "darwin":
        return os.path.join(
            os.path.expanduser("~/Library/Application Support"), app_name

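Concretely, this resolver yields per-OS config roots along these lines (illustrative paths, assuming app_name='piker'; the linux default matches the ``~/.config/piker`` path named in the docstring below):

    # get_app_dir('piker') resolves to, e.g.:
    #   linux (XDG):  ~/.config/piker
    #   force_posix:  ~/.piker
    #   macOS:        ~/Library/Application Support/piker
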
@@ -109,12 +107,7 @@ if _parent_user:
        ]
    )

_conf_names: set[str] = {
    'brokers',
    'trades',
    'watchlists',
}

_file_name = 'brokers.toml'
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
_context_defaults = dict(
    default_map={

@@ -136,43 +129,23 @@ def _override_config_dir(
    _config_dir = path


def _conf_fn_w_ext(
    name: str,
) -> str:
    # change this if we ever change the config file format.
    return f'{name}.toml'


def get_conf_path(
    conf_name: str = 'brokers',

) -> str:
def get_broker_conf_path():
    """Return the default config path normally under
    ``~/.config/piker`` on linux.

    Contains files such as:
    - brokers.toml
    - watchlists.toml
    - trades.toml

    # maybe coming soon ;)
    - signals.toml
    - strats.toml

    """
    assert conf_name in _conf_names
    fn = _conf_fn_w_ext(conf_name)
    return os.path.join(
        _config_dir,
        fn,
    )
    return os.path.join(_config_dir, _file_name)


def repodir():
    '''
    Return the abspath to the repo directory.

    '''
    """Return the abspath to the repo directory.
    """
    dirpath = os.path.abspath(
        # we're 3 levels down in **this** module file
        dirname(dirname(os.path.realpath(__file__)))

@ -181,27 +154,16 @@ def repodir():
|
|||
|
||||
|
||||
def load(
|
||||
conf_name: str = 'brokers',
|
||||
path: str = None
|
||||
|
||||
) -> (dict, str):
|
||||
'''
|
||||
Load config file by name.
|
||||
|
||||
'''
|
||||
path = path or get_conf_path(conf_name)
|
||||
"""Load broker config.
|
||||
"""
|
||||
path = path or get_broker_conf_path()
|
||||
if not os.path.isfile(path):
|
||||
fn = _conf_fn_w_ext(conf_name)
|
||||
|
||||
template = os.path.join(
|
||||
repodir(),
|
||||
'config',
|
||||
fn
|
||||
shutil.copyfile(
|
||||
os.path.join(repodir(), 'config', 'brokers.toml'),
|
||||
path,
|
||||
)
|
||||
# try to copy in a template config to the user's directory
|
||||
# if one exists.
|
||||
if os.path.isfile(template):
|
||||
shutil.copyfile(template, path)
|
||||
|
||||
config = toml.load(path)
|
||||
log.debug(f"Read config file {path}")
|
||||
|
@ -210,17 +172,13 @@ def load(
|
|||
|
||||
def write(
|
||||
config: dict, # toml config as dict
|
||||
name: str = 'brokers',
|
||||
path: str = None,
|
||||
|
||||
) -> None:
|
||||
''''
|
||||
Write broker config to disk.
|
||||
"""Write broker config to disk.
|
||||
|
||||
Create a ``brokers.ini`` file if one does not exist.
|
||||
|
||||
'''
|
||||
path = path or get_conf_path(name)
|
||||
"""
|
||||
path = path or get_broker_conf_path()
|
||||
dirname = os.path.dirname(path)
|
||||
if not os.path.isdir(dirname):
|
||||
log.debug(f"Creating config dir {_config_dir}")
|
||||
|
@ -230,10 +188,7 @@ def write(
|
|||
raise ValueError(
|
||||
"Watch out you're trying to write a blank config!")
|
||||
|
||||
log.debug(
|
||||
f"Writing config `{name}` file to:\n"
|
||||
f"{path}"
|
||||
)
|
||||
log.debug(f"Writing config file {path}")
|
||||
with open(path, 'w') as cf:
|
||||
return toml.dump(config, cf)
|
||||
|
||||
|
@ -263,5 +218,4 @@ def load_accounts(
|
|||
|
||||
# our default paper engine entry
|
||||
accounts['paper'] = None
|
||||
|
||||
return accounts
|
||||
|
|
|
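A note on the new API above: ``load()`` keys every config file off a name in ``_conf_names`` and lazily copies a repo template on first read, while ``write()`` persists a mutated dict. A minimal usage sketch under those assumptions (the ``'ib'``/``'hosts'`` keys are illustrative, not part of the diff):

    from piker import config

    # parse (and template-copy on first run) the brokers TOML,
    # returning the config dict plus the resolved file path
    conf, path = config.load('brokers')

    # mutate then persist; ``write()`` raises on an empty dict
    conf.setdefault('ib', {})['hosts'] = ['127.0.0.1']
    config.write(conf, name='brokers', path=path)
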
@@ -19,7 +19,6 @@ Supervisor for docker with included specific-image service helpers.

'''
import os
import time
from typing import (
    Optional,
    Callable,

@@ -99,6 +98,8 @@ async def open_docker(
    finally:
        if client:
            client.close()
            for c in client.containers.list():
                c.kill()


class Container:

@@ -187,65 +188,45 @@ class Container:

    async def cancel(
        self,
        stop_msg: str,
    ) -> None:

        cid = self.cntr.id
        # first try a graceful cancel
        log.cancel(
            f'SIGINT cancelling container: {cid}\n'
            f'waiting on stop msg: "{stop_msg}"'
        )
        self.try_signal('SIGINT')

        start = time.time()
        for _ in range(30):
            with trio.move_on_after(0.5) as cs:
                cs.shield = True
                await self.process_logs_until('initiating graceful shutdown')
                await self.process_logs_until('exiting...',)

        for _ in range(10):
            with trio.move_on_after(0.5) as cs:
                cs.shield = True
                await self.process_logs_until(stop_msg)

                # if we aren't cancelled on above checkpoint then we
                # assume we read the expected stop msg and terminated.
                await self.process_logs_until('exiting...',)
                break

            try:
                log.info(f'Polling for container shutdown:\n{cid}')
            if cs.cancelled_caught:
                # get out the big guns, bc apparently marketstore
                # doesn't actually know how to terminate gracefully
                # :eyeroll:...
                self.try_signal('SIGKILL')

                if self.cntr.status not in {'exited', 'not-running'}:
                try:
                    log.info('Waiting on container shutdown: {cid}')
                    self.cntr.wait(
                        timeout=0.1,
                        condition='not-running',
                    )
                    break

                    break
                except (
                    ReadTimeout,
                    ConnectionError,
                ):
                    log.error(f'failed to wait on container {cid}')
                    raise

            except (
                ReadTimeout,
            ):
                log.info(f'Still waiting on container:\n{cid}')
                continue

            except (
                docker.errors.APIError,
                ConnectionError,
            ):
                log.exception('Docker connection failure')
                break
        else:
            delay = time.time() - start
            log.error(
                f'Failed to kill container {cid} after {delay}s\n'
                'sending SIGKILL..'
            )
            # get out the big guns, bc apparently marketstore
            # doesn't actually know how to terminate gracefully
            # :eyeroll:...
            self.try_signal('SIGKILL')
            self.cntr.wait(
                timeout=3,
                condition='not-running',
            )
            raise RuntimeError(f'Failed to cancel container {cid}')

        log.cancel(f'Container stopped: {cid}')


@@ -266,16 +247,13 @@ async def open_ahabd(
    # params, etc. passing to ``Container.run()``?
    # call into endpoint for container config/init
    ep_func = NamespacePath(endpoint).load_ref()
    (
        dcntr,
        cntr_config,
        start_msg,
        stop_msg,
    ) = ep_func(client)
    dcntr, cntr_config = ep_func(client)
    cntr = Container(dcntr)

    with trio.move_on_after(1):
        found = await cntr.process_logs_until(start_msg)
        found = await cntr.process_logs_until(
            "launching tcp listener for all services...",
        )

    if not found and cntr not in client.containers.list():
        raise RuntimeError(

@@ -295,9 +273,16 @@ async def open_ahabd(
        # callers to have root perms?
        await trio.sleep_forever()

    finally:
    except (
        BaseException,
        # trio.Cancelled,
        # KeyboardInterrupt,
    ):

        with trio.CancelScope(shield=True):
            await cntr.cancel(stop_msg)
            await cntr.cancel()

        raise


async def start_ahab(

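The cancellation flow above is a staged escalation: ask politely with SIGINT, poll the container logs for the endpoint-declared stop message, then fall back to SIGKILL. A condensed sketch of that pattern using the ``Container`` API from the hunk (timings illustrative):

    async def stop(cntr: Container, stop_msg: str) -> None:
        cntr.try_signal('SIGINT')            # graceful first
        for _ in range(30):                  # ~15s of shielded polling
            with trio.move_on_after(0.5) as cs:
                cs.shield = True
                await cntr.process_logs_until(stop_msg)
                break
        if cs.cancelled_caught:
            cntr.try_signal('SIGKILL')       # last resort
            cntr.cntr.wait(timeout=3, condition='not-running')
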
@@ -22,7 +22,7 @@ financial data flows.
from __future__ import annotations
from collections import Counter
import time
from typing import TYPE_CHECKING, Optional, Union
from typing import TYPE_CHECKING, Optional

import tractor
import trio

@@ -32,7 +32,6 @@ from ..log import get_logger

if TYPE_CHECKING:
    from ._sharedmem import ShmArray
    from .feed import _FeedsBus

log = get_logger(__name__)

@@ -143,17 +142,11 @@ async def broadcast(
    shm: Optional[ShmArray] = None,

) -> None:
    '''
    Broadcast the given ``shm: ShmArray``'s buffer index step to any
    subscribers for a given sample period.

    The sent msg will include the first and last index which slice into
    the buffer's non-empty data.

    '''
    # broadcast the buffer index step to any subscribers for
    # a given sample period.
    subs = sampler.subscribers.get(delay_s, ())

    first = last = -1
    last = -1

    if shm is None:
        periods = sampler.ohlcv_shms.keys()

@@ -163,16 +156,11 @@ async def broadcast(
        if periods:
            lowest = min(periods)
            shm = sampler.ohlcv_shms[lowest][0]
            first = shm._first.value
            last = shm._last.value

    for stream in subs:
        try:
            await stream.send({
                'first': first,
                'last': last,
                'index': last,
            })
            await stream.send({'index': last})
        except (
            trio.BrokenResourceError,
            trio.ClosedResourceError

@@ -180,12 +168,7 @@ async def broadcast(
            log.error(
                f'{stream._ctx.chan.uid} dropped connection'
            )
            try:
                subs.remove(stream)
            except ValueError:
                log.warning(
                    f'{stream._ctx.chan.uid} sub already removed!?'
                )
            subs.remove(stream)


@tractor.context

@@ -220,7 +203,7 @@ async def iter_ohlc_periods(

async def sample_and_broadcast(

    bus: _FeedsBus,  # noqa
    bus: '_FeedsBus',  # noqa
    shm: ShmArray,
    quote_stream: trio.abc.ReceiveChannel,
    brokername: str,

@@ -299,13 +282,7 @@ async def sample_and_broadcast(
                # end up triggering backpressure which will
                # eventually block this producer end of the feed and
                # thus other consumers still attached.
                subs: list[
                    tuple[
                        Union[tractor.MsgStream, trio.MemorySendChannel],
                        tractor.Context,
                        Optional[float],  # tick throttle in Hz
                    ]
                ] = bus._subscribers[broker_symbol.lower()]
                subs = bus._subscribers[broker_symbol.lower()]

                # NOTE: by default the broker backend doesn't append
                # its own "name" into the fqsn schema (but maybe it

@@ -314,7 +291,7 @@ async def sample_and_broadcast(
                bsym = f'{broker_symbol}.{brokername}'
                lags: int = 0

                for (stream, ctx, tick_throttle) in subs:
                for (stream, tick_throttle) in subs:

                    try:
                        with trio.move_on_after(0.2) as cs:

@@ -326,41 +303,25 @@ async def sample_and_broadcast(
                                    (bsym, quote)
                                )
                            except trio.WouldBlock:
                                chan = ctx.chan
                                ctx = getattr(stream, '_ctx', None)
                                if ctx:
                                    log.warning(
                                        f'Feed overrun {bus.brokername} ->'
                                        f'{chan.uid} !!!'
                                        f'{ctx.channel.uid} !!!'
                                    )
                                else:
                                    key = id(stream)
                                    overruns[key] += 1
                                    log.warning(
                                        f'Feed overrun {broker_symbol}'
                                        f'@{bus.brokername} -> '
                                        f'Feed overrun {bus.brokername} -> '
                                        f'feed @ {tick_throttle} Hz'
                                    )
                                    if overruns[key] > 6:
                                        # TODO: should we check for the
                                        # context being cancelled? this
                                        # could happen but the
                                        # channel-ipc-pipe is still up.
                                        if not chan.connected():
                                            log.warning(
                                                'Dropping broken consumer:\n'
                                                f'{broker_symbol}:'
                                                f'{ctx.cid}@{chan.uid}'
                                            )
                                            await stream.aclose()
                                            raise trio.BrokenResourceError
                                        else:
                                            log.warning(
                                                'Feed getting overrun bro!\n'
                                                f'{broker_symbol}:'
                                                f'{ctx.cid}@{chan.uid}'
                                            )
                                            continue

                                    log.warning(
                                        f'Dropping consumer {stream}'
                                    )
                                    await stream.aclose()
                                    raise trio.BrokenResourceError
                        else:
                            await stream.send(
                                {bsym: quote}

@@ -376,12 +337,11 @@ async def sample_and_broadcast(
                        trio.ClosedResourceError,
                        trio.EndOfChannel,
                    ):
                        chan = ctx.chan
                        ctx = getattr(stream, '_ctx', None)
                        if ctx:
                            log.warning(
                                'Dropped `brokerd`-quotes-feed connection:\n'
                                f'{broker_symbol}:'
                                f'{ctx.cid}@{chan.uid}'
                                f'{ctx.chan.uid} dropped '
                                '`brokerd`-quotes-feed connection'
                            )
                        if tick_throttle:
                            assert stream._closed

@@ -394,11 +354,7 @@ async def sample_and_broadcast(
                        try:
                            subs.remove((stream, tick_throttle))
                        except ValueError:
                            log.error(
                                f'Stream was already removed from subs!?\n'
                                f'{broker_symbol}:'
                                f'{ctx.cid}@{chan.uid}'
                            )
                            log.error(f'{stream} was already removed from subs!?')


# TODO: a less naive throttler, here's some snippets:

@@ -510,7 +466,6 @@ async def uniform_rate_send(
            # if the feed consumer goes down then drop
            # out of this rate limiter
            log.warning(f'{stream} closed')
            await stream.aclose()
            return

        # reset send cycle state

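Since ``broadcast()`` now ships both ends of the valid buffer region, a subscriber can slice the shm buffer directly instead of only tracking the newest index. A hypothetical consumer-side sketch (``index_stream``, ``shm_array`` and ``on_new_sample`` are illustrative names):

    async for msg in index_stream:
        first, last = msg['first'], msg['last']
        # bounds of the non-empty data in the backing shm buffer
        view = shm_array[first:last + 1]
        on_new_sample(view)
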
@@ -20,7 +20,6 @@ NumPy compatible shared memory buffers for real-time IPC streaming.
"""
from __future__ import annotations
from sys import byteorder
import time
from typing import Optional
from multiprocessing.shared_memory import SharedMemory, _USE_POSIX


@@ -99,12 +98,7 @@ class SharedInt:
        if _USE_POSIX:
            # We manually unlink to bypass all the "resource tracker"
            # nonsense meant for non-SC systems.
            name = self._shm.name
            try:
                shm_unlink(name)
            except FileNotFoundError:
                # might be a teardown race here?
                log.warning(f'Shm for {name} already unlinked?')
            shm_unlink(self._shm.name)


class _Token(BaseModel):

@@ -542,26 +536,8 @@ def attach_shm_array(
    if key in _known_tokens:
        assert _Token.from_msg(_known_tokens[key]) == token, "WTF"

    # XXX: ugh, looks like due to the ``shm_open()`` C api we can't
    # actually place files in a subdir, see discussion here:
    # https://stackoverflow.com/a/11103289

    # attach to array buffer and view as per dtype
    _err: Optional[Exception] = None
    for _ in range(3):
        try:
            shm = SharedMemory(
                name=key,
                create=False,
            )
            break
        except OSError as oserr:
            _err = oserr
            time.sleep(0.1)
    else:
        if _err:
            raise _err

    shm = SharedMemory(name=key)
    shmarr = np.ndarray(
        (size,),
        dtype=token.dtype,

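The retry loop above guards the race where a consumer attaches before the producer has finished creating the segment; the attach path then wraps the raw buffer in a numpy view. A self-contained sketch of that sequence (names are illustrative):

    from multiprocessing.shared_memory import SharedMemory
    import time
    import numpy as np

    def attach(key: str, dtype, size: int) -> np.ndarray:
        err = None
        for _ in range(3):            # segment may not exist quite yet
            try:
                shm = SharedMemory(name=key, create=False)
                break
            except OSError as oserr:
                err = oserr
                time.sleep(0.1)
        else:
            raise err
        # zero-copy numpy view over the shared buffer
        return np.ndarray((size,), dtype=dtype, buffer=shm.buf)
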
@@ -33,7 +33,7 @@ ohlc_fields = [
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('volume', int),
    ('bar_wap', float),
]

@@ -33,7 +33,6 @@ from typing import (
    Generator,
    Awaitable,
    TYPE_CHECKING,
    Union,
)

import trio

@@ -41,12 +40,12 @@ from trio.abc import ReceiveChannel
from trio_typing import TaskStatus
import trimeter
import tractor
from tractor.trionics import maybe_open_context
from pydantic import BaseModel
import pendulum
import numpy as np

from ..brokers import get_brokermod
from .._cacheables import maybe_open_context
from ..calc import humanize
from ..log import get_logger, get_console_log
from .._daemon import (

@@ -117,13 +116,7 @@ class _FeedsBus(BaseModel):
    # https://github.com/samuelcolvin/pydantic/issues/2816
    _subscribers: dict[
        str,
        list[
            tuple[
                Union[tractor.MsgStream, trio.MemorySendChannel],
                tractor.Context,
                Optional[float],  # tick throttle in Hz
            ]
        ]
        list[tuple[tractor.MsgStream, Optional[float]]]
    ] = {}

    async def start_task(

@@ -235,7 +228,7 @@ def diff_history(
        # the + 1 is because ``last_tsdb_dt`` is pulled from
        # the last row entry for the ``'time'`` field retrieved
        # from the tsdb.
        to_push = array[abs(s_diff) + 1:]
        to_push = array[abs(s_diff)+1:]

    else:
        # pass back only the portion of the array that is

@@ -258,7 +251,6 @@ async def start_backfill(
    last_tsdb_dt: Optional[datetime] = None,
    storage: Optional[Storage] = None,
    write_tsdb: bool = True,
    tsdb_is_up: bool = False,

    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

@@ -274,8 +266,8 @@ async def start_backfill(

        # sample period step size in seconds
        step_size_s = (
            pendulum.from_timestamp(times[-1])
            - pendulum.from_timestamp(times[-2])
            pendulum.from_timestamp(times[-1]) -
            pendulum.from_timestamp(times[-2])
        ).seconds

        # "frame"'s worth of sample period steps in seconds

@@ -300,33 +292,25 @@ async def start_backfill(
        # let caller unblock and deliver latest history frame
        task_status.started((shm, start_dt, end_dt, bf_done))

        # based on the sample step size, maybe load a certain amount history
        if last_tsdb_dt is None:
            if step_size_s not in (1, 60):
                # maybe a better default (they don't seem to define epoch?!)

            # based on the sample step size load a certain amount
            # history
            if step_size_s == 1:
                last_tsdb_dt = pendulum.now().subtract(days=2)

            elif step_size_s == 60:
                last_tsdb_dt = pendulum.now().subtract(years=2)

            else:
                raise ValueError(
                    '`piker` only needs to support 1m and 1s sampling '
                    'but ur api is trying to deliver a longer '
                    f'timeframe of {step_size_s} ' 'seconds.. so ye, dun '
                    'do dat brudder.'
                    'do dat bruh.'
                )

            # when no tsdb "last datum" is provided, we just load
            # some near-term history.
            periods = {
                1: {'days': 1},
                60: {'days': 14},
            }

            if tsdb_is_up:
                # do a decently sized backfill and load it into storage.
                periods = {
                    1: {'days': 6},
                    60: {'years': 2},
                }

            kwargs = periods[step_size_s]
            last_tsdb_dt = start_dt.subtract(**kwargs)

        # configure async query throttling
        erlangs = config.get('erlangs', 1)
        rate = config.get('rate', 1)

@@ -344,7 +328,7 @@ async def start_backfill(
            log.debug(f'New datetime index:\n{pformat(dtrange)}')

            for end_dt in dtrange:
                log.info(f'Yielding next frame start {end_dt}')
                log.warning(f'Yielding next frame start {end_dt}')
                start = yield end_dt

                # if caller sends a new start date, reset to that

@@ -584,8 +568,8 @@ async def start_backfill(
                    start_dt,
                    end_dt,
                ) = await get_ohlc_frame(
                    input_end_dt=last_shm_prepend_dt,
                    iter_dts_gen=idts,
                        input_end_dt=last_shm_prepend_dt,
                        iter_dts_gen=idts,
                )
                last_epoch = to_push['time'][-1]
                diff = start - last_epoch

@@ -700,7 +684,6 @@ async def manage_history(

    bfqsn = fqsn.replace('.' + mod.name, '')
    open_history_client = getattr(mod, 'open_history_client', None)
    assert open_history_client

    if is_up and opened and open_history_client:

@@ -729,7 +712,6 @@ async def manage_history(
                bfqsn,
                shm,
                last_tsdb_dt=last_tsdb_dt,
                tsdb_is_up=True,
                storage=storage,
            )
        )

@@ -813,15 +795,6 @@ async def manage_history(

        # manually trigger step update to update charts/fsps
        # which need an incremental update.
        # NOTE: the way this works is super duper
        # un-intuitive right now:
        # - the broadcaster fires a msg to the fsp subsystem.
        # - fsp subsys then checks for a sample step diff and
        #   possibly recomputes prepended history.
        # - the fsp then sends back to the parent actor
        #   (usually a chart showing graphics for said fsp)
        #   which tells the chart to conduct a manual full
        #   graphics loop cycle.
        for delay_s in sampler.subscribers:
            await broadcast(delay_s)

@@ -1021,7 +994,7 @@ async def open_feed_bus(
    brokername: str,
    symbol: str,  # normally expected to be the broker-specific fqsn
    loglevel: str,
    tick_throttle: Optional[float] = None,
    tick_throttle: Optional[float] = None,
    start_stream: bool = True,

) -> None:

@@ -1125,10 +1098,10 @@ async def open_feed_bus(
            recv,
            stream,
        )
        sub = (send, ctx, tick_throttle)
        sub = (send, tick_throttle)

    else:
        sub = (stream, ctx, tick_throttle)
        sub = (stream, tick_throttle)

    subs = bus._subscribers[bfqsn]
    subs.append(sub)

@@ -1282,7 +1255,7 @@ async def install_brokerd_search(
        # a backend module?
        pause_period=getattr(
            brokermod, '_search_conf', {}
        ).get('pause_period', 0.0616),
        ).get('pause_period', 0.0616),
    ):
        yield

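The backfill default above is just a lookup keyed by the sample period. A small, checkable sketch of that selection logic (values copied from the hunk; the function name is illustrative):

    import pendulum

    def default_backfill_start(step_size_s: int, tsdb_is_up: bool, start_dt):
        # lookback window keyed by sample period (seconds)
        periods = {
            1: {'days': 1},      # 1s bars: load a day
            60: {'days': 14},    # 1m bars: load two weeks
        }
        if tsdb_is_up:
            # with storage available, backfill much deeper
            periods = {
                1: {'days': 6},
                60: {'years': 2},
            }
        return start_dt.subtract(**periods[step_size_s])

    # e.g. 1m bars with a live tsdb => two years back from `start_dt`:
    # default_backfill_start(60, True, pendulum.now())
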
@@ -127,15 +127,10 @@ def start_marketstore(
    import os
    import docker
    from .. import config

    get_console_log('info', name=__name__)

    mktsdir = os.path.join(config._config_dir, 'marketstore')

    # create when dne
    if not os.path.isdir(mktsdir):
        os.mkdir(mktsdir)

    yml_file = os.path.join(mktsdir, 'mkts.yml')
    yml_file = os.path.join(config._config_dir, 'mkts.yml')
    if not os.path.isfile(yml_file):
        log.warning(
            f'No `marketstore` config exists?: {yml_file}\n'

@@ -148,14 +143,14 @@ def start_marketstore(
    # create a mount from user's local piker config dir into container
    config_dir_mnt = docker.types.Mount(
        target='/etc',
        source=mktsdir,
        source=config._config_dir,
        type='bind',
    )

    # create a user config subdir where the marketstore
    # backing filesystem database can be persisted.
    persistent_data_dir = os.path.join(
        mktsdir, 'data',
        config._config_dir, 'data',
    )
    if not os.path.isdir(persistent_data_dir):
        os.mkdir(persistent_data_dir)

@@ -185,14 +180,7 @@ def start_marketstore(
        init=True,
        # remove=True,
    )
    return (
        dcntr,
        _config,

        # expected startup and stop msgs
        "launching tcp listener for all services...",
        "exiting...",
    )
    return dcntr, _config


_tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')

@@ -242,8 +230,8 @@ _ohlcv_dt = [
    # ohlcv sampling
    ('Open', 'f4'),
    ('High', 'f4'),
    ('Low', 'f4'),
    ('Close', 'f4'),
    ('Low', 'i8'),
    ('Close', 'i8'),
    ('Volume', 'f4'),
]

@@ -395,12 +383,7 @@ class Storage:
    ]:

        first_tsdb_dt, last_tsdb_dt = None, None
        tsdb_arrays = await self.read_ohlcv(
            fqsn,
            # on first load we don't need to pull the max
            # history per request size worth.
            limit=3000,
        )
        tsdb_arrays = await self.read_ohlcv(fqsn)
        log.info(f'Loaded tsdb history {tsdb_arrays}')

        if tsdb_arrays:

@@ -418,7 +401,6 @@ class Storage:
        fqsn: str,
        timeframe: Optional[Union[int, str]] = None,
        end: Optional[int] = None,
        limit: int = int(800e3),

    ) -> tuple[
        MarketstoreClient,

@@ -441,7 +423,7 @@ class Storage:

            # TODO: figure the max limit here given the
            # ``purerpc`` msg size limit of purerpc: 33554432
            limit=limit,
            limit=int(800e3),
        )

        if timeframe is None:

@@ -565,17 +547,6 @@ class Storage:
        if err:
            raise MarketStoreError(err)

    # XXX: currently the only way to do this is through the CLI:

    # sudo ./marketstore connect --dir ~/.config/piker/data
    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
    # and this seems to block and use up mem..
    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15

    # relevant source code for this is here:
    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
    # def delete_range(self, start_dt, end_dt) -> None:
    # ...

@acm
async def open_storage_client(

@@ -657,13 +628,12 @@ async def tsdb_history_update(
        tsdb_arrays = await storage.read_ohlcv(fqsn)
        # hist diffing
        if tsdb_arrays:
            for secs in (1, 60):
                ts = tsdb_arrays.get(secs)
                if ts is not None and len(ts):
                    # these aren't currently used but can be referenced from
                    # within the embedded ipython shell below.
                    to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
                    to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
            onesec = tsdb_arrays[1]

            # these aren't currently used but can be referenced from
            # within the embedded ipython shell below.
            to_append = ohlcv[ohlcv['time'] > onesec['Epoch'][-1]]
            to_prepend = ohlcv[ohlcv['time'] < onesec['Epoch'][0]]

        profiler('Finished db arrays diffs')

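Tying back to the ``_ahab`` hunks earlier: container endpoints now hand the supervisor a 4-tuple which ``open_ahabd`` unpacks as ``(dcntr, cntr_config, start_msg, stop_msg)``. A stripped-down sketch of a conforming endpoint (container setup elided; the image tag is an assumption):

    def start_marketstore(client) -> tuple:
        # ... config dir + mount setup as in the hunk above ...
        dcntr = client.containers.run(
            'alpacamarkets/marketstore:latest',  # image tag may differ
            detach=True,
        )
        _config = {}  # config payload handed back to callers
        return (
            dcntr,
            _config,
            # startup and stop log msgs the supervisor waits on
            "launching tcp listener for all services...",
            "exiting...",
        )
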
@@ -361,7 +361,7 @@ async def cascade(
        ) -> tuple[TaskTracker, int]:
            # TODO: adopt an incremental update engine/approach
            # where possible here eventually!
            log.debug(f're-syncing fsp {func_name} to source')
            log.warning(f're-syncing fsp {func_name} to source')
            tracker.cs.cancel()
            await tracker.complete.wait()
            tracker, index = await n.start(fsp_target)

@@ -369,12 +369,7 @@ async def cascade(
            # always trigger UI refresh after history update,
            # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
            # ``piker.ui._display.trigger_update()``.
            await client_stream.send({
                'fsp_update': {
                    'key': dst_shm_token,
                    'first': dst._first.value,
                    'last': dst._last.value,
                }})
            await client_stream.send('update')
            return tracker, index

        def is_synced(

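The structured ``'fsp_update'`` msg lets the UI side target exactly the region that changed rather than blindly refreshing. A hypothetical client-side handler (``fsp_stream`` and ``redraw_range`` are illustrative names):

    async for msg in fsp_stream:
        update = msg.get('fsp_update') if isinstance(msg, dict) else None
        if update:
            # the shm token identifies the dst buffer; first/last bound
            # the slice that was (re)computed and needs a redraw
            redraw_range(update['key'], update['first'], update['last'])
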
@@ -25,13 +25,10 @@ from pygments import highlight, lexers, formatters

# Makes it so we only see the full module name when using ``__name__``
# without the extra "piker." prefix.
_proj_name: str = 'piker'
_proj_name = 'piker'


def get_logger(
    name: str = None,

) -> logging.Logger:
def get_logger(name: str = None) -> logging.Logger:
    '''Return the package log or a sub-log for `name` if provided.
    '''
    return tractor.log.get_logger(name=name, _root_name=_proj_name)

@@ -223,9 +223,8 @@ class DynamicDateAxis(Axis):
    ) -> list[str]:

        chart = self.linkedsplits.chart
        flow = chart._flows[chart.name]
        shm = flow.shm
        bars = shm.array
        bars = chart._arrays[chart.name]
        shm = self.linkedsplits.chart._shm
        first = shm._first.value

        bars_len = len(bars)

@@ -34,7 +34,9 @@ from PyQt5.QtWidgets import (
    QVBoxLayout,
    QSplitter,
)
import msgspec
import numpy as np
# from pydantic import BaseModel
import pyqtgraph as pg
import trio

@@ -47,13 +49,9 @@ from ._cursor import (
    Cursor,
    ContentsLabel,
)
from ..data._sharedmem import ShmArray
from ._l1 import L1Labels
from ._ohlc import BarItems
from ._curve import (
    Curve,
    StepCurve,
)
from ._curve import FastAppendCurve
from ._style import (
    hcolor,
    CHART_MARGINS,

@@ -62,12 +60,15 @@ from ._style import (
)
from ..data.feed import Feed
from ..data._source import Symbol
from ..data._sharedmem import (
    ShmArray,
    # _Token,
)
from ..log import get_logger
from ._interaction import ChartView
from ._forms import FieldsForm
from .._profile import pg_profile_enabled, ms_slower_then
from ._overlay import PlotItemOverlay
from ._flows import Flow

if TYPE_CHECKING:
    from ._display import DisplayState

@@ -418,7 +419,7 @@ class LinkedSplits(QWidget):
        self,

        symbol: Symbol,
        shm: ShmArray,
        array: np.ndarray,
        sidepane: FieldsForm,

        style: str = 'bar',

@@ -443,7 +444,7 @@ class LinkedSplits(QWidget):
        self.chart = self.add_plot(

            name=symbol.key,
            shm=shm,
            array=array,
            style=style,
            _is_main=True,

@@ -471,7 +472,7 @@ class LinkedSplits(QWidget):
        self,

        name: str,
        shm: ShmArray,
        array: np.ndarray,

        array_key: Optional[str] = None,
        style: str = 'line',

@@ -515,6 +516,7 @@ class LinkedSplits(QWidget):
            name=name,
            data_key=array_key or name,

            array=array,
            parent=qframe,
            linkedsplits=self,
            axisItems=axes,

@@ -578,7 +580,7 @@ class LinkedSplits(QWidget):

            graphics, data_key = cpw.draw_ohlc(
                name,
                shm,
                array,
                array_key=array_key
            )
            self.cursor.contents_labels.add_label(

@@ -592,7 +594,7 @@ class LinkedSplits(QWidget):
            add_label = True
            graphics, data_key = cpw.draw_curve(
                name,
                shm,
                array,
                array_key=array_key,
                color='default_light',
            )

@@ -601,7 +603,7 @@ class LinkedSplits(QWidget):
            add_label = True
            graphics, data_key = cpw.draw_curve(
                name,
                shm,
                array,
                array_key=array_key,
                step_mode=True,
                color='davies',

@@ -689,6 +691,7 @@ class ChartPlotWidget(pg.PlotWidget):

        # the "data view" we generate graphics from
        name: str,
        array: np.ndarray,
        data_key: str,
        linkedsplits: LinkedSplits,

@@ -741,6 +744,14 @@ class ChartPlotWidget(pg.PlotWidget):
        self._max_l1_line_len: float = 0

        # self.setViewportMargins(0, 0, 0, 0)
        # self._ohlc = array  # readonly view of ohlc data

        # TODO: move to Aggr above XD
        # readonly view of data arrays
        self._arrays = {
            self.data_key: array,
        }
        self._graphics = {}  # registry of underlying graphics

        # registry of overlay curve names
        self._flows: dict[str, Flow] = {}

@@ -756,6 +767,7 @@ class ChartPlotWidget(pg.PlotWidget):
        # show background grid
        self.showGrid(x=False, y=True, alpha=0.3)

        self.default_view()
        self.cv.enable_auto_yrange()

        self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)

@@ -804,8 +816,14 @@ class ChartPlotWidget(pg.PlotWidget):
        Return a range tuple for the bars present in view.

        '''
        main_flow = self._flows[self.name]
        ifirst, l, lbar, rbar, r, ilast = main_flow.datums_range()
        l, r = self.view_range()
        array = self._arrays[self.name]
        start, stop = self._xrange = (
            array[0]['index'],
            array[-1]['index'],
        )
        lbar = max(l, start)
        rbar = min(r, stop)
        return l, lbar, rbar, r

    def curve_width_pxs(

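Note the shape change: ``Flow.datums_range()`` returns a 6-tuple while legacy ``bars_range()`` callers still expect 4 values, so the method above simply drops the absolute first/last datum indices. With illustrative numbers:

    # hypothetical values for a 5000-datum buffer
    ifirst, l, lbar, rbar, r, ilast = 0, -50, 0, 4999, 5100, 4999

    # bars_range() re-exposes only the legacy 4-tuple:
    legacy = (l, lbar, rbar, r)   # (-50, 0, 4999, 5100)
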
@@ -859,51 +877,40 @@ class ChartPlotWidget(pg.PlotWidget):

    def default_view(
        self,
        bars_from_y: int = 3000,
        steps_on_screen: Optional[int] = None

    ) -> None:
        '''
        Set the view box to the "default" startup view of the scene.

        '''
        flow = self._flows.get(self.name)
        if not flow:
            log.warning(f'`Flow` for {self.name} not loaded yet?')
        try:
            index = self._arrays[self.name]['index']
        except IndexError:
            log.warning(f'array for {self.name} not loaded yet?')
            return

        index = flow.shm.array['index']
        xfirst, xlast = index[0], index[-1]
        l, lbar, rbar, r = self.bars_range()
        view = self.view

        marker_pos, l1_len = self.pre_l1_xs()
        end = xlast + l1_len + 1

        if (
            rbar < 0
            or l < xfirst
            or l < 0
            or (rbar - lbar) < 6
        ):
            # TODO: set fixed bars count on screen that approx includes as
            # set fixed bars count on screen that approx includes as
            # many bars as possible before a downsample line is shown.
            begin = xlast - bars_from_y
            view.setXRange(
                min=begin,
                max=xlast,
                padding=0,
            )
            # re-get range
            l, lbar, rbar, r = self.bars_range()
            begin = xlast - round(6116 / 6)

            # we get the L1 spread label "length" in view coords
            # terms now that we've scaled either by user control
            # or to the default set of bars as per the immediate block
            # above.
            marker_pos, l1_len = self.pre_l1_xs()
            end = xlast + l1_len + 1
            begin = end - (r - l)
        else:
            begin = end - (r - l)

        # for debugging
        # print(
        #     # f'bars range: {brange}\n'
        #     f'bars range: {brange}\n'
        #     f'xlast: {xlast}\n'
        #     f'marker pos: {marker_pos}\n'
        #     f'l1 len: {l1_len}\n'

@@ -915,13 +922,14 @@ class ChartPlotWidget(pg.PlotWidget):
        if self._static_yrange == 'axis':
            self._static_yrange = None

        view = self.view
        view.setXRange(
            min=begin,
            max=end,
            padding=0,
        )
        self.view.maybe_downsample_graphics()
        view._set_yrange()
        self.view.maybe_downsample_graphics()
        try:
            self.linked.graphics_cycle()
        except IndexError:

@@ -952,7 +960,7 @@ class ChartPlotWidget(pg.PlotWidget):
    def draw_ohlc(
        self,
        name: str,
        shm: ShmArray,
        data: np.ndarray,

        array_key: Optional[str] = None,

@@ -972,12 +980,15 @@ class ChartPlotWidget(pg.PlotWidget):
        # the np array buffer to be drawn on next render cycle
        self.plotItem.addItem(graphics)

        # draw after to allow self.scene() to work...
        graphics.draw_from_data(data)

        data_key = array_key or name
        self._graphics[data_key] = graphics

        self._flows[data_key] = Flow(
            name=name,
            plot=self.plotItem,
            _shm=shm,
            is_ohlc=True,
            graphics=graphics,
        )

@@ -1047,21 +1058,20 @@ class ChartPlotWidget(pg.PlotWidget):
        self,

        name: str,
        shm: ShmArray,
        data: np.ndarray,

        array_key: Optional[str] = None,
        overlay: bool = False,
        color: Optional[str] = None,
        add_label: bool = True,
        pi: Optional[pg.PlotItem] = None,
        step_mode: bool = False,

        **pdi_kwargs,

    ) -> (pg.PlotDataItem, str):
        '''
        Draw a "curve" (line plot graphics) for the provided data in
        the input shm array ``shm``.
        the input array ``data``.

        '''
        color = color or self.pen_color or 'default_light'

@@ -1071,26 +1081,40 @@ class ChartPlotWidget(pg.PlotWidget):

        data_key = array_key or name

        curve_type = {
            None: Curve,
            'step': StepCurve,
            # TODO:
            # 'bars': BarsItems
        }['step' if step_mode else None]

        curve = curve_type(
        # yah, we wrote our own B)
        curve = FastAppendCurve(
            y=data[data_key],
            x=data['index'],
            # antialias=True,
            name=name,

            # XXX: pretty sure this is just more overhead
            # on data reads and makes graphics rendering no faster
            # clipToView=True,

            **pdi_kwargs,
        )

        # XXX: see explanation for different caching modes:
        # https://stackoverflow.com/a/39410081
        # seems to only be useful if we don't re-generate the entire
        # QPainterPath every time
        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

        # don't ever use this - it's a colossal nightmare of artefacts
        # and is disastrous for performance.
        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)

        # register curve graphics and backing array for name
        self._graphics[name] = curve
        self._arrays[data_key] = data

        pi = pi or self.plotItem

        self._flows[data_key] = Flow(
            name=name,
            plot=pi,
            _shm=shm,
            is_ohlc=False,
            # register curve graphics with this flow
            graphics=curve,
        )

@@ -1151,11 +1175,16 @@ class ChartPlotWidget(pg.PlotWidget):
        )
        return last

    def update_graphics_from_flow(
    def update_graphics_from_array(
        self,
        graphics_name: str,

        array: Optional[np.ndarray] = None,
        array_key: Optional[str] = None,

        use_vr: bool = True,
        render: bool = True,

        **kwargs,

    ) -> pg.GraphicsObject:

@@ -1163,11 +1192,63 @@ class ChartPlotWidget(pg.PlotWidget):
        Update the named internal graphics from ``array``.

        '''
        flow = self._flows[array_key or graphics_name]
        return flow.update_graphics(
            array_key=array_key,
            **kwargs,
        )
        if array is not None:
            assert len(array)

        data_key = array_key or graphics_name
        if graphics_name not in self._flows:
            data_key = self.name

        if array is not None:
            # write array to internal graphics table
            self._arrays[data_key] = array
        else:
            array = self._arrays[data_key]

        # array key and graphics "name" might be different..
        graphics = self._graphics[graphics_name]

        # compute "in-view" indices
        l, lbar, rbar, r = self.bars_range()
        indexes = array['index']
        ifirst = indexes[0]
        ilast = indexes[-1]

        lbar_i = max(l, ifirst) - ifirst
        rbar_i = min(r, ilast) - ifirst

        # TODO: we could do it this way as well no?
        # to_draw = array[lbar - ifirst:(rbar - ifirst) + 1]
        in_view = array[lbar_i: rbar_i + 1]

        if (
            not in_view.size
            or not render
        ):
            return graphics

        if isinstance(graphics, BarItems):
            graphics.update_from_array(
                array,
                in_view,
                view_range=(lbar_i, rbar_i) if use_vr else None,

                **kwargs,
            )

        else:
            graphics.update_from_array(
                x=array['index'],
                y=array[data_key],

                x_iv=in_view['index'],
                y_iv=in_view[data_key],
                view_range=(lbar_i, rbar_i) if use_vr else None,

                **kwargs
            )

        return graphics

    # def _label_h(self, yhigh: float, ylow: float) -> float:
    #     # compute contents label "height" in view terms

|
|||
|
||||
# TODO: this should go onto some sort of
|
||||
# data-view thinger..right?
|
||||
ohlc = self._flows[self.name].shm.array
|
||||
ohlc = self._shm.array
|
||||
|
||||
# XXX: not sure why the time is so off here
|
||||
# looks like we're gonna have to do some fixing..
|
||||
|
@ -1244,9 +1325,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
def maxmin(
|
||||
self,
|
||||
name: Optional[str] = None,
|
||||
bars_range: Optional[tuple[
|
||||
int, int, int, int, int, int
|
||||
]] = None,
|
||||
bars_range: Optional[tuple[int, int, int, int]] = None,
|
||||
|
||||
) -> tuple[float, float]:
|
||||
'''
|
||||
|
@ -1255,14 +1334,16 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
If ``bars_range`` is provided use that range.
|
||||
|
||||
'''
|
||||
# print(f'Chart[{self.name}].maxmin()')
|
||||
profiler = pg.debug.Profiler(
|
||||
msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
|
||||
msg=f'`{str(self)}.maxmin()` loop cycle for: `{self.name}`',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
gt=ms_slower_then,
|
||||
delayed=True,
|
||||
)
|
||||
|
||||
l, lbar, rbar, r = bars_range or self.bars_range()
|
||||
profiler(f'{self.name} got bars range')
|
||||
|
||||
# TODO: here we should instead look up the ``Flow.shm.array``
|
||||
# and read directly from shm to avoid copying to memory first
|
||||
# and then reading it again here.
|
||||
|
@ -1272,26 +1353,112 @@ class ChartPlotWidget(pg.PlotWidget):
|
|||
flow is None
|
||||
):
|
||||
log.error(f"flow {flow_key} doesn't exist in chart {self.name} !?")
|
||||
key = res = 0, 0
|
||||
res = 0, 0
|
||||
|
||||
else:
|
||||
(
|
||||
first,
|
||||
l,
|
||||
lbar,
|
||||
rbar,
|
||||
r,
|
||||
last,
|
||||
) = bars_range or flow.datums_range()
|
||||
profiler(f'{self.name} got bars range')
|
||||
|
||||
key = round(lbar), round(rbar)
|
||||
res = flow.maxmin(*key)
|
||||
profiler(f'yrange mxmn: {key} -> {res}')
|
||||
if res == (None, None):
|
||||
log.error(
|
||||
f"{flow_key} no mxmn for bars_range => {key} !?"
|
||||
)
|
||||
res = 0, 0
|
||||
|
||||
profiler(f'yrange mxmn: {key} -> {res}')
|
||||
return res
|
||||
|
||||
|
||||
# class FlowsTable(pydantic.BaseModel):
|
||||
# '''
|
||||
# Data-AGGRegate: high level API onto multiple (categorized)
|
||||
# ``Flow``s with high level processing routines for
|
||||
# multi-graphics computations and display.
|
||||
|
||||
# '''
|
||||
# flows: dict[str, np.ndarray] = {}
|
||||
|
||||
|
||||
class Flow(msgspec.Struct): # , frozen=True):
|
||||
'''
|
||||
(FinancialSignal-)Flow compound type which wraps a real-time
|
||||
graphics (curve) and its backing data stream together for high level
|
||||
access and control.
|
||||
|
||||
The intention is for this type to eventually be capable of shm-passing
|
||||
of incrementally updated graphics stream data between actors.
|
||||
|
||||
'''
|
||||
name: str
|
||||
plot: pg.PlotItem
|
||||
is_ohlc: bool = False
|
||||
graphics: pg.GraphicsObject
|
||||
|
||||
# TODO: hackery to be able to set a shm later
|
||||
# but whilst also allowing this type to hashable,
|
||||
# likely will require serializable token that is used to attach
|
||||
# to the underlying shm ref after startup?
|
||||
_shm: Optional[ShmArray] = None # currently, may be filled in "later"
|
||||
|
||||
# cache of y-range values per x-range input.
|
||||
_mxmns: dict[tuple[int, int], tuple[float, float]] = {}
|
||||
|
||||
@property
|
||||
def shm(self) -> ShmArray:
|
||||
return self._shm
|
||||
|
||||
@shm.setter
|
||||
def shm(self, shm: ShmArray) -> ShmArray:
|
||||
self._shm = shm
|
||||
|
||||
def maxmin(
|
||||
self,
|
||||
lbar,
|
||||
rbar,
|
||||
|
||||
) -> tuple[float, float]:
|
||||
'''
|
||||
Compute the cached max and min y-range values for a given
|
||||
x-range determined by ``lbar`` and ``rbar``.
|
||||
|
||||
'''
|
||||
rkey = (lbar, rbar)
|
||||
cached_result = self._mxmns.get(rkey)
|
||||
if cached_result:
|
||||
return cached_result
|
||||
|
||||
shm = self.shm
|
||||
if shm is None:
|
||||
mxmn = None
|
||||
|
||||
else: # new block for profiling?..
|
||||
arr = shm.array
|
||||
|
||||
# build relative indexes into shm array
|
||||
# TODO: should we just add/use a method
|
||||
# on the shm to do this?
|
||||
ifirst = arr[0]['index']
|
||||
slice_view = arr[
|
||||
lbar - ifirst:
|
||||
(rbar - ifirst) + 1
|
||||
]
|
||||
|
||||
if not slice_view.size:
|
||||
mxmn = None
|
||||
|
||||
else:
|
||||
if self.is_ohlc:
|
||||
ylow = np.min(slice_view['low'])
|
||||
yhigh = np.max(slice_view['high'])
|
||||
|
||||
else:
|
||||
view = slice_view[self.name]
|
||||
ylow = np.min(view)
|
||||
yhigh = np.max(view)
|
||||
|
||||
mxmn = ylow, yhigh
|
||||
|
||||
if mxmn is not None:
|
||||
# cache new mxmn result
|
||||
self._mxmns[rkey] = mxmn
|
||||
|
||||
return mxmn
|
||||
|
|
|
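``Flow.maxmin()`` is a straight memo on the rounded bar range: the first call scans the shm slice with numpy, repeats hit ``_mxmns``. A quick usage sketch (the symbol key is illustrative):

    flow = chart._flows['eurusd.ib']

    lo, hi = flow.maxmin(0, 499)            # numpy scan, then cached
    assert flow.maxmin(0, 499) == (lo, hi)  # dict lookup, no re-scan
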
@@ -138,20 +138,50 @@ def ohlc_flatten(
    return x, flat


def ohlc_to_m4_line(
    ohlc: np.ndarray,
    px_width: int,

    downsample: bool = False,
    uppx: Optional[float] = None,
    pretrace: bool = False,

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLC struct-array to a m4 downsampled 1-d array.

    '''
    xpts, flat = ohlc_flatten(
        ohlc,
        use_mxmn=pretrace,
    )

    if downsample:
        bins, x, y = ds_m4(
            xpts,
            flat,
            px_width=px_width,
            uppx=uppx,
            log_scale=bool(uppx)
        )
        x = np.broadcast_to(x[:, None], y.shape)
        x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
        y = y.flatten()

        return x, y
    else:
        return xpts, flat


def ds_m4(
    x: np.ndarray,
    y: np.ndarray,
    # units-per-pixel-x(dimension)
    uppx: float,

    # XXX: troll zone / easter egg..
    # want to mess with ur pal, pass in the actual
    # pixel width here instead of uppx-proper (i.e. pass
    # in our ``pg.GraphicsObject`` derivative's ``.px_width()``
    # to mega-trip-out ur bud). Hint, it used to be implemented
    # (wrongly) using "pixel width", so check the git history ;)

    xrange: Optional[float] = None,
    # this is the width of the data in view
    # in display-device-local pixel units.
    px_width: int,
    uppx: Optional[float] = None,
    log_scale: bool = True,

) -> tuple[int, np.ndarray, np.ndarray]:
    '''

@@ -178,49 +208,52 @@ def ds_m4(
    # "i didn't show it in the sample code, but it's accounted for
    # in the start and end indices and number of bins"

    # should never get called unless actually needed
    assert uppx > 1
    # optionally log-scale down the "supposed pxs on screen"
    # as the units-per-px (uppx) get's large.
    if log_scale:
        assert uppx, 'You must provide a `uppx` value to use log scaling!'

        # scaler = 2**7 / (1 + math.log(uppx, 2))
        scaler = round(
            max(
                # NOTE: found that a 16x px width brought greater
                # detail, likely due to dpi scaling?
                # px_width=px_width * 16,
                2**7 / (1 + math.log(uppx, 2)),
                1
            )
        )
        px_width *= scaler

    assert px_width > 1  # width of screen in pxs?

    # NOTE: if we didn't pre-slice the data to downsample
    # you could in theory pass these as the slicing params,
    # do we care though since we can always just pre-slice the
    # input?
    x_start = x[0]  # x value start/lowest in domain

    if xrange is None:
        x_end = x[-1]  # x end value/highest in domain
        xrange = (x_end - x_start)
    x_end = x[-1]  # x end value/highest in domain

    # XXX: always round up on the input pixels
    # lnx = len(x)
    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
    px_width = math.ceil(px_width)

    pxw = math.ceil(xrange / uppx)
    x_range = x_end - x_start

    # scale up the frame "width" directly with uppx
    w = uppx
    # ratio of indexed x-value to width of raster in pixels.
    # this is more or less, uppx: units-per-pixel.
    w = x_range / float(px_width)

    # ensure we make more then enough
    # frames (windows) for the output pixel
    frames = pxw
    frames = px_width

    # if we have more than an exact integer's
    # (uniform quotient output) worth of datum-domain-points
    # per windows-frame, add one more window to ensure
    # we have room for all output down-samples.
    pts_per_pixel, r = divmod(xrange, frames)
    pts_per_pixel, r = divmod(len(x), frames)
    if r:
        # while r:
        frames += 1
        pts_per_pixel, r = divmod(xrange, frames)

    # print(
    #     f'uppx: {uppx}\n'
    #     f'xrange: {xrange}\n'
    #     f'pxw: {pxw}\n'
    #     f'frames: {frames}\n'
    # )
    assert frames >= (xrange / uppx)

    # call into ``numba``
    nb, i_win, y_out = _m4(

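The uppx-based frame math above is easy to sanity-check by hand. A toy example (numbers are illustrative):

    import math

    x_start, x_end = 0, 1000           # 1000 datums in view
    uppx = 8.0                         # 8 datums per horizontal pixel
    xrange = x_end - x_start

    frames = math.ceil(xrange / uppx)  # 125 output windows
    pts_per_pixel, r = divmod(xrange, frames)
    if r:
        frames += 1                    # room for the remainder bin

    assert frames >= xrange / uppx     # always enough m4 windows
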
@@ -43,8 +43,8 @@ log = get_logger(__name__)
# latency (in terms of perceived lag in cross hair) so really be sure
# there's an improvement if you want to change it!

_mouse_rate_limit = 60  # TODO; should we calc current screen refresh rate?
_debounce_delay = 0
_mouse_rate_limit = 120  # TODO; should we calc current screen refresh rate?
_debounce_delay = 1 / 40
_ch_label_opac = 1


@@ -98,30 +98,25 @@ class LineDot(pg.CurvePoint):
        ev: QtCore.QEvent,

    ) -> bool:

        if (
            not isinstance(ev, QtCore.QDynamicPropertyChangeEvent)
            or self.curve() is None
        ):
        if not isinstance(
            ev, QtCore.QDynamicPropertyChangeEvent
        ) or self.curve() is None:
            return False

        # TODO: get rid of this ``.getData()`` and
        # make a more pythonic api to retrieve backing
        # numpy arrays...
        # (x, y) = self.curve().getData()
        # index = self.property('index')
        # # first = self._plot._arrays['ohlc'][0]['index']
        # # first = x[0]
        # # i = index - first
        # if index:
        #     i = round(index - x[0])
        #     if i > 0 and i < len(y):
        #         newPos = (index, y[i])
        #         QtWidgets.QGraphicsItem.setPos(
        #             self,
        #             *newPos,
        #         )
        #         return True
        (x, y) = self.curve().getData()
        index = self.property('index')
        # first = self._plot._arrays['ohlc'][0]['index']
        # first = x[0]
        # i = index - first
        if index:
            i = round(index - x[0])
            if i > 0 and i < len(y):
                newPos = (index, y[i])
                QtWidgets.QGraphicsItem.setPos(self, *newPos)
                return True

        return False

@@ -259,13 +254,13 @@ class ContentsLabels:
    def update_labels(
        self,
        index: int,
        # array_name: str,

    ) -> None:
        # for name, (label, update) in self._labels.items():
        for chart, name, label, update in self._labels:

            flow = chart._flows[name]
            array = flow.shm.array

            array = chart._arrays[name]
            if not (
                index >= 0
                and index < array[-1]['index']

@@ -274,6 +269,8 @@ class ContentsLabels:
                print('WTF out of range?')
                continue

            # array = chart._arrays[name]

            # call provided update func with data point
            try:
                label.show()

@@ -475,12 +472,9 @@ class Cursor(pg.GraphicsObject):
    ) -> LineDot:
        # if this plot contains curves add line dot "cursors" to denote
        # the current sample under the mouse
        main_flow = plot._flows[plot.name]
        # read out last index
        i = main_flow.shm.array[-1]['index']
        cursor = LineDot(
            curve,
            index=i,
            index=plot._arrays[plot.name][-1]['index'],
            plot=plot
        )
        plot.addItem(cursor)

@@ -18,37 +18,83 @@
Fast, smooth, sexy curves.

"""
from contextlib import contextmanager as cm
from typing import Optional, Callable
from typing import Optional

import numpy as np
import pyqtgraph as pg
from PyQt5 import QtWidgets
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtWidgets import QGraphicsItem
from PyQt5.QtCore import (
    Qt,
    QLineF,
    QSizeF,
    QRectF,
    # QRect,
    QPointF,
)
from PyQt5.QtGui import (
    QPainter,
    QPainterPath,
)

from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
# from ._compression import (
#     # ohlc_to_m4_line,
#     ds_m4,
# )
from ._compression import (
    # ohlc_to_m4_line,
    ds_m4,
)
from ..log import get_logger


log = get_logger(__name__)


def step_path_arrays_from_1d(
    x: np.ndarray,
    y: np.ndarray,
    include_endpoints: bool = False,

) -> (np.ndarray, np.ndarray):
    '''
    Generate a "step mode" curve aligned with OHLC style bars
    such that each segment spans each bar (aka "centered" style).

    '''
    y_out = y.copy()
    x_out = x.copy()
    x2 = np.empty(
        # the data + 2 endpoints on either end for
        # "termination of the path".
        (len(x) + 1, 2),
        # we want to align with OHLC or other sampling style
        # bars likely so we need fractional values
        dtype=float,
    )
    x2[0] = x[0] - 0.5
    x2[1] = x[0] + 0.5
    x2[1:] = x[:, np.newaxis] + 0.5

    # flatten to 1-d
    x_out = x2.reshape(x2.size)

    # we create a 1d with 2 extra indexes to
    # hold the start and (current) end value for the steps
    # on either end
    y2 = np.empty((len(y), 2), dtype=y.dtype)
    y2[:] = y[:, np.newaxis]

    y_out = np.empty(
        2*len(y) + 2,
        dtype=y.dtype
    )

    # flatten and set 0 endpoints
    y_out[1:-1] = y2.reshape(y2.size)
    y_out[0] = 0
    y_out[-1] = 0

    if not include_endpoints:
        return x_out[:-1], y_out[:-1]

    else:
        return x_out, y_out


_line_styles: dict[str, int] = {
    'solid': Qt.PenStyle.SolidLine,
    'dash': Qt.PenStyle.DashLine,

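The step-path transform is easiest to see on tiny inputs. A worked example of the arrays it produces, checked against the logic above:

    import numpy as np

    x = np.array([0, 1, 2])
    y = np.array([5., 6., 7.])
    xs, ys = step_path_arrays_from_1d(x, y)

    # each bar i becomes a flat segment spanning [i - 0.5, i + 0.5]:
    # xs -> [-0.5, -0.5, 0.5, 0.5, 1.5, 1.5, 2.5]
    # ys -> [ 0. ,  5. , 5. , 6. , 6. , 7. , 7. ]
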
@ -57,43 +103,24 @@ _line_styles: dict[str, int] = {
|
|||
}
|
||||
|
||||
|
||||
class Curve(pg.GraphicsObject):
|
||||
class FastAppendCurve(pg.GraphicsObject):
|
||||
'''
|
||||
A faster, simpler, append friendly version of
|
||||
``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
|
||||
updates.
|
||||
A faster, append friendly version of ``pyqtgraph.PlotCurveItem``
|
||||
built for real-time data updates.
|
||||
|
||||
This type is a much stripped down version of a ``pyqtgraph`` style
|
||||
"graphics object" in the sense that the internal lower level
|
||||
graphics which are drawn in the ``.paint()`` method are actually
|
||||
rendered outside of this class entirely and instead are assigned as
|
||||
state (instance vars) here and then drawn during a Qt graphics
|
||||
cycle.
|
||||
|
||||
The main motivation for this more modular, composed design is that
|
||||
lower level graphics data can be rendered in different threads and
|
||||
then read and drawn in this main thread without having to worry
|
||||
about dealing with Qt's concurrency primitives. See
|
||||
``piker.ui._flows.Renderer`` for details and logic related to lower
|
||||
level path generation and incremental update. The main differences in
|
||||
the path generation code include:
|
||||
|
||||
- avoiding regeneration of the entire historical path where possible
|
||||
and instead only updating the "new" segment(s) via a ``numpy``
|
||||
array diff calc.
|
||||
- here, the "last" graphics datum-segment is drawn independently
|
||||
such that near-term (high frequency) discrete-time-sampled style
|
||||
updates don't trigger a full path redraw.
|
||||
The main difference is avoiding regeneration of the entire
|
||||
historical path where possible and instead only updating the "new"
|
||||
segment(s) via a ``numpy`` array diff calc. Further the "last"
|
||||
graphic segment is drawn independently such that near-term (high
|
||||
frequency) discrete-time-sampled style updates don't trigger a full
|
||||
path redraw.
|
||||
|
||||
'''
# sub-type customization methods
sub_br: Optional[Callable] = None
sub_paint: Optional[Callable] = None
declare_paintables: Optional[Callable] = None

def __init__(
self,

x: np.ndarray,
y: np.ndarray,
*args,

step_mode: bool = False,

@@ -107,25 +134,27 @@ class Curve(pg.GraphicsObject):

) -> None:

self._name = name

# brutaaalll, see comments within..
self.yData = None
self.xData = None
self._y = self.yData = y
self._x = self.xData = x

# self._last_cap: int = 0
self.path: Optional[QPainterPath] = None
self._name = name
self.path: Optional[QtGui.QPainterPath] = None

# additional path used for appends which tries to avoid
# triggering an update/redraw of the presumably larger
# historical ``.path`` above.
self.use_fpath = use_fpath
self.fast_path: Optional[QPainterPath] = None
self.fast_path: Optional[QtGui.QPainterPath] = None

# TODO: we can probably just dispense with the parent since
# we're basically only using the pen setting now...
super().__init__(*args, **kwargs)

# self._xrange: tuple[int, int] = self.dataBounds(ax=0)
self._xrange: Optional[tuple[int, int]] = None

# self._last_draw = time.time()
self._in_ds: bool = False
self._last_uppx: float = 0

# all history of curve is drawn in single px thickness
pen = pg.mkPen(hcolor(color))
pen.setStyle(_line_styles[style])

@@ -139,43 +168,29 @@ class Curve(pg.GraphicsObject):
# self.last_step_pen = pg.mkPen(hcolor(color), width=2)
self.last_step_pen = pg.mkPen(pen, width=2)

# self._last_line: Optional[QLineF] = None
self._last_line = QLineF()
self._last_w: float = 1
self._last_line: Optional[QLineF] = None
self._last_step_rect: Optional[QRectF] = None

# flat-top style histogram-like discrete curve
# self._step_mode: bool = step_mode
self._step_mode: bool = step_mode

# self._fill = True
self._brush = pg.functions.mkBrush(hcolor(fill_color or color))

# NOTE: this setting seems to mostly prevent redraws on mouse
# interaction which is a huge boon for avg interaction latency.

# TODO: one question still remaining is if this makes transform
# interactions slower (such as zooming) and if so maybe if/when
# we implement a "history" mode for the view we disable this in
# that mode?
# don't enable caching by default for the case where the
# only thing drawn is the "last" line segment which can
# have a weird artifact where it won't be fully drawn to its
# endpoint (something we saw on trade rate curves)
self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
if step_mode:
# don't enable caching by default for the case where the
# only thing drawn is the "last" line segment which can
# have a weird artifact where it won't be fully drawn to its
# endpoint (something we saw on trade rate curves)
self.setCacheMode(
QGraphicsItem.DeviceCoordinateCache
)

# XXX: see explanation for different caching modes:
# https://stackoverflow.com/a/39410081
# seems to only be useful if we don't re-generate the entire
# QPainterPath every time
# curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

# don't ever use this - it's a colossal nightmare of artefacts
# and is disastrous for performance.
# curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)

# allow sub-type customization
declare = self.declare_paintables
if declare:
declare()
self.update()

# TODO: probably stick this in a new parent
# type which will contain our own version of

@@ -199,6 +214,9 @@ class Curve(pg.GraphicsObject):
vr = self.viewRect()
l, r = int(vr.left()), int(vr.right())

if not self._xrange:
return 0

start, stop = self._xrange
lbar = max(l, start)
rbar = min(r, stop)

@@ -207,10 +225,352 @@ class Curve(pg.GraphicsObject):
QLineF(lbar, 0, rbar, 0)
).length()

def downsample(
self,
x,
y,
px_width,
uppx,

) -> tuple[np.ndarray, np.ndarray]:

# downsample whenever more than 1 pixel per datum can be shown.
# always refresh data bounds until we get diffing
# working properly, see above..
bins, x, y = ds_m4(
x,
y,
px_width=px_width,
uppx=uppx,
log_scale=bool(uppx)
)
x = np.broadcast_to(x[:, None], y.shape)
# x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
x = (x + np.array([-0.5, 0, 0, 0.5])).flatten()
y = y.flatten()

# presumably?
self._in_ds = True
return x, y
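# A rough numpy-only sketch of the "M4" style reduction performed by
# ``ds_m4`` above (assuming each pixel-column bin keeps its first, min,
# max and last sample; ``nbins`` and the even split are illustrative):
#
#   frames = y[:len(y) - (len(y) % nbins)].reshape(nbins, -1)
#   y4 = np.column_stack([
#       frames[:, 0],         # first
#       frames.min(axis=1),   # min
#       frames.max(axis=1),   # max
#       frames[:, -1],        # last
#   ])
#
# which is why, in the method above, ``x`` is broadcast to 4 columns
# and re-flattened: each bin contributes 4 points to the rendered curve.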
def update_from_array(
self,

# full array input history
x: np.ndarray,
y: np.ndarray,

# pre-sliced array data that's "in view"
x_iv: np.ndarray,
y_iv: np.ndarray,

view_range: Optional[tuple[int, int]] = None,
profiler: Optional[pg.debug.Profiler] = None,

) -> QtGui.QPainterPath:
'''
Update curve from input 2-d data.

Compare with a cached "x-range" state and (pre/a)ppend based on
a length diff.

'''
profiler = profiler or pg.debug.Profiler(
msg=f'FastAppendCurve.update_from_array(): `{self._name}`',
disabled=not pg_profile_enabled(),
gt=ms_slower_then,
)
# flip_cache = False

if self._xrange:
istart, istop = self._xrange
else:
self._xrange = istart, istop = x[0], x[-1]
# print(f"xrange: {self._xrange}")

# XXX: lol brutal, the internals of `CurvePoint` (inherited by
# our `LineDot`) required ``.getData()`` to work..
self.xData = x
self.yData = y
self._x, self._y = x, y

if view_range:
profiler(f'view range slice {view_range}')

# downsampling incremental state checking
uppx = self.x_uppx()
px_width = self.px_width()
uppx_diff = (uppx - self._last_uppx)

should_ds = False
should_redraw = False

# if a view range is passed, plan to draw the
# source output that's "in view" of the chart.
if view_range and not self._in_ds:
# print(f'{self._name} vr: {view_range}')

# by default we only pull data up to the last (current) index
x_out, y_out = x_iv[:-1], y_iv[:-1]

# step mode: draw flat top discrete "step"
# over the index space for each datum.
if self._step_mode:
# TODO: numba this bish
x_out, y_out = step_path_arrays_from_1d(
x_out,
y_out
)
profiler('generated step arrays')

should_redraw = True
profiler('sliced in-view array history')

# x_last = x_iv[-1]
# y_last = y_iv[-1]
self._last_vr = view_range

# self.disable_cache()
# flip_cache = True

else:
self._xrange = x[0], x[-1]

x_last = x[-1]
y_last = y[-1]

# check for downsampling conditions
if (
# std m4 downsample conditions
px_width
and uppx_diff >= 4
or uppx_diff <= -3
or self._step_mode and abs(uppx_diff) >= 4

):
log.info(
f'{self._name} sampler change: {self._last_uppx} -> {uppx}'
)
self._last_uppx = uppx
should_ds = True

elif (
uppx <= 2
and self._in_ds
):
# we should de-downsample back to our original
# source data so we clear our path data in prep
# to generate a new one from original source data.
should_redraw = True
should_ds = False

# compute the length diffs between the first/last index entry in
# the input data and the last indexes we have on record from the
# last time we updated the curve index.
prepend_length = int(istart - x[0])
append_length = int(x[-1] - istop)

# no_path_yet = self.path is None
if (
self.path is None
or should_redraw
or should_ds
or prepend_length > 0
):
if (
not view_range
or self._in_ds
):
# by default we only pull data up to the last (current) index
x_out, y_out = x[:-1], y[:-1]

# step mode: draw flat top discrete "step"
# over the index space for each datum.
if self._step_mode:
x_out, y_out = step_path_arrays_from_1d(
x_out,
y_out,
)
# TODO: numba this bish
profiler('generated step arrays')

if should_redraw:
profiler('path reversion to non-ds')
if self.path:
self.path.clear()

if self.fast_path:
self.fast_path.clear()

if should_redraw and not should_ds:
if self._in_ds:
log.info(f'DEDOWN -> {self._name}')

self._in_ds = False

elif should_ds and px_width:
x_out, y_out = self.downsample(
x_out,
y_out,
px_width,
uppx,
)
profiler(f'FULL PATH downsample redraw={should_ds}')
self._in_ds = True

self.path = pg.functions.arrayToQPath(
x_out,
y_out,
connect='all',
finiteCheck=False,
path=self.path,
)
profiler('generated fresh path')
# profiler(f'DRAW PATH IN VIEW -> {self._name}')

# reserve mem allocs see:
# - https://doc.qt.io/qt-5/qpainterpath.html#reserve
# - https://doc.qt.io/qt-5/qpainterpath.html#capacity
# - https://doc.qt.io/qt-5/qpainterpath.html#clear
# XXX: right now this is based on ad hoc checks on a
# hidpi 3840x2160 4k monitor but we should optimize for
# the target display(s) on the sys.
# if no_path_yet:
# self.path.reserve(int(500e3))

# TODO: get this piecewise prepend working - right now it's
# giving heck on vwap...
# elif prepend_length:
# breakpoint()

# prepend_path = pg.functions.arrayToQPath(
# x[0:prepend_length],
# y[0:prepend_length],
# connect='all'
# )

# # swap prepend path in "front"
# old_path = self.path
# self.path = prepend_path
# # self.path.moveTo(new_x[0], new_y[0])
# self.path.connectPath(old_path)

elif (
append_length > 0
and not view_range
):
new_x = x[-append_length - 2:-1]
new_y = y[-append_length - 2:-1]

if self._step_mode:
new_x, new_y = step_path_arrays_from_1d(
new_x,
new_y,
)
# [1:] since we don't need the vertical line normally at
# the beginning of the step curve taking the first (x,
# y) point down to the x-axis **because** this is an
# appended path graphic.
new_x = new_x[1:]
new_y = new_y[1:]

profiler('diffed append arrays')

if should_ds:
new_x, new_y = self.downsample(
new_x,
new_y,
**should_ds,
)
profiler(f'fast path downsample redraw={should_ds}')

append_path = pg.functions.arrayToQPath(
new_x,
new_y,
connect='all',
finiteCheck=False,
path=self.fast_path,
)

if self.use_fpath:
# an attempt at trying to make append-updates faster..
if self.fast_path is None:
self.fast_path = append_path
self.fast_path.reserve(int(6e3))
else:
self.fast_path.connectPath(append_path)
size = self.fast_path.capacity()
profiler(f'connected fast path w size: {size}')

# print(f"append_path br: {append_path.boundingRect()}")
# self.path.moveTo(new_x[0], new_y[0])
# path.connectPath(append_path)

# XXX: lol this causes a hang..
# self.path = self.path.simplified()
else:
size = self.path.capacity()
profiler(f'connected history path w size: {size}')
self.path.connectPath(append_path)

# other merging ideas:
# https://stackoverflow.com/questions/8936225/how-to-merge-qpainterpaths
# path.addPath(append_path)
# path.closeSubpath()

# TODO: try out new work from `pyqtgraph` main which
# should repair horrid perf:
# https://github.com/pyqtgraph/pyqtgraph/pull/2032
# ok, nope still horrible XD
# if self._fill:
# # XXX: super slow set "union" op
# self.path = self.path.united(append_path).simplified()

# self.disable_cache()
# flip_cache = True

# draw the "current" step graphic segment so it lines up with
# the "middle" of the current (OHLC) sample.
if self._step_mode:
self._last_line = QLineF(
x_last - 0.5, 0,
x_last + 0.5, 0,
)
self._last_step_rect = QRectF(
x_last - 0.5, 0,
x_last + 0.5, y_last
)
# print(
# f"path br: {self.path.boundingRect()}",
# f"fast path br: {self.fast_path.boundingRect()}",
# f"last rect br: {self._last_step_rect}",
# )
else:
self._last_line = QLineF(
x[-2], y[-2],
x[-1], y_last
)

profiler('draw last segment')

# trigger redraw of path
# do update before reverting to cache mode
# self.prepareGeometryChange()
self.update()
profiler('.update()')

# if flip_cache:
# # XXX: seems to be needed to avoid artifacts (see above).
# self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

# XXX: lol brutal, the internals of `CurvePoint` (inherited by
# our `LineDot`) required ``.getData()`` to work..
def getData(self):
return self.xData, self.yData
return self._x, self._y

# TODO: drop the above after ``Cursor`` re-work
def get_arrays(self) -> tuple[np.ndarray, np.ndarray]:
return self._x, self._y

def clear(self):
'''

@@ -233,18 +593,25 @@ class Curve(pg.GraphicsObject):
# self.fast_path.clear()
self.fast_path = None

@cm
def reset_cache(self) -> None:
# self.disable_cache()
# self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

def disable_cache(self) -> None:
'''
Disable the use of the pixel coordinate cache and trigger a geo event.

'''
# XXX: pretty annoying but, without this there's little
# artefacts on the append updates to the curve...
self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
yield
self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
self.prepareGeometryChange()
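# Typical usage of the cache-toggling context manager above (a sketch;
# the update call is illustrative):
#
#   with curve.reset_cache():
#       # mutate the path while pixel caching is off so no stale
#       # device-coordinate tiles get blitted mid-update
#       curve.update_from_array(x, y, x_iv, y_iv)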
def boundingRect(self):
'''
Compute and then cache our rect.
'''
if self.path is None:
return QPainterPath().boundingRect()
return QtGui.QPainterPath().boundingRect()
else:
# dynamically override this method after initial
# path is created to avoid requiring the above None check

@@ -256,7 +623,6 @@ class Curve(pg.GraphicsObject):
Post init ``.boundingRect()``.

'''
# hb = self.path.boundingRect()
hb = self.path.controlPointRect()
hb_size = hb.size()

@@ -264,60 +630,17 @@ class Curve(pg.GraphicsObject):
if fp:
fhb = fp.controlPointRect()
hb_size = fhb.size() + hb_size

# print(f'hb_size: {hb_size}')

# if self._last_step_rect:
# hb_size += self._last_step_rect.size()

# if self._line:
# br = self._last_step_rect.bottomRight()

# tl = QPointF(
# # self._vr[0],
# # hb.topLeft().y(),
# # 0,
# # hb_size.height() + 1
# )

# br = self._last_step_rect.bottomRight()

w = hb_size.width()
h = hb_size.height()

sbr = self.sub_br
if sbr:
w, h = self.sub_br(w, h)
else:
# assume plain line graphic and use
# default unit step in each direction.

# only on a plain line do we include
# an extra index step's worth of width
# since in the step case the end of the curve
# actually terminates earlier so we don't need
# this for the last step.
w += self._last_w
# ll = self._last_line
h += 1  # ll.y2() - ll.y1()

# br = QPointF(
# self._vr[-1],
# # tl.x() + w,
# tl.y() + h,
# )
w = hb_size.width() + 1
h = hb_size.height() + 1

br = QRectF(

# top left
# hb.topLeft()
# tl,
QPointF(hb.topLeft()),

# br,
# total size
# QSizeF(hb_size)
# hb_size,
QSizeF(w, h)
)
# print(f'bounding rect: {br}')

@@ -325,36 +648,40 @@ class Curve(pg.GraphicsObject):

def paint(
self,
p: QPainter,
p: QtGui.QPainter,
opt: QtWidgets.QStyleOptionGraphicsItem,
w: QtWidgets.QWidget

) -> None:

profiler = pg.debug.Profiler(
msg=f'Curve.paint(): `{self._name}`',
msg=f'FastAppendCurve.paint(): `{self._name}`',
disabled=not pg_profile_enabled(),
ms_threshold=ms_slower_then,
gt=ms_slower_then,
)

sub_paint = self.sub_paint
if sub_paint:
sub_paint(p, profiler)
if (
self._step_mode
and self._last_step_rect
):
brush = self._brush

p.setPen(self.last_step_pen)
p.drawLine(self._last_line)
profiler('.drawLine()')
p.setPen(self._pen)
# p.drawLines(*tuple(filter(bool, self._last_step_lines)))
# p.drawRect(self._last_step_rect)
p.fillRect(self._last_step_rect, brush)
profiler('.fillRect()')

if self._last_line:
p.setPen(self.last_step_pen)
p.drawLine(self._last_line)
profiler('.drawLine()')
p.setPen(self._pen)

path = self.path
# cap = path.capacity()
# if cap != self._last_cap:
# print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
# self._last_cap = cap

if path:
p.drawPath(path)
profiler(f'.drawPath(path): {path.capacity()}')
profiler('.drawPath(path)')

fp = self.fast_path
if fp:

@@ -368,117 +695,3 @@ class Curve(pg.GraphicsObject):
# if self._fill:
# brush = self.opts['brush']
# p.fillPath(self.path, brush)

def draw_last_datum(
self,
path: QPainterPath,
src_data: np.ndarray,
render_data: np.ndarray,
reset: bool,
array_key: str,

) -> None:
# default line draw last call
# with self.reset_cache():
x = render_data['index']
y = render_data[array_key]

# draw the "current" step graphic segment so it
# lines up with the "middle" of the current
# (OHLC) sample.
self._last_line = QLineF(
x[-2], y[-2],
x[-1], y[-1],
)

return x, y


# TODO: this should probably be a "downsampled" curve type
# that draws a bar-style (but for the px column) last graphics
# element such that the current datum in view can be shown
# (via its max / min) even when highly zoomed out.
class FlattenedOHLC(Curve):

def draw_last_datum(
self,
path: QPainterPath,
src_data: np.ndarray,
render_data: np.ndarray,
reset: bool,
array_key: str,

) -> None:
lasts = src_data[-2:]
x = lasts['index']
y = lasts['close']

# draw the "current" step graphic segment so it
# lines up with the "middle" of the current
# (OHLC) sample.
self._last_line = QLineF(
x[-2], y[-2],
x[-1], y[-1]
)
return x, y


class StepCurve(Curve):

def declare_paintables(
self,
) -> None:
self._last_step_rect = QRectF()

def draw_last_datum(
self,
path: QPainterPath,
src_data: np.ndarray,
render_data: np.ndarray,
reset: bool,
array_key: str,

w: float = 0.5,

) -> None:

# TODO: remove this and instead place all step curve
# updating into pre-path data render callbacks.
# full input data
x = src_data['index']
y = src_data[array_key]

x_last = x[-1]
y_last = y[-1]

# lol, commenting this makes step curves
# all "black" for me :eyeroll:..
self._last_line = QLineF(
x_last - w, 0,
x_last + w, 0,
)
self._last_step_rect = QRectF(
x_last - w, 0,
x_last + w, y_last,
)
return x, y

def sub_paint(
self,
p: QPainter,
profiler: pg.debug.Profiler,

) -> None:
# p.drawLines(*tuple(filter(bool, self._last_step_lines)))
# p.drawRect(self._last_step_rect)
p.fillRect(self._last_step_rect, self._brush)
profiler('.fillRect()')

def sub_br(
self,
path_w: float,
path_h: float,

) -> (float, float):
# passthrough
return path_w, path_h

@@ -32,7 +32,7 @@ import trio
import pendulum
import pyqtgraph as pg

# from .. import brokers
from .. import brokers
from ..data.feed import open_feed
from ._axes import YAxisLabel
from ._chart import (

@@ -54,16 +54,16 @@ from ._forms import (
mk_order_pane_layout,
)
from .order_mode import open_order_mode
from .._profile import (
pg_profile_enabled,
ms_slower_then,
)
# from .._profile import (
# pg_profile_enabled,
# ms_slower_then,
# )
from ..log import get_logger

log = get_logger(__name__)

# TODO: load this from a config.toml!
_quote_throttle_rate: int = 22  # Hz
_quote_throttle_rate: int = 12  # Hz


# a working tick-type-classes template

@@ -96,19 +96,28 @@ def chart_maxmin(
Compute max and min datums "in view" for range limits.

'''
last_bars_range = chart.bars_range()
out = chart.maxmin()
array = ohlcv_shm.array
ifirst = array[0]['index']

if out is None:
last_bars_range = chart.bars_range()
l, lbar, rbar, r = last_bars_range
in_view = array[lbar - ifirst:rbar - ifirst + 1]

if not in_view.size:
log.warning('Resetting chart to data')
chart.default_view()
return (last_bars_range, 0, 0, 0)

mn, mx = out
mx, mn = (
np.nanmax(in_view['high']),
np.nanmin(in_view['low'],)
)

mx_vlm_in_view = 0
if vlm_chart:
out = vlm_chart.maxmin()
if out:
_, mx_vlm_in_view = out
mx_vlm_in_view = np.max(
in_view['volume']
)

return (
last_bars_range,

@@ -263,7 +272,6 @@ async def graphics_update_loop(
'vars': {
'tick_margin': tick_margin,
'i_last': i_last,
'i_last_append': i_last,
'last_mx_vlm': last_mx_vlm,
'last_mx': last_mx,
'last_mn': last_mn,

@@ -310,7 +318,6 @@ def graphics_update_cycle(
ds: DisplayState,
wap_in_history: bool = False,
trigger_all: bool = False,  # flag used by prepend history updates
prepend_update_index: Optional[int] = None,

) -> None:
# TODO: eventually optimize this whole graphics stack with ``numba``

@@ -320,12 +327,9 @@ def graphics_update_cycle(

profiler = pg.debug.Profiler(
msg=f'Graphics loop cycle for: `{chart.name}`',
delayed=True,
disabled=not pg_profile_enabled(),
# disabled=True,
ms_threshold=ms_slower_then,

# ms_threshold=1/12 * 1e3,
disabled=True,  # not pg_profile_enabled(),
gt=1/12 * 1e3,
# gt=ms_slower_then,
)

# unpack multi-referenced components

@@ -336,12 +340,12 @@ def graphics_update_cycle(
vars = ds.vars
tick_margin = vars['tick_margin']

update_uppx = 16
update_uppx = 6

for sym, quote in ds.quotes.items():

# compute the first available graphic's x-units-per-pixel
uppx = vlm_chart.view.x_uppx()
xpx = vlm_chart.view.x_uppx()

# NOTE: vlm may be written by the ``brokerd`` backend
# even though a tick sample is not emitted.

@@ -360,58 +364,26 @@ def graphics_update_cycle(
i_diff = i_step - vars['i_last']
vars['i_last'] = i_step

append_diff = i_step - vars['i_last_append']

# update the "last datum" (aka extending the flow graphic with
# new data) only if the number of unit steps is >= the number of
# such unit steps per pixel (aka uppx). Iow, if the zoom level
# is such that a datum(s) update to graphics wouldn't span
# to a new pixel, we don't update yet.
do_append = (append_diff >= uppx)
if do_append:
vars['i_last_append'] = i_step
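# e.g. at uppx == 4 (4 datums per pixel) appends only render every
# 4th sample step: append_diff climbs 1, 2, 3, 4 -> do_append trips
# once, then ``i_last_append`` snaps forward so the next 3 steps are
# skipped again.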
do_rt_update = uppx < update_uppx
# print(
# f'append_diff:{append_diff}\n'
# f'uppx:{uppx}\n'
# f'do_append: {do_append}'
# )

# TODO: we should only run mxmn when we know
# an update is due via ``do_append`` above.
(
brange,
mx_in_view,
mn_in_view,
mx_vlm_in_view,
) = ds.maxmin()

l, lbar, rbar, r = brange
mx = mx_in_view + tick_margin
mn = mn_in_view - tick_margin

profiler('`ds.maxmin()` call')

liv = r >= i_step  # the last datum is in view

if (
prepend_update_index is not None
and lbar > prepend_update_index
):
# on a history update (usually from the FSP subsys)
# if the segment of history that is being prepended
# isn't in view there is no reason to do a graphics
# update.
log.debug('Skipping prepend graphics cycle: frame not in view')
return
profiler('maxmin call')
liv = r > i_step  # the last datum is in view

# don't real-time "shift" the curve to the
# left unless we get one of the following:
if (
(
# i_diff > 0  # no new sample step
do_append
# and uppx < 4  # chart is zoomed out very far
i_diff > 0  # no new sample step
and xpx < 4  # chart is zoomed out very far
and r >= i_step  # the last datum isn't in view
and liv
)
or trigger_all

@@ -421,10 +393,63 @@ def graphics_update_cycle(
# and then iff update curves and shift?
chart.increment_view(steps=i_diff)

if vlm_chart:
vlm_chart.increment_view(steps=i_diff)
if vlm_chart:
# always update y-label
ds.vlm_sticky.update_from_data(
*array[-1][['index', 'volume']]
)

profiler('view incremented')
if (
(
xpx < update_uppx or i_diff > 0
and liv
)
or trigger_all
):
# TODO: make it so this doesn't have to be called
# once the $vlm is up?
vlm_chart.update_graphics_from_array(
'volume',
array,

# UGGGh, see ``maxmin()`` impl in `._fsp` for
# the overlayed plotitems... we need a better
# way to invoke a maxmin per overlay..
render=False,
# XXX: ^^^^ THIS IS SUPER IMPORTANT! ^^^^
# without this, since we disable the
# 'volume' (units) chart after the $vlm starts
# up we need to be sure to enable this
# auto-ranging otherwise there will be no handler
# connected to update accompanying overlay
# graphics..
)

if (
mx_vlm_in_view != vars['last_mx_vlm']
):
yrange = (0, mx_vlm_in_view * 1.375)
vlm_chart.view._set_yrange(
yrange=yrange,
)
# print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
vars['last_mx_vlm'] = mx_vlm_in_view

for curve_name, flow in vlm_chart._flows.items():
update_fsp_chart(
vlm_chart,
flow,
curve_name,
array_key=curve_name,
)
# is this even doing anything?
# (pretty sure it's the real-time
# resizing from last quote?)
fvb = flow.plot.vb
fvb._set_yrange(
autoscale_linked_plots=False,
name=curve_name,
)

ticks_frame = quote.get('ticks', ())


@@ -471,20 +496,15 @@ def graphics_update_cycle(

# update ohlc sampled price bars
if (
do_rt_update
or do_append
xpx < update_uppx
or i_diff > 0
or trigger_all
):
chart.update_graphics_from_flow(
chart.update_graphics_from_array(
chart.name,
# do_append=uppx < update_uppx,
do_append=do_append,
array,
)

# NOTE: we always update the "last" datum
# since the current range should at least be updated
# to its max/min on the last pixel.

# iterate in FIFO order per tick-frame
for typ, tick in lasts.items():


@@ -495,9 +515,8 @@ def graphics_update_cycle(
# tick frames to determine the y-range for chart
# auto-scaling.
# TODO: we need a streaming minmax algo here, see def above.
if liv:
mx = max(price + tick_margin, mx)
mn = min(price - tick_margin, mn)
mx = max(price + tick_margin, mx)
mn = min(price - tick_margin, mn)

if typ in clear_types:


@@ -520,8 +539,9 @@ def graphics_update_cycle(

if wap_in_history:
# update vwap overlay line
chart.update_graphics_from_flow(
chart.update_graphics_from_array(
'bar_wap',
array,
)

# L1 book label-line updates

@@ -537,7 +557,7 @@ def graphics_update_cycle(

if (
label is not None
and liv
# and liv
):
label.update_fields(
{'level': price, 'size': size}

@@ -551,7 +571,7 @@ def graphics_update_cycle(
typ in _tick_groups['asks']
# TODO: instead we could check if the price is in the
# y-view-range?
and liv
# and liv
):
l1.ask_label.update_fields({'level': price, 'size': size})


@@ -559,7 +579,7 @@ def graphics_update_cycle(
typ in _tick_groups['bids']
# TODO: instead we could check if the price is in the
# y-view-range?
and liv
# and liv
):
l1.bid_label.update_fields({'level': price, 'size': size})


@@ -574,7 +594,6 @@ def graphics_update_cycle(
main_vb._ic is None
or not main_vb._ic.is_set()
):
# print(f'updating range due to mxmn')
main_vb._set_yrange(
# TODO: we should probably scale
# the view margin based on the size

@@ -585,120 +604,21 @@ def graphics_update_cycle(
yrange=(mn, mx),
)

# XXX: update this every draw cycle to make L1-always-in-view work.
vars['last_mx'], vars['last_mn'] = mx, mn
vars['last_mx'], vars['last_mn'] = mx, mn

# run synchronous update on all linked flows
# TODO: should the "main" (aka source) flow be special?
for curve_name, flow in chart._flows.items():
# update any overlayed fsp flows
if curve_name != chart.data_key:
update_fsp_chart(
chart,
flow,
curve_name,
array_key=curve_name,
)
# TODO: should the "main" (aka source) flow be special?
if curve_name == chart.data_key:
continue

# even if we're downsampled bigly
# draw the last datum in the final
# px column to give the user the mx/mn
# range of that set.
if (
not do_append
# and not do_rt_update
and liv
):
flow.draw_last(
array_key=curve_name,
only_last_uppx=True,
)

# volume chart logic..
# TODO: can we unify this with the above loop?
if vlm_chart:
# always update y-label
ds.vlm_sticky.update_from_data(
*array[-1][['index', 'volume']]
update_fsp_chart(
chart,
flow,
curve_name,
array_key=curve_name,
)

if (
(
do_rt_update
or do_append
and liv
)
or trigger_all
):
# TODO: make it so this doesn't have to be called
# once the $vlm is up?
vlm_chart.update_graphics_from_flow(
'volume',
# UGGGh, see ``maxmin()`` impl in `._fsp` for
# the overlayed plotitems... we need a better
# way to invoke a maxmin per overlay..
render=False,
# XXX: ^^^^ THIS IS SUPER IMPORTANT! ^^^^
# without this, since we disable the
# 'volume' (units) chart after the $vlm starts
# up we need to be sure to enable this
# auto-ranging otherwise there will be no handler
# connected to update accompanying overlay
# graphics..
)
profiler('`vlm_chart.update_graphics_from_flow()`')

if (
mx_vlm_in_view != vars['last_mx_vlm']
):
yrange = (0, mx_vlm_in_view * 1.375)
vlm_chart.view._set_yrange(
yrange=yrange,
)
profiler('`vlm_chart.view._set_yrange()`')
# print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
vars['last_mx_vlm'] = mx_vlm_in_view

for curve_name, flow in vlm_chart._flows.items():

if (
curve_name != 'volume' and
flow.render and (
liv and
do_rt_update or do_append
)
):
update_fsp_chart(
vlm_chart,
flow,
curve_name,
array_key=curve_name,
# do_append=uppx < update_uppx,
do_append=do_append,
)
# is this even doing anything?
# (pretty sure it's the real-time
# resizing from last quote?)
fvb = flow.plot.vb
fvb._set_yrange(
name=curve_name,
)

elif (
curve_name != 'volume'
and not do_append
and liv
and uppx >= 1
# even if we're downsampled bigly
# draw the last datum in the final
# px column to give the user the mx/mn
# range of that set.
):
# always update the last datum-element
# graphic for all flows
# print(f'drawing last {flow.name}')
flow.draw_last(array_key=curve_name)


async def display_symbol_data(
godwidget: GodWidget,

@@ -723,7 +643,7 @@ async def display_symbol_data(
)

# historical data fetch
# brokermod = brokers.get_brokermod(provider)
brokermod = brokers.get_brokermod(provider)

# ohlc_status_done = sbar.open_status(
# 'retreiving OHLC history.. ',

@@ -772,31 +692,32 @@ async def display_symbol_data(
# create main OHLC chart
chart = linked.plot_ohlc_main(
symbol,
ohlcv,
bars,
sidepane=pp_pane,
)
chart.default_view()
chart._feeds[symbol.key] = feed
chart.setFocus()

# plot historical vwap if available
wap_in_history = False

# XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
# if brokermod._show_wap_in_history:
if brokermod._show_wap_in_history:

# if 'bar_wap' in bars.dtype.fields:
# wap_in_history = True
# chart.draw_curve(
# name='bar_wap',
# shm=ohlcv,
# color='default_light',
# add_label=False,
# )
if 'bar_wap' in bars.dtype.fields:
wap_in_history = True
chart.draw_curve(
name='bar_wap',
data=bars,
add_label=False,
)

# size view to data once at outset
chart.cv._set_yrange()

# TODO: a data view api that makes this less shit
chart._shm = ohlcv
chart._flows[chart.data_key].shm = ohlcv

# NOTE: we must immediately tell Qt to show the OHLC chart
# to avoid a race where the subplots get added/shown to
# the linked set *before* the main price chart!

@@ -859,5 +780,6 @@ async def display_symbol_data(
sbar._status_groups[loading_sym_key][1]()

# let the app run.. bby
chart.default_view()
# linked.graphics_cycle()
await trio.sleep_forever()

@@ -343,7 +343,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
nbars = ixmx - ixmn + 1

chart = self._chart
data = chart._flows[chart.name].shm.array[ixmn:ixmx]
data = chart._arrays[chart.name][ixmn:ixmx]

if len(data):
std = data['close'].std()


@@ -49,6 +49,10 @@ from . import _style
log = get_logger(__name__)

# pyqtgraph global config
# might as well enable this for now?
pg.useOpenGL = True
pg.enableExperimental = True

# engage core tweaks that give us better response
# latency than the average pg user
_do_overrides()


1247  piker/ui/_flows.py
File diff suppressed because it is too large

@@ -75,7 +75,6 @@ def update_fsp_chart(
flow,
graphics_name: str,
array_key: Optional[str],
**kwargs,

) -> None:


@@ -94,10 +93,10 @@ def update_fsp_chart(
# update graphics
# NOTE: this does a length check internally which allows it
# staying above the last row check below..
chart.update_graphics_from_flow(
chart.update_graphics_from_array(
graphics_name,
array,
array_key=array_key or graphics_name,
**kwargs,
)

# XXX: re: ``array_key``: fsp func names must be unique meaning we

@@ -107,6 +106,9 @@ def update_fsp_chart(
# read from last calculated value and update any label
last_val_sticky = chart._ysticks.get(graphics_name)
if last_val_sticky:
# array = shm.array[array_key]
# if len(array):
# value = array[-1]
last = last_row[array_key]
last_val_sticky.update_from_data(-1, last)


@@ -244,18 +246,20 @@ async def run_fsp_ui(

chart.draw_curve(
name=name,
shm=shm,
data=shm.array,
overlay=True,
color='default_light',
array_key=name,
**conf.get('chart_kwargs', {})
)
# specially store ref to shm for lookup in display loop
chart._flows[name].shm = shm

else:
# create a new sub-chart widget for this fsp
chart = linkedsplits.add_plot(
name=name,
shm=shm,
array=shm.array,

array_key=name,
sidepane=sidepane,

@@ -267,6 +271,12 @@ async def run_fsp_ui(
**conf.get('chart_kwargs', {})
)

# XXX: ONLY for sub-chart fsps, overlays have their
# data looked up from the chart's internal array set.
# TODO: we must get a data view api going STAT!!
chart._shm = shm
chart._flows[chart.data_key].shm = shm

# should **not** be the same sub-chart widget
assert chart.name != linkedsplits.chart.name


@@ -435,16 +445,12 @@ class FspAdmin:
# wait for graceful shutdown signal
async with stream.subscribe() as stream:
async for msg in stream:
info = msg.get('fsp_update')
if info:
if msg == 'update':
# if the chart isn't hidden try to update
# the data on screen.
if not self.linked.isHidden():
log.debug(f'Re-syncing graphics for fsp: {ns_path}')
self.linked.graphics_cycle(
trigger_all=True,
prepend_update_index=info['first'],
)
log.info(f'Re-syncing graphics for fsp: {ns_path}')
self.linked.graphics_cycle(trigger_all=True)
else:
log.info(f'recved unexpected fsp engine msg: {msg}')


@@ -620,7 +626,7 @@ async def open_vlm_displays(
shm = ohlcv
chart = linked.add_plot(
name='volume',
shm=shm,
array=shm.array,

array_key='volume',
sidepane=sidepane,

@@ -633,9 +639,10 @@ async def open_vlm_displays(
# the curve item internals are pretty convoluted.
style='step',
)
chart._flows['volume'].shm = ohlcv

# force 0 to always be in view
def multi_maxmin(
def maxmin(
names: list[str],

) -> tuple[float, float]:

@@ -651,7 +658,7 @@ async def open_vlm_displays(

return 0, mx

chart.view.maxmin = partial(multi_maxmin, names=['volume'])
chart.view.maxmin = partial(maxmin, names=['volume'])

# TODO: fix the x-axis label issue where if you put
# the axis on the left it's totally not lined up...

@@ -659,6 +666,11 @@ async def open_vlm_displays(
# chart.hideAxis('right')
# chart.showAxis('left')

# XXX: ONLY for sub-chart fsps, overlays have their
# data looked up from the chart's internal array set.
# TODO: we must get a data view api going STAT!!
chart._shm = shm

# send back new chart to caller
task_status.started(chart)


@@ -673,9 +685,9 @@ async def open_vlm_displays(

last_val_sticky.update_from_data(-1, value)

vlm_curve = chart.update_graphics_from_flow(
vlm_curve = chart.update_graphics_from_array(
'volume',
# shm.array,
shm.array,
)

# size view to data once at outset

@@ -741,20 +753,19 @@ async def open_vlm_displays(
'dolla_vlm',
'dark_vlm',
]
# dvlm_rate_fields = [
# 'dvlm_rate',
# 'dark_dvlm_rate',
# ]
dvlm_rate_fields = [
'dvlm_rate',
'dark_dvlm_rate',
]
trade_rate_fields = [
'trade_rate',
'dark_trade_rate',
]

group_mxmn = partial(
multi_maxmin,
maxmin,
# keep both regular and dark vlm in view
names=fields,
# names=fields + dvlm_rate_fields,
names=fields + dvlm_rate_fields,
)

# add custom auto range handler

@@ -784,8 +795,9 @@ async def open_vlm_displays(
color = 'bracket'

curve, _ = chart.draw_curve(
# name='dolla_vlm',
name=name,
shm=shm,
data=shm.array,
array_key=name,
overlay=pi,
color=color,

@@ -800,6 +812,7 @@ async def open_vlm_displays(
# ``.draw_curve()``.
flow = chart._flows[name]
assert flow.plot is pi
flow.shm = shm

chart_curves(
fields,

@@ -821,11 +834,11 @@ async def open_vlm_displays(
)
await started.wait()

# chart_curves(
# dvlm_rate_fields,
# dvlm_pi,
# fr_shm,
# )
chart_curves(
dvlm_rate_fields,
dvlm_pi,
fr_shm,
)

# TODO: is there a way to "sync" the dual axes such that only
# one curve is needed?

@@ -834,9 +847,7 @@ async def open_vlm_displays(
# liquidity events (well at least on low OHLC periods - 1s).
vlm_curve.hide()
chart.removeItem(vlm_curve)
vflow = chart._flows['volume']
vflow.render = False

chart._flows.pop('volume')
# avoid range sorting on volume once disabled
chart.view.disable_auto_yrange()


@@ -863,7 +874,7 @@ async def open_vlm_displays(
)
# add custom auto range handler
tr_pi.vb.maxmin = partial(
multi_maxmin,
maxmin,
# keep both regular and dark vlm in view
names=trade_rate_fields,
)

@@ -891,10 +902,10 @@ async def open_vlm_displays(

# built-in vlm fsps
for target, conf in {
# tina_vwap: {
# 'overlay': 'ohlc',  # overlays with OHLCV (main) chart
# 'anchor': 'session',
# },
tina_vwap: {
'overlay': 'ohlc',  # overlays with OHLCV (main) chart
'anchor': 'session',
},
}.items():
started = await admin.open_fsp_chart(
target,
@@ -20,6 +20,7 @@ Chart view box primitives
"""
from __future__ import annotations
from contextlib import asynccontextmanager
# import itertools
import time
from typing import Optional, Callable


@@ -34,9 +35,10 @@ import trio

from ..log import get_logger
from .._profile import pg_profile_enabled, ms_slower_then
# from ._style import _min_points_to_show
from ._style import _min_points_to_show
from ._editors import SelectRect
from . import _event
from ._ohlc import BarItems


log = get_logger(__name__)

@@ -484,18 +486,15 @@ class ChartView(ViewBox):

# don't zoom more than the min points setting
l, lbar, rbar, r = chart.bars_range()
# vl = r - l
vl = r - l

# if ev.delta() > 0 and vl <= _min_points_to_show:
# log.debug("Max zoom bruh...")
# return
if ev.delta() > 0 and vl <= _min_points_to_show:
log.debug("Max zoom bruh...")
return

# if (
# ev.delta() < 0
# and vl >= len(chart._flows[chart.name].shm.array) + 666
# ):
# log.debug("Min zoom bruh...")
# return
if ev.delta() < 0 and vl >= len(chart._arrays[chart.name]) + 666:
log.debug("Min zoom bruh...")
return

# actual scaling factor
s = 1.015 ** (ev.delta() * -1 / 20)  # self.state['wheelScaleFactor'])

@@ -569,23 +568,11 @@ class ChartView(ViewBox):

self._resetTarget()
self.scaleBy(s, focal)

# XXX: the order of the next 2 lines i'm pretty sure
# matters, we want the resize to trigger before the graphics
# update, but i gotta feelin that because this one is signal
# based (and thus not necessarily sync invoked right away)
# that calling the resize method manually might work better.
self.sigRangeChangedManually.emit(mask)

# XXX: without this it seems as though sometimes
# when zooming in from far out (and maybe vice versa?)
# the signal isn't being fired enough since if you pan
# just after you'll see further downsampling code run
# (pretty noticeable on the OHLC ds curve) but with this
# that never seems to happen? Only question is how much this
# "double work" is causing latency when these missing event
# fires don't happen?
self.maybe_downsample_graphics()
# self._ic.set()
# self._ic = None
# self.chart.resume_all_feeds()

ev.accept()


@@ -747,8 +734,9 @@ class ChartView(ViewBox):

# flag to prevent triggering sibling charts from the same linked
# set from recursion errors.
autoscale_linked_plots: bool = False,
autoscale_linked_plots: bool = True,
name: Optional[str] = None,
# autoscale_overlays: bool = False,

) -> None:
'''

@@ -759,12 +747,9 @@ class ChartView(ViewBox):
data set.

'''
name = self.name
# print(f'YRANGE ON {name}')
profiler = pg.debug.Profiler(
msg=f'`ChartView._set_yrange()`: `{name}`',
disabled=not pg_profile_enabled(),
ms_threshold=ms_slower_then,
gt=ms_slower_then,
delayed=True,
)
set_range = True

@@ -790,22 +775,45 @@ class ChartView(ViewBox):
elif yrange is not None:
ylow, yhigh = yrange

# calculate max, min y values in viewable x-range from data.
# Make sure min bars/datums on screen is adhered.
else:
br = bars_range or chart.bars_range()
profiler(f'got bars range: {br}')

# TODO: maybe should be a method on the
# chart widget/item?
# if False:
# if autoscale_linked_plots:
# # avoid recursion by sibling plots
# linked = self.linkedsplits
# plots = list(linked.subplots.copy().values())
# main = linked.chart
# if main:
# plots.append(main)

# for chart in plots:
# if chart and not chart._static_yrange:
# chart.cv._set_yrange(
# bars_range=br,
# autoscale_linked_plots=False,
# )
# profiler('autoscaled linked plots')

if set_range:

# XXX: only compute the mxmn range
# if none is provided as input!
if not yrange:
# flow = chart._flows[name]
# XXX: only compute the mxmn range
# if none is provided as input!
yrange = self._maxmin()

if yrange is None:
log.warning(f'No yrange provided for {name}!?')
print(f"WTF NO YRANGE {name}")
log.warning(f'No yrange provided for {self.name}!?')
return

ylow, yhigh = yrange

profiler(f'callback ._maxmin(): {yrange}')
profiler(f'maxmin(): {yrange}')

# view margins: stay within a % of the "true range"
diff = yhigh - ylow

@@ -822,8 +830,6 @@ class ChartView(ViewBox):
self.setYRange(ylow, yhigh)
profiler(f'set limits: {(ylow, yhigh)}')

profiler.finish()

def enable_auto_yrange(
self,
src_vb: Optional[ChartView] = None,

@@ -837,9 +843,17 @@ class ChartView(ViewBox):
if src_vb is None:
src_vb = self

# such that when a linked chart changes its range
# this local view is also automatically changed and
# resized to data.
src_vb.sigXRangeChanged.connect(self._set_yrange)

# splitter(s) resizing
src_vb.sigResized.connect(self._set_yrange)

# mouse wheel doesn't emit XRangeChanged
src_vb.sigRangeChangedManually.connect(self._set_yrange)

# TODO: a smarter way to avoid calling this needlessly?
# 2 things i can think of:
# - register downsample-able graphics specially and only

@@ -850,16 +864,15 @@ class ChartView(ViewBox):
self.maybe_downsample_graphics
)

# mouse wheel doesn't emit XRangeChanged
src_vb.sigRangeChangedManually.connect(self._set_yrange)
def disable_auto_yrange(
self,
) -> None:

# src_vb.sigXRangeChanged.connect(self._set_yrange)
# src_vb.sigXRangeChanged.connect(
# self.maybe_downsample_graphics
# )

def disable_auto_yrange(self) -> None:
# self._chart._static_yrange = 'axis'

self.sigXRangeChanged.disconnect(
self._set_yrange,
)
self.sigResized.disconnect(
self._set_yrange,
)

@@ -870,11 +883,6 @@ class ChartView(ViewBox):
self._set_yrange,
)

# self.sigXRangeChanged.disconnect(self._set_yrange)
# self.sigXRangeChanged.disconnect(
# self.maybe_downsample_graphics
# )

def x_uppx(self) -> float:
'''
Return the "number of x units" within a single

@@ -882,7 +890,7 @@ class ChartView(ViewBox):
graphics items which are our children.

'''
graphics = [f.graphics for f in self._chart._flows.values()]
graphics = list(self._chart._graphics.values())
if not graphics:
return 0


@@ -893,21 +901,25 @@ class ChartView(ViewBox):
else:
return 0
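# e.g. a view spanning 1000 index units drawn across 250 horizontal
# pixels gives uppx = 1000 / 250 = 4.0, i.e. 4 datums share each
# pixel column; values > 1 are what make downsampling worthwhile.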

def maybe_downsample_graphics(
self,
autoscale_overlays: bool = True,
):
def maybe_downsample_graphics(self):

uppx = self.x_uppx()
if (
# we probably want to drop this once we are "drawing in
# view" for downsampled flows..
uppx and uppx > 16
and self._ic is not None
):
# don't bother updating since we're zoomed out bigly and
# in a pan-interaction, in which case we shouldn't be
# doing view-range based rendering (at least not yet).
# print(f'{uppx} exiting early!')
return

profiler = pg.debug.Profiler(
msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
disabled=not pg_profile_enabled(),

# XXX: important to avoid not seeing underlying
# ``.update_graphics_from_flow()`` nested profiling likely
# due to the way delaying works and garbage collection of
# the profiler in the delegated method calls.
ms_threshold=6,
# ms_threshold=ms_slower_then,
gt=3,
delayed=True,
)

# TODO: a faster single-loop-iterator way of doing this XD

@@ -916,32 +928,19 @@ class ChartView(ViewBox):
plots = linked.subplots | {chart.name: chart}
for chart_name, chart in plots.items():
for name, flow in chart._flows.items():
graphics = flow.graphics

if (
not flow.render

# XXX: super important to be aware of this.
# or not flow.graphics.isVisible()
):
continue
use_vr = False
if isinstance(graphics, BarItems):
use_vr = True

# pass in no array which will read and render from the last
# passed array (normally provided by the display loop.)
chart.update_graphics_from_flow(
chart.update_graphics_from_array(
name,
use_vr=True,
use_vr=use_vr,
profiler=profiler,
)
profiler(f'range change updated {chart_name}:{name}')

# for each overlay on this chart auto-scale the
# y-range to max-min values.
if autoscale_overlays:
overlay = chart.pi_overlay
if overlay:
for pi in overlay.overlays:
pi.vb._set_yrange(
# TODO: get the range once up front...
# bars_range=br,
)
profiler('autoscaled linked plots')

profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
profiler.finish()


@@ -25,13 +25,17 @@ from typing import (

import numpy as np
import pyqtgraph as pg
from numba import njit, float64, int64  # , optional
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QLineF, QPointF
from PyQt5.QtGui import QPainterPath
# from numba import types as ntypes
# from ..data._source import numba_ohlc_dtype

from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
from ..log import get_logger
from ._curve import FastAppendCurve
from ._compression import ohlc_flatten

if TYPE_CHECKING:
from ._chart import LinkedSplits


@@ -42,8 +46,7 @@ log = get_logger(__name__)

def bar_from_ohlc_row(
row: np.ndarray,
# 0.5 is no overlap between arms, 1.0 is full overlap
w: float = 0.43
w: float

) -> tuple[QLineF]:
'''

@@ -81,11 +84,128 @@ def bar_from_ohlc_row(
return [hl, o, c]


@njit(
# TODO: for now need to construct this manually for readonly arrays, see
# https://github.com/numba/numba/issues/4511
# ntypes.tuple((float64[:], float64[:], float64[:]))(
# numba_ohlc_dtype[::1],  # contiguous
# int64,
# optional(float64),
# ),
nogil=True
)
def path_arrays_from_ohlc(
data: np.ndarray,
start: int64,
bar_gap: float64 = 0.43,

) -> np.ndarray:
'''
Generate an array of lines objects from input ohlc data.

'''
size = int(data.shape[0] * 6)

x = np.zeros(
# data,
shape=size,
dtype=float64,
)
y, c = x.copy(), x.copy()

# TODO: report bug for assert @
# /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
for i, q in enumerate(data[start:], start):

# TODO: ask numba why this doesn't work..
# open, high, low, close, index = q[
# ['open', 'high', 'low', 'close', 'index']]

open = q['open']
high = q['high']
low = q['low']
close = q['close']
index = float64(q['index'])

istart = i * 6
istop = istart + 6

# x,y detail the 6 points which connect all vertices of an ohlc bar
x[istart:istop] = (
index - bar_gap,
index,
index,
index,
index,
index + bar_gap,
)
y[istart:istop] = (
open,
open,
low,
high,
close,
close,
)

# specifies that the first edge is never connected to the
# prior bars last edge thus providing a small "gap"/"space"
# between bars determined by ``bar_gap``.
c[istart:istop] = (1, 1, 1, 1, 1, 0)

return x, y, c
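# e.g. a bar at index == 10 with the default bar_gap of 0.43 emits the
# 6 vertices: (9.57, open), (10, open), (10, low), (10, high),
# (10, close), (10.43, close); the trailing 0 in ``c`` disconnects
# this bar's last vertex from the next bar's first.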
|
||||
|
||||
|
||||
def gen_qpath(
|
||||
data: np.ndarray,
|
||||
start: int, # XXX: do we need this?
|
||||
w: float,
|
||||
path: Optional[QtGui.QPainterPath] = None,
|
||||
|
||||
) -> QtGui.QPainterPath:
|
||||
|
||||
path_was_none = path is None
|
||||
|
||||
profiler = pg.debug.Profiler(
|
||||
msg='gen_qpath ohlc',
|
||||
disabled=not pg_profile_enabled(),
|
||||
gt=ms_slower_then,
|
||||
)
|
||||
|
||||
x, y, c = path_arrays_from_ohlc(
|
||||
data,
|
||||
start,
|
||||
bar_gap=w,
|
||||
)
|
||||
profiler("generate stream with numba")
|
||||
|
||||
# TODO: numba the internals of this!
|
||||
path = pg.functions.arrayToQPath(
|
||||
x,
|
||||
y,
|
||||
connect=c,
|
||||
path=path,
|
||||
)
|
||||
|
||||
# avoid mem allocs if possible
|
||||
if path_was_none:
|
||||
path.reserve(path.capacity())
|
||||
|
||||
profiler("generate path with arrayToQPath")
|
||||
|
||||
return path
class BarItems(pg.GraphicsObject):
    '''
    "Price range" bars graphics rendered from an OHLC sampled sequence.

    '''
    sigPlotChanged = QtCore.pyqtSignal(object)

    # 0.5 is no overlap between arms, 1.0 is full overlap
    w: float = 0.43

    def __init__(
        self,
        linked: LinkedSplits,
@@ -105,13 +225,388 @@ class BarItems(pg.GraphicsObject):
        self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
        self._name = name

        self._ds_line_xy: Optional[
            tuple[np.ndarray, np.ndarray]
        ] = None

        # NOTE: this prevents redraws on mouse interaction which is
        # a huge boon for avg interaction latency.

        # TODO: one question still remaining is if this makes transform
        # interactions slower (such as zooming) and if so maybe if/when
        # we implement a "history" mode for the view we disable this in
        # that mode?
        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
        self.path = QPainterPath()

        self._pi = plotitem
        self.path = QtGui.QPainterPath()
        self.fast_path = QtGui.QPainterPath()

        self._xrange: tuple[int, int]
        self._yrange: tuple[float, float]
        self._vrange = None

        # TODO: don't render the full backing array each time
        # self._path_data = None
        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

        # track the current length of drawable lines within the larger array
        self.start_index: int = 0
        self.stop_index: int = 0

        # downsampler-line state
        self._in_ds: bool = False
        self._ds_line: Optional[FastAppendCurve] = None
        self._dsi: tuple[int, int] = 0, 0
        self._xs_in_px: float = 0
    def draw_from_data(
        self,
        ohlc: np.ndarray,
        start: int = 0,

    ) -> QtGui.QPainterPath:
        '''
        Draw OHLC datum graphics from a ``np.ndarray``.

        This routine is usually only called to draw the initial history.

        '''
        hist, last = ohlc[:-1], ohlc[-1]
        self.path = gen_qpath(hist, start, self.w)

        # save graphics for later reference and keep track
        # of current internal "last index"
        # self.start_index = len(ohlc)
        index = ohlc['index']
        self._xrange = (index[0], index[-1])
        self._yrange = (
            np.nanmax(ohlc['high']),
            np.nanmin(ohlc['low']),
        )

        # up to last to avoid double draw of last bar
        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

        x, y = self._ds_line_xy = ohlc_flatten(ohlc)

        # TODO: figure out the most optimal size for the ideal
        # curve-path by,
        # - calcing the display's max px width `.screen()`
        # - drawing a curve and figuring out its capacity:
        #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
        # - reserving that cap for each curve-mapped-to-shm with

        # - leveraging clearing when needed to redraw the entire
        #   curve that does not release mem allocs:
        #   https://doc.qt.io/qt-5/qpainterpath.html#clear
        curve = FastAppendCurve(
            y=y,
            x=x,
            name='OHLC',
            color=self._color,
        )
        curve.hide()
        self._pi.addItem(curve)
        self._ds_line = curve

        self._ds_xrange = (index[0], index[-1])

        # trigger render
        # https://doc.qt.io/qt-5/qgraphicsitem.html#update
        self.update()

        return self.path
    def x_uppx(self) -> int:
        # we expect the downsample curve to report this.
        return 0
        if self._ds_line:
            return self._ds_line.x_uppx()
        else:
            return 0
    def update_from_array(
        self,

        # full array input history
        ohlc: np.ndarray,

        # pre-sliced array data that's "in view"
        ohlc_iv: np.ndarray,

        view_range: Optional[tuple[int, int]] = None,
        profiler: Optional[pg.debug.Profiler] = None,

    ) -> None:
        '''
        Update the last datum's bar graphic from input data array.

        This routine should be interface compatible with
        ``pg.PlotCurveItem.setData()``. Normally this method in
        ``pyqtgraph`` seems to update all the data passed to the
        graphics object, and then update/rerender, but here we're
        assuming the prior graphics haven't changed (OHLC history rarely
        does) so this "should" be simpler and faster.

        This routine should be made (transitively) as fast as possible.

        '''
        profiler = profiler or pg.debug.Profiler(
            disabled=not pg_profile_enabled(),
            gt=ms_slower_then,
            delayed=True,
        )
        # index = self.start_index
        istart, istop = self._xrange
        ds_istart, ds_istop = self._ds_xrange

        index = ohlc['index']
        first_index, last_index = index[0], index[-1]

        # length = len(ohlc)
        # prepend_length = istart - first_index
        # append_length = last_index - istop

        # ds_prepend_length = ds_istart - first_index
        # ds_append_length = last_index - ds_istop

        flip_cache = False

        x_gt = 16
        if self._ds_line:
            uppx = self._ds_line.x_uppx()
        else:
            uppx = 0

        should_line = self._in_ds
        if (
            self._in_ds
            and uppx < x_gt
        ):
            should_line = False

        elif (
            not self._in_ds
            and uppx >= x_gt
        ):
            should_line = True

        profiler('ds logic complete')
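
        # NOTE: the above is a simple hysteresis around the ``x_gt``
        # threshold: flip to the downsampled line once the x-units-per-
        # pixel count reaches it, flip back to bars once it drops below
        # it, and otherwise keep the current render mode.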
        if should_line:
            # update the line graphic
            # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
            x, y = self._ds_line_xy = ohlc_flatten(ohlc)
            x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv)
            profiler('flattening bars to line')

            # TODO: we should be diffing the amount of new data which
            # needs to be downsampled. Ideally we actually are just
            # doing all the ds-ing in sibling actors so that the data
            # can just be read and rendered to graphics on events of our
            # choice.
            # diff = do_diff(ohlc, new_bit)
            curve = self._ds_line
            curve.update_from_array(
                x=x,
                y=y,
                x_iv=x_iv,
                y_iv=y_iv,
                view_range=None,  # hack
                profiler=profiler,
            )
            profiler('updated ds line')

            if not self._in_ds:
                # hide bars and show line
                self.hide()
                # XXX: is this actually any faster?
                # self._pi.removeItem(self)

                # TODO: a `.ui()` log level?
                log.info(
                    f'downsampling to line graphic {self._name}'
                )

                # self._pi.addItem(curve)
                curve.show()
                curve.update()
                self._in_ds = True

            # stop here since we don't need to update bars path any more
            # as we delegate to the downsample line with updates.
            profiler.finish()
            # print('terminating early')
            return
        else:
            # we should be in bars mode

            if self._in_ds:
                # flip back to bars graphics and hide the downsample line.
                log.info(f'showing bars graphic {self._name}')

                curve = self._ds_line
                curve.hide()
                # self._pi.removeItem(curve)

                # XXX: is this actually any faster?
                # self._pi.addItem(self)
                self.show()
                self._in_ds = False

        # generate in_view path
        self.path = gen_qpath(
            ohlc_iv,
            0,
            self.w,
            # path=self.path,
        )

        # TODO: to make the downsampling faster
        # - allow mapping only a range of lines thus only drawing as
        #   many bars as exactly specified.
        # - move ohlc "flattening" to a shmarr
        # - maybe move all this embedded logic to a higher
        #   level type?
        # if prepend_length:
        #     # new history was added and we need to render a new path
        #     prepend_bars = ohlc[:prepend_length]

        # if ds_prepend_length:
        #     ds_prepend_bars = ohlc[:ds_prepend_length]
        #     pre_x, pre_y = ohlc_flatten(ds_prepend_bars)
        #     fx = np.concatenate((pre_x, fx))
        #     fy = np.concatenate((pre_y, fy))
        #     profiler('ds line prepend diff complete')

        # if append_length:
        #     # generate new graphics to match provided array
        #     # path appending logic:
        #     # we need to get the previous "current bar(s)" for the time step
        #     # and convert it to a sub-path to append to the historical set
        #     # new_bars = ohlc[istop - 1:istop + append_length - 1]
        #     append_bars = ohlc[-append_length - 1:-1]
        #     # print(f'ohlc bars to append size: {append_bars.size}\n')

        # if ds_append_length:
        #     ds_append_bars = ohlc[-ds_append_length - 1:-1]
        #     post_x, post_y = ohlc_flatten(ds_append_bars)
        #     print(
        #         f'ds curve to append sizes: {(post_x.size, post_y.size)}'
        #     )
        #     fx = np.concatenate((fx, post_x))
        #     fy = np.concatenate((fy, post_y))

        #     profiler('ds line append diff complete')

        profiler('array diffs complete')
        # does this work?
        last = ohlc[-1]
        # fy[-1] = last['close']

        # # incremental update and cache line datums
        # self._ds_line_xy = fx, fy

        # maybe downsample to line
        # ds = self.maybe_downsample()
        # if ds:
        #     # if we downsample to a line don't bother with
        #     # any more path generation / updates
        #     self._ds_xrange = first_index, last_index
        #     profiler('downsampled to line')
        #     return

        # print(in_view.size)

        # if self.path:
        #     self.path = path
        #     self.path.reserve(path.capacity())
        #     self.path.swap(path)

        # path updates
        # if prepend_length:
        #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
        #     # y value not matching the first value from
        #     # ohlc[prepend_length + 1] ???
        #     prepend_path = gen_qpath(prepend_bars, 0, self.w)
        #     old_path = self.path
        #     self.path = prepend_path
        #     self.path.addPath(old_path)
        #     profiler('path PREPEND')

        # if append_length:
        #     append_path = gen_qpath(append_bars, 0, self.w)

        #     self.path.moveTo(
        #         float(istop - self.w),
        #         float(append_bars[0]['open'])
        #     )
        #     self.path.addPath(append_path)

        #     profiler('path APPEND')
        #     fp = self.fast_path
        #     if fp is None:
        #         self.fast_path = append_path

        #     else:
        #         fp.moveTo(
        #             float(istop - self.w), float(new_bars[0]['open'])
        #         )
        #         fp.addPath(append_path)

        #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
        #     flip_cache = True

        self._xrange = first_index, last_index
        # trigger redraw despite caching
        self.prepareGeometryChange()

        # generate new lines objects for updatable "current bar"
        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

        # last bar update
        i, o, h, l, last, v = last[
            ['index', 'open', 'high', 'low', 'close', 'volume']
        ]
        # assert i == self.start_index - 1
        # assert i == last_index
        body, larm, rarm = self._last_bar_lines

        # XXX: is there a faster way to modify this?
        rarm.setLine(rarm.x1(), last, rarm.x2(), last)

        # writer is responsible for changing open on "first" volume of bar
        larm.setLine(larm.x1(), o, larm.x2(), o)

        if l != h:  # noqa

            if body is None:
                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
            else:
                # update body
                body.setLine(i, l, i, h)

            # XXX: pretty sure this is causing an issue where the bar has
            # a large upward move right before the next sample and the body
            # is getting set to None since the next bar is flat but the shm
            # array index update wasn't read by the time this code runs. IOW
            # we're doing this removal of the body for a bar index that is
            # now out of date / from some previous sample. It's weird
            # though because i've seen it do this to bars i - 3 back?

        profiler('last bar set')

        self.update()
        profiler('.update()')

        if flip_cache:
            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

        profiler.finish()
    def boundingRect(self):
        # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect

@@ -135,6 +630,16 @@ class BarItems(pg.GraphicsObject):
            hb.bottomRight(),
        )

        # fp = self.fast_path
        # if fp:
        #     fhb = fp.controlPointRect()
        #     print((hb_tl, hb_br))
        #     print(fhb)
        #     hb_tl, hb_br = (
        #         fhb.topLeft() + hb.topLeft(),
        #         fhb.bottomRight() + hb.bottomRight(),
        #     )

        # need to include last bar height or BR will be off
        mx_y = hb_br.y()
        mn_y = hb_tl.y()
@@ -170,9 +675,12 @@ class BarItems(pg.GraphicsObject):

    ) -> None:

        if self._in_ds:
            return

        profiler = pg.debug.Profiler(
            disabled=not pg_profile_enabled(),
            ms_threshold=ms_slower_then,
            gt=ms_slower_then,
        )

        # p.setCompositionMode(0)

@@ -184,67 +692,13 @@ class BarItems(pg.GraphicsObject):
        # lead to any perf gains other than when zoomed in to less bars
        # in view.
        p.setPen(self.last_bar_pen)
        if self._last_bar_lines:
            p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
            profiler('draw last bar')
        p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
        profiler('draw last bar')

        p.setPen(self.bars_pen)
        p.drawPath(self.path)
        profiler(f'draw history path: {self.path.capacity()}')
    def draw_last_datum(
        self,
        path: QPainterPath,
        src_data: np.ndarray,
        render_data: np.ndarray,
        reset: bool,
        array_key: str,

        fields: list[str] = [
            'index',
            'open',
            'high',
            'low',
            'close',
        ],

    ) -> tuple[np.ndarray, np.ndarray]:

        # relevant fields
        ohlc = src_data[fields]
        last_row = ohlc[-1:]

        # individual values
        last_row = i, o, h, l, last = ohlc[-1]

        # generate new lines objects for updatable "current bar"
        self._last_bar_lines = bar_from_ohlc_row(last_row)

        # assert i == graphics.start_index - 1
        # assert i == last_index
        body, larm, rarm = self._last_bar_lines

        # XXX: is there a faster way to modify this?
        rarm.setLine(rarm.x1(), last, rarm.x2(), last)

        # writer is responsible for changing open on "first" volume of bar
        larm.setLine(larm.x1(), o, larm.x2(), o)

        if l != h:  # noqa

            if body is None:
                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
            else:
                # update body
                body.setLine(i, l, i, h)

            # XXX: pretty sure this is causing an issue where the
            # bar has a large upward move right before the next
            # sample and the body is getting set to None since the
            # next bar is flat but the shm array index update wasn't
            # read by the time this code runs. IOW we're doing this
            # removal of the body for a bar index that is now out of
            # date / from some previous sample. It's weird though
            # because i've seen it do this to bars i - 3 back?

        return ohlc['index'], ohlc['close']

        # if self.fast_path:
        #     p.drawPath(self.fast_path)
        #     profiler('draw fast path')
@@ -1,236 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Super fast ``QPainterPath`` generation related operator routines.

"""
from __future__ import annotations
from typing import (
    # Optional,
    TYPE_CHECKING,
)

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import njit, float64, int64  # , optional
# import pyqtgraph as pg
from PyQt5 import QtGui
# from PyQt5.QtCore import QLineF, QPointF

from ..data._sharedmem import (
    ShmArray,
)
# from .._profile import pg_profile_enabled, ms_slower_then
from ._compression import (
    ds_m4,
)

if TYPE_CHECKING:
    from ._flows import Renderer
def xy_downsample(
    x,
    y,
    uppx,

    x_spacer: float = 0.5,

) -> tuple[np.ndarray, np.ndarray]:

    # downsample whenever more than 1 pixel per datum can be shown.
    # always refresh data bounds until we get diffing
    # working properly, see above..
    bins, x, y = ds_m4(
        x,
        y,
        uppx,
    )

    # flatten output to 1d arrays suitable for path-graphics generation.
    x = np.broadcast_to(x[:, None], y.shape)
    x = (x + np.array(
        [-x_spacer, 0, 0, x_spacer]
    )).flatten()
    y = y.flatten()

    return x, y
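
A toy sketch of the flatten step above; the ``ds_m4()`` output shape is assumed here (one x value per bin, four y samples per bin, ordering assumed):

import numpy as np

# hypothetical m4 output: 2 bins, 4 y-samples per bin
x = np.array([0.0, 1.0])
y = np.array([
    [1.0, 2.0, 0.5, 1.5],
    [1.5, 3.0, 1.0, 2.0],
])

# the same broadcast trick as ``xy_downsample()``: repeat each x 4
# times and nudge the outer samples apart by ``x_spacer``
x4 = np.broadcast_to(x[:, None], y.shape)
x_flat = (x4 + np.array([-0.5, 0, 0, 0.5])).flatten()
y_flat = y.flatten()
assert x_flat.shape == y_flat.shape == (8,)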
@njit(
    # TODO: for now need to construct this manually for readonly arrays, see
    # https://github.com/numba/numba/issues/4511
    # ntypes.tuple((float64[:], float64[:], float64[:]))(
    #     numba_ohlc_dtype[::1],  # contiguous
    #     int64,
    #     optional(float64),
    # ),
    nogil=True
)
def path_arrays_from_ohlc(
    data: np.ndarray,
    start: int64,
    bar_gap: float64 = 0.43,

) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    '''
    Generate arrays of line-segment coordinates from input ohlc data.

    '''
    size = int(data.shape[0] * 6)

    x = np.zeros(
        # data,
        shape=size,
        dtype=float64,
    )
    y, c = x.copy(), x.copy()

    # TODO: report bug for assert @
    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    for i, q in enumerate(data[start:], start):

        # TODO: ask numba why this doesn't work..
        # open, high, low, close, index = q[
        #     ['open', 'high', 'low', 'close', 'index']]

        open = q['open']
        high = q['high']
        low = q['low']
        close = q['close']
        index = float64(q['index'])

        istart = i * 6
        istop = istart + 6

        # x,y detail the 6 points which connect all vertexes of an ohlc bar
        x[istart:istop] = (
            index - bar_gap,
            index,
            index,
            index,
            index,
            index + bar_gap,
        )
        y[istart:istop] = (
            open,
            open,
            low,
            high,
            close,
            close,
        )

        # specifies that the first edge is never connected to the
        # prior bar's last edge thus providing a small "gap"/"space"
        # between bars determined by ``bar_gap``.
        c[istart:istop] = (1, 1, 1, 1, 1, 0)

    return x, y, c
def gen_ohlc_qpath(
    r: Renderer,
    data: np.ndarray,
    array_key: str,  # we ignore this
    vr: tuple[int, int],

    start: int = 0,  # XXX: do we need this?
    # 0.5 is no overlap between arms, 1.0 is full overlap
    w: float = 0.43,

) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    '''
    More or less a direct proxy to ``path_arrays_from_ohlc()``
    but with closed-in kwargs for line spacing.

    '''
    x, y, c = path_arrays_from_ohlc(
        data,
        start,
        bar_gap=w,
    )
    return x, y, c
def ohlc_to_line(
    ohlc_shm: ShmArray,
    data_field: str,
    fields: list[str] = ['open', 'high', 'low', 'close']

) -> tuple[
    np.ndarray,
    np.ndarray,
]:
    '''
    Convert an input struct-array holding OHLC samples into a pair of
    flattened x, y arrays with the same size (datum-wise) as the source
    data.

    '''
    y_out = ohlc_shm.ustruct(fields)
    first = ohlc_shm._first.value
    last = ohlc_shm._last.value

    # write pushed data to flattened copy
    y_out[first:last] = rfn.structured_to_unstructured(
        ohlc_shm.array[fields]
    )

    # generate a flat-interpolated x-domain
    x_out = (
        np.broadcast_to(
            ohlc_shm._array['index'][:, None],
            (
                ohlc_shm._array.size,
                # 4,  # only ohlc
                y_out.shape[1],
            ),
        ) + np.array([-0.5, 0, 0, 0.5])
    )
    assert y_out.any()

    return (
        x_out,
        y_out,
    )
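
A self-contained sketch of the same flatten trick (the struct dtype here is illustrative only):

import numpy as np
from numpy.lib import recfunctions as rfn

# hypothetical 2-bar struct array matching the assumed field layout
bars = np.array(
    [(0, 10.0, 12.0, 9.5, 11.0), (1, 11.0, 11.5, 10.0, 10.5)],
    dtype=[
        ('index', 'i8'), ('open', 'f8'),
        ('high', 'f8'), ('low', 'f8'), ('close', 'f8'),
    ],
)
fields = ['open', 'high', 'low', 'close']

# flatten the 4 price fields to a (nbars, 4) float array
y = rfn.structured_to_unstructured(bars[fields])

# one x per price sample, spread +/-0.5 around each bar's index
x = np.broadcast_to(
    bars['index'][:, None].astype('f8'),
    y.shape,
) + np.array([-0.5, 0, 0, 0.5])
assert x.shape == y.shape == (2, 4)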
def to_step_format(
    shm: ShmArray,
    data_field: str,
    index_field: str = 'index',

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an input 1d shm array to a "step array" format
    for use by path graphics generation.

    '''
    i = shm._array['index'].copy()
    out = shm._array[data_field].copy()

    x_out = np.broadcast_to(
        i[:, None],
        (i.size, 2),
    ) + np.array([-0.5, 0.5])

    y_out = np.empty((len(out), 2), dtype=out.dtype)
    y_out[:] = out[:, np.newaxis]

    # start y at origin level
    y_out[0, 0] = 0
    return x_out, y_out
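
A minimal sketch of the step-array transform on a plain 1d series (variable names hypothetical):

import numpy as np

# a tiny 1d series over an integer index domain
i = np.arange(3)
vals = np.array([5.0, 7.0, 6.0])

# each sample becomes a horizontal segment spanning [i - 0.5, i + 0.5]
x_step = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([-0.5, 0.5])
y_step = np.repeat(vals[:, None], 2, axis=1)

# start y at the origin level, mirroring ``to_step_format()``
y_step[0, 0] = 0
assert x_step.shape == y_step.shape == (3, 2)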
@@ -873,9 +873,7 @@ async def process_trades_and_update_ui(
            mode.lines.remove_line(uuid=oid)

        # each clearing tick is responded to individually
        elif resp in (
            'broker_filled',
        ):
        elif resp in ('broker_filled',):

            known_order = book._sent_orders.get(oid)
            if not known_order:
setup.py | 23

@@ -57,7 +57,6 @@ setup(
        # from github currently (see requirements.txt)
        # 'trimeter',  # not released yet..
        # 'tractor',
        # asyncvnc,

        # brokers
        'asks==2.4.8',

@@ -72,34 +71,32 @@ setup(

        # UI
        'PyQt5',
        # 'pyqtgraph', from our fork see reqs.txt
        'qdarkstyle >= 3.0.2',  # theming
        'fuzzywuzzy[speedup]',  # fuzzy search
        'pyqtgraph',
        'qdarkstyle >= 3.0.2',
        # fuzzy search
        'fuzzywuzzy[speedup]',

        # tsdbs
        # anyio-marketstore  # from gh see reqs.txt
        'pymarketstore',
    ],
    extras_require={

        # serialization
        'tsdb': [
            'docker',
        ],

    },
    tests_require=['pytest'],
    python_requires=">=3.10",
    keywords=[
        "async",
        "trading",
        "finance",
        "quant",
        "charting",
    ],
    python_requires=">=3.9",  # literally for ``datetime.datetime.fromisoformat``...
    keywords=["async", "trading", "finance", "quant", "charting"],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: ',
        'Operating System :: POSIX :: Linux',
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        'Intended Audience :: Financial and Insurance Industry',
        'Intended Audience :: Science/Research',