Compare commits

No commits in common. "storage_middleware_layer" and "310_plus" have entirely different histories.

storage_middleware_layer ... 310_plus
@@ -3,9 +3,10 @@ name: CI

 on:
   # Triggers the workflow on push or pull request events but only for the master branch
-  pull_request:
   push:
     branches: [ master ]
+  pull_request:
+    branches: [ master ]

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:

@@ -13,46 +14,21 @@ on:
 jobs:

-  # test that we can generate a software distribution and install it
-  # thus avoid missing file issues after packaging.
-  sdist-linux:
-    name: 'sdist'
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-
-      - name: Setup python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.10'
-
-      - name: Build sdist
-        run: python setup.py sdist --formats=zip
-
-      - name: Install sdist from .zips
-        run: python -m pip install dist/*.zip
-
   testing:
     name: 'install + test-suite'
-    timeout-minutes: 10
     runs-on: ubuntu-latest
     steps:

       - name: Checkout
         uses: actions/checkout@v3

-      - name: Build DB container
-        run: docker build -t piker:elastic dockering/elastic
-
       - name: Setup python
         uses: actions/setup-python@v3
         with:
           python-version: '3.10'

       - name: Install dependencies
-        run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
+        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

       - name: Test suite
         run: pytest tests -rs
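Note: the removed `.[es]` install target enables an optional dependency
group. The extra itself is not defined anywhere in this diff; a sketch of
how it would be declared in `setup.py` (the package list is an assumption,
not shown here):

    # setup.py (hypothetical excerpt): declaring the `es` extra used by
    # `pip install -U .[es]` in the removed CI step above
    from setuptools import setup

    setup(
        name='piker',
        extras_require={
            # assumed contents; installed only when requesting `.[es]`
            'es': ['elasticsearch'],
        },
    )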
13  README.rst

@@ -71,19 +71,6 @@ for a development install::
     source ./env/bin/activate
     pip install -r requirements.txt -e .

-install for nixos
-*****************
-for users of `NixOS` we offer a development shell envoirment that can be
-loaded with::
-
-    nix-shell develop.nix
-
-this will setup the required python environment to run piker, make sure to
-run::
-
-    pip install -r requirements.txt -e .
-
-once after loading the shell
-
 install for tinas
 *****************
@@ -50,8 +50,3 @@ prefer_data_account = [
 paper = "XX0000000"
 margin = "X0000000"
 ira = "X0000000"

-
-[deribit]
-key_id = 'XXXXXXXX'
-key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
32  develop.nix

@@ -1,32 +0,0 @@
-with (import <nixpkgs> {});
-with python310Packages;
-stdenv.mkDerivation {
-  name = "pip-env";
-  buildInputs = [
-    # System requirements.
-    readline
-
-    # Python requirements (enough to get a virtualenv going).
-    python310Full
-    virtualenv
-    setuptools
-    pyqt5
-    pip
-  ];
-  src = null;
-  shellHook = ''
-    # Allow the use of wheels.
-    SOURCE_DATE_EPOCH=$(date +%s)
-
-    # Augment the dynamic linker path
-    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib
-
-    export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";
-
-    if [ ! -d "venv" ]; then
-      virtualenv venv
-    fi
-
-    source venv/bin/activate
-  '';
-}
@@ -1,11 +0,0 @@
-FROM elasticsearch:7.17.4
-
-ENV ES_JAVA_OPTS "-Xms2g -Xmx2g"
-ENV ELASTIC_USERNAME "elastic"
-ENV ELASTIC_PASSWORD "password"
-
-COPY elasticsearch.yml /usr/share/elasticsearch/config/
-
-RUN printf "password" | ./bin/elasticsearch-keystore add -f -x "bootstrap.password"
-
-EXPOSE 19200
@@ -1,5 +0,0 @@
-network.host: 0.0.0.0
-
-http.port: 19200
-
-discovery.type: single-node
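Note: the two removed files above define the `piker:elastic` test
container built in the removed CI step (an Elasticsearch node on the
non-default port 19200 with a static `elastic`/`password` login). A quick
liveness check against it could look like this (a sketch using the
`requests` package, which is not part of this diff):

    # ping the elastic container defined above; credentials come from the
    # ENV lines in the removed Dockerfile
    import requests

    r = requests.get(
        'http://localhost:19200',
        auth=('elastic', 'password'),
    )
    print(r.json()['version']['number'])  # expect '7.17.4'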
@@ -3,12 +3,11 @@
 version: "3.5"

 services:
-  ib_gw_paper:
+  ib-gateway:
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    # image: waytrade/ib-gateway:981.3j
-    image: waytrade/ib-gateway:1012.2i
-    restart: 'no'  # restart on boot whenev there's a crash or user clicsk
+    image: waytrade/ib-gateway:981.3j
+    restart: always
     network_mode: 'host'

     volumes:
@@ -40,12 +39,14 @@ services:
     # this compose file which looks something like:
     # TWS_USERID='myuser'
     # TWS_PASSWORD='guest'
+    # TRADING_MODE=paper (or live)
+    # VNC_SERVER_PASSWORD='diggity'

     environment:
       TWS_USERID: ${TWS_USERID}
       TWS_PASSWORD: ${TWS_PASSWORD}
-      TRADING_MODE: 'paper'
-      VNC_SERVER_PASSWORD: 'doggy'
-      VNC_SERVER_PORT: '3003'
+      TRADING_MODE: ${TRADING_MODE:-paper}
+      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}

     # ports:
     #   - target: 4002
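Note: `${TRADING_MODE:-paper}` is compose's parameter-expansion syntax,
meaning "use the `TRADING_MODE` value from the host environment (or an
`.env` file alongside the compose file, per the comments above) and fall
back to `paper` when it isn't set". For illustration only, the equivalent
lookup in Python:

    # mirrors the compose defaults above
    import os

    trading_mode = os.environ.get('TRADING_MODE', 'paper')
    vnc_password = os.environ.get('VNC_SERVER_PASSWORD', '')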
@@ -61,40 +62,3 @@ services:
       # - "127.0.0.1:4001:4001"
       # - "127.0.0.1:4002:4002"
       # - "127.0.0.1:5900:5900"
-
-  # ib_gw_live:
-  #   image: waytrade/ib-gateway:1012.2i
-  #   restart: no
-  #   network_mode: 'host'
-
-  #   volumes:
-  #     - type: bind
-  #       source: ./jts_live.ini
-  #       target: /root/jts/jts.ini
-  #       # don't let ibc clobber this file for
-  #       # the main reason of not having a stupid
-  #       # timezone set..
-  #       read_only: true
-
-  #     # force our own ibc config
-  #     - type: bind
-  #       source: ./ibc.ini
-  #       target: /root/ibc/config.ini
-
-  #     # force our noop script - socat isn't needed in host mode.
-  #     - type: bind
-  #       source: ./fork_ports_delayed.sh
-  #       target: /root/scripts/fork_ports_delayed.sh
-
-  #     # force our noop script - socat isn't needed in host mode.
-  #     - type: bind
-  #       source: ./run_x11_vnc.sh
-  #       target: /root/scripts/run_x11_vnc.sh
-  #       read_only: true
-
-  #   # NOTE: to fill these out, define an `.env` file in the same dir as
-  #   # # this compose file which looks something like:
-  #   environment:
-  #     TRADING_MODE: 'live'
-  #     VNC_SERVER_PASSWORD: 'doggy'
-  #     VNC_SERVER_PORT: '3004'
@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
 #
 # The default value is 60.

-LoginDialogDisplayTimeout=20
+LoginDialogDisplayTimeout = 60


@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
 # be set dynamically at run-time: most users will never need it,
 # so don't use it unless you know you need it.

-; OverrideTwsApiPort=4002
+OverrideTwsApiPort=4002


 # Read-only Login
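Note: un-commenting `OverrideTwsApiPort=4002` pins the gateway's API
socket to 4002 regardless of the GUI settings, which keeps client-side
configuration static across container restarts. A client would then attach
to that port, e.g. (a sketch using the third-party `ib_insync` package,
which this diff does not itself reference):

    # connect to the dockerized gateway on the pinned API port
    from ib_insync import IB

    ib = IB()
    ib.connect('127.0.0.1', 4002, clientId=1)
    print(ib.isConnected())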
@@ -1,33 +0,0 @@
-[IBGateway]
-ApiOnly=true
-LocalServerPort=4001
-# NOTE: must be set if using IBC's "reject" mode
-TrustedIPs=127.0.0.1
-; RemoteHostOrderRouting=ndc1.ibllc.com
-; WriteDebug=true
-; RemotePortOrderRouting=4001
-; useRemoteSettings=false
-; tradingMode=p
-; Steps=8
-; colorPalletName=dark
-
-# window geo, this may be useful for sending `xdotool` commands?
-; MainWindow.Width=1986
-; screenHeight=3960
-
-
-[Logon]
-Locale=en
-# most markets are oriented around this zone
-# so might as well hard code it.
-TimeZone=America/New_York
-UseSSL=true
-displayedproxymsg=1
-os_titlebar=true
-s3store=true
-useRemoteSettings=false
-
-[Communication]
-ctciAutoEncrypt=true
-Region=usr
-; Peer=cdc1.ibllc.com:4001
@@ -1,35 +1,16 @@
 #!/bin/sh
-# start vnc server and listen for connections
-# on port specced in `$VNC_SERVER_PORT`

+# start VNC server
 x11vnc \
-    -listen 127.0.0.1 \
-    -allow 127.0.0.1 \
-    -rfbport "${VNC_SERVER_PORT}" \
+    -ncache_cr \
+    -listen localhost \
     -display :1 \
     -forever \
     -shared \
+    -logappend /var/log/x11vnc.log \
     -bg \
-    -nowf \
-    -noxdamage \
-    -noxfixes \
-    -no6 \
     -noipv6 \
+    -autoport 3003 \
-    # -nowcr \
-    # TODO: can't use this because of ``asyncvnc`` issue:
+    # can't use this because of ``asyncvnc`` issue:
     # https://github.com/barneygale/asyncvnc/issues/1
     # -passwd 'ibcansmbz'
-
-# XXX: optional graphics caching flags that seem to rekt the overlay
-# of the 2 gw windows? When running a single gateway
-# this seems to maybe optimize some memory usage?
-# -ncache_cr \
-# -ncache \
-
-# NOTE: this will prevent logs from going to the console.
-# -logappend /var/log/x11vnc.log \
-
-# where to start allocating ports
-# -autoport "${VNC_SERVER_PORT}" \
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers.
-# Copyright 2020-eternity Tyler Goodlet (in stewardship for pikers)
+# Copyright 2020-eternity Tyler Goodlet (in stewardship for piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -14,14 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-'''
+"""
 piker: trading gear for hackers.

-'''
-from .service import open_piker_runtime
-from .data.feed import open_feed
-
-__all__ = [
-    'open_piker_runtime',
-    'open_feed',
-]
+"""
@@ -0,0 +1,561 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Structured, daemon tree service management.
+
+"""
+from typing import Optional, Union, Callable, Any
+from contextlib import asynccontextmanager as acm
+from collections import defaultdict
+
+from pydantic import BaseModel
+import trio
+from trio_typing import TaskStatus
+import tractor
+
+from .log import get_logger, get_console_log
+from .brokers import get_brokermod
+
+
+log = get_logger(__name__)
+
+_root_dname = 'pikerd'
+
+_registry_addr = ('127.0.0.1', 6116)
+_tractor_kwargs: dict[str, Any] = {
+    # use a different registry addr then tractor's default
+    'arbiter_addr': _registry_addr
+}
+_root_modules = [
+    __name__,
+    'piker.clearing._ems',
+    'piker.clearing._client',
+]
+
+
+class Services(BaseModel):
+
+    actor_n: tractor._supervise.ActorNursery
+    service_n: trio.Nursery
+    debug_mode: bool  # tractor sub-actor debug mode flag
+    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    async def start_service_task(
+        self,
+        name: str,
+        portal: tractor.Portal,
+        target: Callable,
+        **kwargs,
+
+    ) -> (trio.CancelScope, tractor.Context):
+        '''
+        Open a context in a service sub-actor, add to a stack
+        that gets unwound at ``pikerd`` teardown.
+
+        This allows for allocating long-running sub-services in our main
+        daemon and explicitly controlling their lifetimes.
+
+        '''
+        async def open_context_in_task(
+            task_status: TaskStatus[
+                trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+
+        ) -> Any:
+
+            with trio.CancelScope() as cs:
+                async with portal.open_context(
+                    target,
+                    **kwargs,
+
+                ) as (ctx, first):
+
+                    # unblock once the remote context has started
+                    task_status.started((cs, first))
+                    log.info(
+                        f'`pikerd` service {name} started with value {first}'
+                    )
+                    try:
+                        # wait on any context's return value
+                        ctx_res = await ctx.result()
+                    except tractor.ContextCancelled:
+                        return await self.cancel_service(name)
+                    else:
+                        # wait on any error from the sub-actor
+                        # NOTE: this will block indefinitely until
+                        # cancelled either by error from the target
+                        # context function or by being cancelled here by
+                        # the surrounding cancel scope
+                        return (await portal.result(), ctx_res)
+
+        cs, first = await self.service_n.start(open_context_in_task)
+
+        # store the cancel scope and portal for later cancellation or
+        # retstart if needed.
+        self.service_tasks[name] = (cs, portal)
+
+        return cs, first
+
+    # TODO: per service cancellation by scope, we aren't using this
+    # anywhere right?
+    async def cancel_service(
+        self,
+        name: str,
+    ) -> Any:
+        log.info(f'Cancelling `pikerd` service {name}')
+        cs, portal = self.service_tasks[name]
+        # XXX: not entirely sure why this is required,
+        # and should probably be better fine tuned in
+        # ``tractor``?
+        cs.cancel()
+        return await portal.cancel_actor()
+
+
+_services: Optional[Services] = None
+
+
+@acm
+async def open_pikerd(
+    start_method: str = 'trio',
+    loglevel: Optional[str] = None,
+
+    # XXX: you should pretty much never want debug mode
+    # for data daemons when running in production.
+    debug_mode: bool = False,
+
+) -> Optional[tractor._portal.Portal]:
+    '''
+    Start a root piker daemon who's lifetime extends indefinitely
+    until cancelled.
+
+    A root actor nursery is created which can be used to create and keep
+    alive underling services (see below).
+
+    '''
+    global _services
+    assert _services is None
+
+    # XXX: this may open a root actor as well
+    async with (
+        tractor.open_root_actor(
+
+            # passed through to ``open_root_actor``
+            arbiter_addr=_registry_addr,
+            name=_root_dname,
+            loglevel=loglevel,
+            debug_mode=debug_mode,
+            start_method=start_method,
+
+            # TODO: eventually we should be able to avoid
+            # having the root have more then permissions to
+            # spawn other specialized daemons I think?
+            enable_modules=_root_modules,
+        ) as _,
+
+        tractor.open_nursery() as actor_nursery,
+    ):
+        async with trio.open_nursery() as service_nursery:
+
+            # # setup service mngr singleton instance
+            # async with AsyncExitStack() as stack:
+
+            # assign globally for future daemon/task creation
+            _services = Services(
+                actor_n=actor_nursery,
+                service_n=service_nursery,
+                debug_mode=debug_mode,
+            )
+
+            yield _services
+
+
+@acm
+async def open_piker_runtime(
+    name: str,
+    enable_modules: list[str] = [],
+    start_method: str = 'trio',
+    loglevel: Optional[str] = None,
+
+    # XXX: you should pretty much never want debug mode
+    # for data daemons when running in production.
+    debug_mode: bool = False,
+
+) -> Optional[tractor._portal.Portal]:
+    '''
+    Start a piker actor who's runtime will automatically
+    sync with existing piker actors in local network
+    based on configuration.
+
+    '''
+    global _services
+    assert _services is None
+
+    # XXX: this may open a root actor as well
+    async with (
+        tractor.open_root_actor(
+
+            # passed through to ``open_root_actor``
+            arbiter_addr=_registry_addr,
+            name=name,
+            loglevel=loglevel,
+            debug_mode=debug_mode,
+            start_method=start_method,
+
+            # TODO: eventually we should be able to avoid
+            # having the root have more then permissions to
+            # spawn other specialized daemons I think?
+            enable_modules=_root_modules,
+        ) as _,
+    ):
+        yield tractor.current_actor()
+
+
+@acm
+async def maybe_open_runtime(
+    loglevel: Optional[str] = None,
+    **kwargs,
+
+) -> None:
+    """
+    Start the ``tractor`` runtime (a root actor) if none exists.
+
+    """
+    settings = _tractor_kwargs
+    settings.update(kwargs)
+
+    if not tractor.current_actor(err_on_no_runtime=False):
+        async with tractor.open_root_actor(
+            loglevel=loglevel,
+            **settings,
+        ):
+            yield
+    else:
+        yield
+
+
+@acm
+async def maybe_open_pikerd(
+    loglevel: Optional[str] = None,
+    **kwargs,
+
+) -> Union[tractor._portal.Portal, Services]:
+    """If no ``pikerd`` daemon-root-actor can be found start it and
+    yield up (we should probably figure out returning a portal to self
+    though).
+
+    """
+    if loglevel:
+        get_console_log(loglevel)
+
+    # subtle, we must have the runtime up here or portal lookup will fail
+    async with maybe_open_runtime(loglevel, **kwargs):
+
+        async with tractor.find_actor(_root_dname) as portal:
+            # assert portal is not None
+            if portal is not None:
+                yield portal
+                return
+
+    # presume pikerd role since no daemon could be found at
+    # configured address
+    async with open_pikerd(
+
+        loglevel=loglevel,
+        debug_mode=kwargs.get('debug_mode', False),
+
+    ) as _:
+        # in the case where we're starting up the
+        # tractor-piker runtime stack in **this** process
+        # we return no portal to self.
+        yield None
+
+
+# brokerd enabled modules
+_data_mods = [
+    'piker.brokers.core',
+    'piker.brokers.data',
+    'piker.data',
+    'piker.data.feed',
+    'piker.data._sampling'
+]
+
+
+class Brokerd:
+    locks = defaultdict(trio.Lock)
+
+
+@acm
+async def find_service(
+    service_name: str,
+) -> Optional[tractor.Portal]:
+
+    log.info(f'Scanning for service `{service_name}`')
+    # attach to existing daemon by name if possible
+    async with tractor.find_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as maybe_portal:
+        yield maybe_portal
+
+
+async def check_for_service(
+    service_name: str,
+
+) -> bool:
+    '''
+    Service daemon "liveness" predicate.
+
+    '''
+    async with tractor.query_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as sockaddr:
+        return sockaddr
+
+
+@acm
+async def maybe_spawn_daemon(
+
+    service_name: str,
+    service_task_target: Callable,
+    spawn_args: dict[str, Any],
+    loglevel: Optional[str] = None,
+    **kwargs,
+
+) -> tractor.Portal:
+    '''
+    If no ``service_name`` daemon-actor can be found,
+    spawn one in a local subactor and return a portal to it.
+
+    If this function is called from a non-pikerd actor, the
+    spawned service will persist as long as pikerd does or
+    it is requested to be cancelled.
+
+    This can be seen as a service starting api for remote-actor
+    clients.
+
+    '''
+    if loglevel:
+        get_console_log(loglevel)
+
+    # serialize access to this section to avoid
+    # 2 or more tasks racing to create a daemon
+    lock = Brokerd.locks[service_name]
+    await lock.acquire()
+
+    async with find_service(service_name) as portal:
+        if portal is not None:
+            lock.release()
+            yield portal
+            return
+
+    log.warning(f"Couldn't find any existing {service_name}")
+
+    # ask root ``pikerd`` daemon to spawn the daemon we need if
+    # pikerd is not live we now become the root of the
+    # process tree
+    async with maybe_open_pikerd(
+
+        loglevel=loglevel,
+        **kwargs,
+
+    ) as pikerd_portal:
+
+        if pikerd_portal is None:
+            # we are the root and thus are `pikerd`
+            # so spawn the target service directly by calling
+            # the provided target routine.
+            # XXX: this assumes that the target is well formed and will
+            # do the right things to setup both a sub-actor **and** call
+            # the ``_Services`` api from above to start the top level
+            # service task for that actor.
+            await service_task_target(**spawn_args)
+
+        else:
+            # tell the remote `pikerd` to start the target,
+            # the target can't return a non-serializable value
+            # since it is expected that service startingn is
+            # non-blocking and the target task will persist running
+            # on `pikerd` after the client requesting it's start
+            # disconnects.
+            await pikerd_portal.run(
+                service_task_target,
+                **spawn_args,
+            )
+
+        async with tractor.wait_for_actor(service_name) as portal:
+            lock.release()
+            yield portal
+            await portal.cancel_actor()
+
+
+async def spawn_brokerd(
+
+    brokername: str,
+    loglevel: Optional[str] = None,
+    **tractor_kwargs,
+
+) -> bool:
+
+    log.info(f'Spawning {brokername} broker daemon')
+
+    brokermod = get_brokermod(brokername)
+    dname = f'brokerd.{brokername}'
+
+    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
+    tractor_kwargs.update(extra_tractor_kwargs)
+
+    global _services
+    assert _services
+
+    # ask `pikerd` to spawn a new sub-actor and manage it under its
+    # actor nursery
+    modpath = brokermod.__name__
+    broker_enable = [modpath]
+    for submodname in getattr(
+        brokermod,
+        '__enable_modules__',
+        [],
+    ):
+        subpath = f'{modpath}.{submodname}'
+        broker_enable.append(subpath)
+
+    portal = await _services.actor_n.start_actor(
+        dname,
+        enable_modules=_data_mods + broker_enable,
+        loglevel=loglevel,
+        debug_mode=_services.debug_mode,
+        **tractor_kwargs
+    )
+
+    # non-blocking setup of brokerd service nursery
+    from .data import _setup_persistent_brokerd
+
+    await _services.start_service_task(
+        dname,
+        portal,
+        _setup_persistent_brokerd,
+        brokername=brokername,
+    )
+    return True
+
+
+@acm
+async def maybe_spawn_brokerd(
+
+    brokername: str,
+    loglevel: Optional[str] = None,
+    **kwargs,
+
+) -> tractor.Portal:
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.
+
+    '''
+    async with maybe_spawn_daemon(
+
+        f'brokerd.{brokername}',
+        service_task_target=spawn_brokerd,
+        spawn_args={'brokername': brokername, 'loglevel': loglevel},
+        loglevel=loglevel,
+        **kwargs,
+
+    ) as portal:
+        yield portal
+
+
+async def spawn_emsd(
+
+    loglevel: Optional[str] = None,
+    **extra_tractor_kwargs
+
+) -> bool:
+    """
+    Start the clearing engine under ``pikerd``.
+
+    """
+    log.info('Spawning emsd')
+
+    global _services
+    assert _services
+
+    portal = await _services.actor_n.start_actor(
+        'emsd',
+        enable_modules=[
+            'piker.clearing._ems',
+            'piker.clearing._client',
+        ],
+        loglevel=loglevel,
+        debug_mode=_services.debug_mode,  # set by pikerd flag
+        **extra_tractor_kwargs
+    )
+
+    # non-blocking setup of clearing service
+    from .clearing._ems import _setup_persistent_emsd
+
+    await _services.start_service_task(
+        'emsd',
+        portal,
+        _setup_persistent_emsd,
+    )
+    return True
+
+
+@acm
+async def maybe_open_emsd(
+
+    brokername: str,
+    loglevel: Optional[str] = None,
+    **kwargs,
+
+) -> tractor._portal.Portal:  # noqa
+
+    async with maybe_spawn_daemon(
+
+        'emsd',
+        service_task_target=spawn_emsd,
+        spawn_args={'loglevel': loglevel},
+        loglevel=loglevel,
+        **kwargs,
+
+    ) as portal:
+        yield portal
+
+
+# TODO: ideally we can start the tsdb "on demand" but it's
+# probably going to require "rootless" docker, at least if we don't
+# want to expect the user to start ``pikerd`` with root perms all the
+# time.
+# async def maybe_open_marketstored(
+#     loglevel: Optional[str] = None,
+#     **kwargs,
+
+# ) -> tractor._portal.Portal:  # noqa
+
+#     async with maybe_spawn_daemon(
+
+#         'marketstored',
+#         service_task_target=spawn_emsd,
+#         spawn_args={'loglevel': loglevel},
+#         loglevel=loglevel,
+#         **kwargs,
+
+#     ) as portal:
+#         yield portal
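Note: the module added above is the daemon/service layer (`pikerd` plus
per-broker `brokerd` sub-actors). Client code generally enters through the
`maybe_*` context managers; a minimal usage sketch, not part of the diff
(broker name and loglevel are illustrative):

    # attach to (or lazily spawn) a kraken brokerd under pikerd
    import trio
    from piker._daemon import maybe_spawn_brokerd

    async def main():
        # yields a tractor.Portal to the `brokerd.kraken` sub-actor,
        # spawning `pikerd` first if no root daemon can be found
        async with maybe_spawn_brokerd('kraken', loglevel='info') as portal:
            print(f'brokerd up: {portal.channel.uid}')

    trio.run(main)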
@@ -18,10 +18,7 @@
 Profiling wrappers for internal libs.

 """
-import os
-import sys
 import time
-from time import perf_counter
 from functools import wraps

 # NOTE: you can pass a flag to enable this:
@@ -47,193 +44,3 @@ def timeit(fn):
         return res

     return wrapper
-
-
-# Modified version of ``pyqtgraph.debug.Profiler`` that
-# core seems hesitant to land in:
-# https://github.com/pyqtgraph/pyqtgraph/pull/2281
-class Profiler(object):
-    '''
-    Simple profiler allowing measurement of multiple time intervals.
-
-    By default, profilers are disabled.  To enable profiling, set the
-    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
-    fully-qualified names of profiled functions.
-
-    Calling a profiler registers a message (defaulting to an increasing
-    counter) that contains the time elapsed since the last call.  When the
-    profiler is about to be garbage-collected, the messages are passed to the
-    outer profiler if one is running, or printed to stdout otherwise.
-
-    If `delayed` is set to False, messages are immediately printed instead.
-
-    Example:
-        def function(...):
-            profiler = Profiler()
-            ... do stuff ...
-            profiler('did stuff')
-            ... do other stuff ...
-            profiler('did other stuff')
-            # profiler is garbage-collected and flushed at function end
-
-    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
-    "C.function" (without the module name) will enable this profiler.
-
-    For regular functions, use the qualified name of the function, stripping
-    only the initial "pyqtgraph." prefix from the module.
-    '''
-
-    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
-    _profilers = _profilers.split(",") if _profilers is not None else []
-
-    _depth = 0
-
-    # NOTE: without this defined at the class level
-    # you won't see apprpriately "nested" sub-profiler
-    # instance calls.
-    _msgs = []
-
-    # set this flag to disable all or individual profilers at runtime
-    disable = False
-
-    class DisabledProfiler(object):
-        def __init__(self, *args, **kwds):
-            pass
-
-        def __call__(self, *args):
-            pass
-
-        def finish(self):
-            pass
-
-        def mark(self, msg=None):
-            pass
-
-    _disabledProfiler = DisabledProfiler()
-
-    def __new__(
-        cls,
-        msg=None,
-        disabled='env',
-        delayed=True,
-        ms_threshold: float = 0.0,
-    ):
-        """Optionally create a new profiler based on caller's qualname.
-
-        ``ms_threshold`` can be set to value in ms for which, if the
-        total measured time of the lifetime of this profiler is **less
-        than** this value, then no profiling messages will be printed.
-        Setting ``delayed=False`` disables this feature since messages
-        are emitted immediately.
-
-        """
-        if (
-            disabled is True
-            or (
-                disabled == 'env'
-                and len(cls._profilers) == 0
-            )
-        ):
-            return cls._disabledProfiler
-
-        # determine the qualified name of the caller function
-        caller_frame = sys._getframe(1)
-        try:
-            caller_object_type = type(caller_frame.f_locals["self"])
-
-        except KeyError:  # we are in a regular function
-            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]
-
-        else:  # we are in a method
-            qualifier = caller_object_type.__name__
-        func_qualname = qualifier + "." + caller_frame.f_code.co_name
-
-        if disabled == 'env' and func_qualname not in cls._profilers:
-            # don't do anything
-            return cls._disabledProfiler
-
-        cls._depth += 1
-        obj = super(Profiler, cls).__new__(cls)
-        obj._msgs = []
-
-        # create an actual profiling object
-        if cls._depth < 1:
-            cls._msgs = []
-
-        obj._name = msg or func_qualname
-        obj._delayed = delayed
-        obj._markCount = 0
-        obj._finished = False
-        obj._firstTime = obj._lastTime = perf_counter()
-        obj._mt = ms_threshold
-        obj._newMsg("> Entering " + obj._name)
-        return obj
-
-    def __call__(self, msg=None):
-        """Register or print a new message with timing information.
-        """
-        if self.disable:
-            return
-        if msg is None:
-            msg = str(self._markCount)
-
-        self._markCount += 1
-        newTime = perf_counter()
-        tot_ms = (newTime - self._firstTime) * 1000
-        ms = (newTime - self._lastTime) * 1000
-        self._newMsg(
-            f"  {msg}: {ms:0.4f}, tot:{tot_ms:0.4f}"
-        )
-
-        self._lastTime = newTime
-
-    def mark(self, msg=None):
-        self(msg)
-
-    def _newMsg(self, msg, *args):
-        msg = "  " * (self._depth - 1) + msg
-        if self._delayed:
-            self._msgs.append((msg, args))
-        else:
-            print(msg % args)
-
-    def __del__(self):
-        self.finish()
-
-    def finish(self, msg=None):
-        """Add a final message; flush the message list if no parent profiler.
-        """
-        if self._finished or self.disable:
-            return
-
-        self._finished = True
-        if msg is not None:
-            self(msg)
-
-        tot_ms = (perf_counter() - self._firstTime) * 1000
-        self._newMsg(
-            "< Exiting %s, total time: %0.4f ms",
-            self._name,
-            tot_ms,
-        )
-
-        if tot_ms < self._mt:
-            # print(f'{tot_ms} < {self._mt}, clearing')
-            # NOTE: this list **must** be an instance var to avoid
-            # deleting common messages during GC I think?
-            self._msgs.clear()
-        # else:
-        #     print(f'{tot_ms} > {self._mt}, not clearing')
-
-        # XXX: why is this needed?
-        # don't we **want to show** nested profiler messages?
-        if self._msgs:  # and self._depth < 1:
-
-            # if self._msgs:
-            print("\n".join([m[0] % m[1] for m in self._msgs]))
-
-            # clear all entries
-            self._msgs.clear()
-            # type(self)._msgs = []
-
-        type(self)._depth -= 1
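Note: the removed `Profiler` (a tweaked copy of `pyqtgraph.debug.Profiler`,
per the PR link above) is opt-in via the `PYQTGRAPHPROFILE` env var. Its
own docstring example, fleshed out slightly (names are illustrative):

    # enabled only when PYQTGRAPHPROFILE names this function's qualname,
    # e.g. PYQTGRAPHPROFILE=render_frame
    def render_frame():
        profiler = Profiler(ms_threshold=6.0, delayed=True)
        # ... build the scene ...
        profiler('built scene')   # records time elapsed since entry
        # ... paint it ...
        profiler('painted')       # elapsed since the previous mark
        # messages are flushed when the profiler is garbage-collected at
        # function exit, and dropped entirely if total time < 6ms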
@@ -20,41 +20,30 @@ Broker clients, daemons and general back end machinery.
 from importlib import import_module
 from types import ModuleType

+# TODO: move to urllib3/requests once supported
+import asks
+asks.init('trio')
+
 __brokers__ = [
     'binance',
+    'questrade',
+    'robinhood',
     'ib',
     'kraken',
-
-    # broken but used to work
-    # 'questrade',
-    # 'robinhood',
-
-    # TODO: we should get on these stat!
-    # alpaca
-    # wstrade
-    # iex
-
-    # deribit
-    # kucoin
-    # bitso
 ]


 def get_brokermod(brokername: str) -> ModuleType:
-    '''
-    Return the imported broker module by name.
-
-    '''
+    """Return the imported broker module by name.
+    """
     module = import_module('.' + brokername, 'piker.brokers')
     # we only allow monkeying because it's for internal keying
     module.name = module.__name__.split('.')[-1]
     return module


 def iter_brokermods():
-    '''
-    Iterate all built-in broker modules.
-
-    '''
+    """Iterate all built-in broker modules.
+    """
     for name in __brokers__:
         yield get_brokermod(name)
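Note: `get_brokermod()` is the dynamic-import hook the daemon layer uses to
resolve a backend by name. A quick illustration of the monkey-patched
`.name` attribute:

    # resolve backends the same way `spawn_brokerd()` does
    from piker.brokers import get_brokermod, iter_brokermods

    binance = get_brokermod('binance')
    assert binance.name == 'binance'  # short name patched onto the module

    for mod in iter_brokermods():     # walks __brokers__
        print(mod.name)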
@@ -33,23 +33,15 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
+from pydantic.dataclasses import dataclass
+from pydantic import BaseModel
 import wsproto

 from .._cacheables import open_cached_client
-from ._util import (
-    resproc,
-    SymbolNotFound,
-    DataUnavailable,
-)
-from ..log import (
-    get_logger,
-    get_console_log,
-)
-from ..data.types import Struct
-from ..data._web_bs import (
-    open_autorecon_ws,
-    NoBsWs,
-)
+from ._util import resproc, SymbolNotFound
+from ..log import get_logger, get_console_log
+from ..data import ShmArray
+from ..data._web_bs import open_autorecon_ws, NoBsWs

 log = get_logger(__name__)

@@ -87,14 +79,12 @@ _show_wap_in_history = False


 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(Struct, frozen=True):
+class Pair(BaseModel):
     symbol: str
     status: str

     baseAsset: str
     baseAssetPrecision: int
-    cancelReplaceAllowed: bool
-    allowTrailingStop: bool
     quoteAsset: str
     quotePrecision: int
     quoteAssetPrecision: int

@@ -110,21 +100,18 @@ class Pair(Struct, frozen=True):
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool

-    defaultSelfTradePreventionMode: str
-    allowedSelfTradePreventionModes: list[str]
-
     filters: list[dict[str, Union[str, int, float]]]
     permissions: list[str]


-class OHLC(Struct):
-    '''
-    Description of the flattened OHLC quote format.
+@dataclass
+class OHLC:
+    """Description of the flattened OHLC quote format.

     For schema details see:
     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

-    '''
+    """
     time: int

     open: float

@@ -147,9 +134,7 @@ class OHLC(Struct):


 # convert datetime obj timestamp to unixtime in milliseconds
-def binance_timestamp(
-    when: datetime
-) -> int:
+def binance_timestamp(when):
     return int((when.timestamp() * 1000) + (when.microsecond / 1000))

@@ -188,7 +173,7 @@ class Client:
         params = {}

         if sym is not None:
-            sym = sym.lower()
+            sym = sym.upper()
             params = {'symbol': sym}

         resp = await self._api(

@@ -245,7 +230,7 @@ class Client:
     ) -> dict:

         if end_dt is None:
-            end_dt = pendulum.now('UTC').add(minutes=1)
+            end_dt = pendulum.now('UTC')

         if start_dt is None:
             start_dt = end_dt.start_of(

@@ -275,7 +260,6 @@ class Client:
         for i, bar in enumerate(bars):

             bar = OHLC(*bar)
-            bar.typecast()

             row = []
             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):

@@ -303,7 +287,7 @@ async def get_client() -> Client:


 # validation type
-class AggTrade(Struct):
+class AggTrade(BaseModel):
     e: str   # Event type
     E: int   # Event time
     s: str   # Symbol

@@ -357,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

         elif msg.get('e') == 'aggTrade':

-            # NOTE: this is purely for a definition, ``msgspec.Struct``
-            # does not runtime-validate until you decode/encode.
-            # see: https://jcristharif.com/msgspec/structs.html#type-validation
+            # validate
             msg = AggTrade(**msg)

             # TODO: type out and require this quote format

@@ -370,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
                 'brokerd_ts': time.time(),
                 'ticks': [{
                     'type': 'trade',
-                    'price': float(msg.p),
-                    'size': float(msg.q),
+                    'price': msg.p,
+                    'size': msg.q,
                     'broker_ts': msg.T,
                 }],
             }

@@ -402,39 +384,41 @@ async def open_history_client(
     async with open_cached_client('binance') as client:

         async def get_ohlc(
-            timeframe: float,
-            end_dt: datetime | None = None,
-            start_dt: datetime | None = None,
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
-            if timeframe != 60:
-                raise DataUnavailable('Only 1m bars are supported')
-
             array = await client.bars(
                 symbol,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
-            times = array['time']
-            if (
-                end_dt is None
-            ):
-                inow = round(time.time())
-                if (inow - times[-1]) > 60:
-                    await tractor.breakpoint()
-
-            start_dt = pendulum.from_timestamp(times[0])
-            end_dt = pendulum.from_timestamp(times[-1])
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])

             return array, start_dt, end_dt

         yield get_ohlc, {'erlangs': 3, 'rate': 3}


+async def backfill_bars(
+    sym: str,
+    shm: ShmArray,  # type: ignore # noqa
+    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+) -> None:
+    """Fill historical bars into shared mem / storage afap.
+    """
+    with trio.CancelScope() as cs:
+        async with open_cached_client('binance') as client:
+            bars = await client.bars(symbol=sym)
+            shm.push(bars)
+            task_status.started(cs)
+
+
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,

@@ -464,20 +448,12 @@ async def stream_quotes(
             d = cache[sym.upper()]
             syminfo = Pair(**d)  # validation

-            si = sym_infos[sym] = syminfo.to_dict()
-            filters = {}
-            for entry in syminfo.filters:
-                ftype = entry['filterType']
-                filters[ftype] = entry
-
+            si = sym_infos[sym] = syminfo.dict()
             # XXX: after manually inspecting the response format we
             # just directly pick out the info we need
-            si['price_tick_size'] = float(
-                filters['PRICE_FILTER']['tickSize']
-            )
-            si['lot_tick_size'] = float(
-                filters['LOT_SIZE']['stepSize']
-            )
+            si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
+            si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
             si['asset_type'] = 'crypto'

             symbol = symbols[0]

@@ -519,15 +495,14 @@ async def stream_quotes(
             subs.append("{sym}@bookTicker")

             # unsub from all pairs on teardown
-            if ws.connected():
-                await ws.send_msg({
-                    "method": "UNSUBSCRIBE",
-                    "params": subs,
-                    "id": uid,
-                })
+            await ws.send_msg({
+                "method": "UNSUBSCRIBE",
+                "params": subs,
+                "id": uid,
+            })

             # XXX: do we need to ack the unsub?
             # await ws.recv_msg()

         async with open_autorecon_ws(
             'wss://stream.binance.com/ws',
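Note: the `Struct` vs `BaseModel` swap above changes *when* validation
happens, as the removed comment says: `msgspec.Struct` fields are only
checked on decode/encode while `pydantic.BaseModel` validates at
construction. An illustrative sketch (not from the diff; field names are
generic):

    # msgspec validates on decode, not on construction
    import msgspec

    class Trade(msgspec.Struct):
        p: float
        q: float

    Trade(p='oops', q=1.0)  # no error: __init__ does not type-check
    # raises msgspec.ValidationError: `p` is not a float
    msgspec.json.decode(b'{"p": "oops", "q": 1.0}', type=Trade)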
@@ -29,15 +29,8 @@ import tractor
 from ..cli import cli
 from .. import watchlists as wl
 from ..log import get_console_log, colorize_json, get_logger
-from ..service import (
-    maybe_spawn_brokerd,
-    maybe_open_pikerd,
-)
-from ..brokers import (
-    core,
-    get_brokermod,
-    data,
-)
+from .._daemon import maybe_spawn_brokerd, maybe_open_pikerd
+from ..brokers import core, get_brokermod, data

 log = get_logger('cli')
 DEFAULT_BROKER = 'questrade'
@@ -46,148 +39,6 @@ _config_dir = click.get_app_dir('piker')
 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


-OK = '\033[92m'
-WARNING = '\033[93m'
-FAIL = '\033[91m'
-ENDC = '\033[0m'
-
-
-def print_ok(s: str, **kwargs):
-    print(OK + s + ENDC, **kwargs)
-
-
-def print_error(s: str, **kwargs):
-    print(FAIL + s + ENDC, **kwargs)
-
-
-def get_method(client, meth_name: str):
-    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
-    method = getattr(client, meth_name, None)
-    assert method
-    print_ok('found!.')
-    return method
-
-
-async def run_method(client, meth_name: str, **kwargs):
-    method = get_method(client, meth_name)
-    print('running...', end='', flush=True)
-    result = await method(**kwargs)
-    print_ok(f'done! result: {type(result)}')
-    return result
-
-
-async def run_test(broker_name: str):
-    brokermod = get_brokermod(broker_name)
-    total = 0
-    passed = 0
-    failed = 0
-
-    print('getting client...', end='', flush=True)
-    if not hasattr(brokermod, 'get_client'):
-        print_error('fail! no \'get_client\' context manager found.')
-        return
-
-    async with brokermod.get_client(is_brokercheck=True) as client:
-        print_ok('done! inside client context.')
-
-        # check for methods present on brokermod
-        method_list = [
-            'backfill_bars',
-            'get_client',
-            'trades_dialogue',
-            'open_history_client',
-            'open_symbol_search',
-            'stream_quotes',
-
-        ]
-
-        for method in method_list:
-            print(
-                f'checking brokermod for method \'{method}\'...',
-                end='', flush=True)
-            if not hasattr(brokermod, method):
-                print_error(f'fail! method \'{method}\' not found.')
-                failed += 1
-            else:
-                print_ok('done!')
-                passed += 1
-
-            total += 1
-
-        # check for methods present con brokermod.Client and their
-        # results
-
-        # for private methods only check is present
-        method_list = [
-            'get_balances',
-            'get_assets',
-            'get_trades',
-            'get_xfers',
-            'submit_limit',
-            'submit_cancel',
-            'search_symbols',
-        ]
-
-        for method_name in method_list:
-            try:
-                get_method(client, method_name)
-                passed += 1
-
-            except AssertionError:
-                print_error(f'fail! method \'{method_name}\' not found.')
-                failed += 1
-
-            total += 1
-
-        # check for methods present con brokermod.Client and their
-        # results
-
-        syms = await run_method(client, 'symbol_info')
-        total += 1
-
-        if len(syms) == 0:
-            raise BaseException('Empty Symbol list?')
-
-        passed += 1
-
-        first_sym = tuple(syms.keys())[0]
-
-        method_list = [
-            ('cache_symbols', {}),
-            ('search_symbols', {'pattern': first_sym[:-1]}),
-            ('bars', {'symbol': first_sym})
-        ]
-
-        for method_name, method_kwargs in method_list:
-            try:
-                await run_method(client, method_name, **method_kwargs)
-                passed += 1
-
-            except AssertionError:
-                print_error(f'fail! method \'{method_name}\' not found.')
-                failed += 1
-
-            total += 1
-
-        print(f'total: {total}, passed: {passed}, failed: {failed}')
-
-
-@cli.command()
-@click.argument('broker', nargs=1, required=True)
-@click.pass_obj
-def brokercheck(config, broker):
-    '''
-    Test broker apis for completeness.
-
-    '''
-    async def bcheck_main():
-        async with maybe_spawn_brokerd(broker) as portal:
-            await portal.run(run_test, broker)
-            await portal.cancel_actor()
-
-    trio.run(run_test, broker)
-
-
 @cli.command()
 @click.option('--keys', '-k', multiple=True,
               help='Return results only for these keys')
@@ -28,7 +28,7 @@ import trio

 from ..log import get_logger
 from . import get_brokermod
-from ..service import maybe_spawn_brokerd
+from .._daemon import maybe_spawn_brokerd
 from .._cacheables import open_cached_client

@@ -227,28 +227,26 @@ async def get_cached_feed(

 @tractor.stream
 async def start_quote_stream(
-    stream: tractor.Context,  # marks this as a streaming func
+    ctx: tractor.Context,  # marks this as a streaming func
     broker: str,
     symbols: List[Any],
     feed_type: str = 'stock',
     rate: int = 3,
 ) -> None:
-    '''
-    Handle per-broker quote stream subscriptions using a "lazy" pub-sub
+    """Handle per-broker quote stream subscriptions using a "lazy" pub-sub
     pattern.

     Spawns new quoter tasks for each broker backend on-demand.
     Since most brokers seem to support batch quote requests we
     limit to one task per process (for now).
-
-    '''
+    """
     # XXX: why do we need this again?
     get_console_log(tractor.current_actor().loglevel)

     # pull global vars from local actor
     symbols = list(symbols)
     log.info(
-        f"{stream.chan.uid} subscribed to {broker} for symbols {symbols}")
+        f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}")
     # another actor task may have already created it
     async with get_cached_feed(broker) as feed:

@@ -292,13 +290,13 @@ async def start_quote_stream(
                 assert fquote['displayable']
                 payload[sym] = fquote

-            await stream.send_yield(payload)
+            await ctx.send_yield(payload)

         await stream_poll_requests(

             # ``trionics.msgpub`` required kwargs
             task_name=feed_type,
-            ctx=stream,
+            ctx=ctx,
             topics=symbols,
             packetizer=feed.mod.packetizer,

@@ -321,11 +319,9 @@ async def call_client(


 class DataFeed:
-    '''
-    Data feed client for streaming symbol data from and making API
-    client calls to a (remote) ``brokerd`` daemon.
-
-    '''
+    """Data feed client for streaming symbol data from and making API client calls
+    to a (remote) ``brokerd`` daemon.
+    """
     _allowed = ('stock', 'option')

     def __init__(self, portal, brokermod):

@@ -1,70 +0,0 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives exchange; uses custom json rpc over
ws for client methods, then `cryptofeed` for data streams.
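
as a rough sketch, a request/response pair over that ws looks something like
the following (field names match the ``JSONRPCResult`` struct in ``api.py``;
the values here are made up):

.. code:: python

    # request sent down the websocket
    request = {
        'jsonrpc': '2.0',
        'id': 1,
        'method': 'public/get_instruments',
        'params': {'currency': 'BTC', 'kind': 'option', 'expired': 'false'},
    }

    # response carries the same ``id`` plus server side timing info
    response = {
        'jsonrpc': '2.0',
        'id': 1,
        'result': [],  # list of instrument records
        'usIn': 0, 'usOut': 0, 'usDiff': 0,  # microsecond timestamps
        'testnet': True,
    }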

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet, once the account is created, here is how you deposit fake crypto
to try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address, copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send
   fake coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history

@@ -1,65 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars
)
# from .broker import (
#     trades_dialogue,
#     norm_trade_records,
# )

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True

@@ -1,672 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
import json
import time
import asyncio

from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from datetime import datetime
from typing import Any, Optional, Iterable, Callable

import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np

from piker.data.types import Struct
from piker.data._web_bs import (
    NoBsWs,
    open_autorecon_ws,
    open_jsonrpc_session
)

from .._util import resproc

from piker import config
from piker.log import get_logger

from tractor.trionics import (
    broadcast_receiver,
    BroadcastReceiver,
    maybe_open_context
)
from tractor import to_asyncio

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT,
    FILLS, ORDER_INFO,
)
from cryptofeed.symbols import Symbol

log = get_logger(__name__)


_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]
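
# NOTE: (illustrative sketch, values made up) each row built from this
# dtype is one indexed ohlcv bar with fields addressable by name:
#
#   >>> bar = (0, 1661972086, 19000., 19100., 18950., 19050., 12.5, 0.)
#   >>> arr = np.array([bar], dtype=_ohlc_dtype)
#   >>> arr['close']
#   array([19050.])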


class JSONRPCResult(Struct):
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    close: list[float]
    cost: list[float]
    high: list[float]
    low: list[float]
    open: list[float]
    status: str
    ticks: list[int]
    volume: list[float]


class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool


# convert datetime obj timestamp to unixtime in milliseconds
def deribit_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))


def str_to_cb_sym(name: str) -> Symbol:
    base, strike_price, expiry_date, option_type = name.split('-')

    quote = base

    if option_type == 'put':
        option_type = PUT
    elif option_type == 'call':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)


def piker_sym_to_cb_sym(name: str) -> Symbol:
    base, expiry_date, strike_price, option_type = tuple(
        name.upper().split('-'))

    quote = base

    if option_type == 'P':
        option_type = PUT
    elif option_type == 'C':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())


def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized (futures month codes)
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']

    # deribit specific month names
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
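
# NOTE: (illustrative sketch) a worked decode of the 'YYMDD' expiry
# format handled above: exp = '22F07' gives year '22', month code 'F'
# so months[cb_norm.index('F')] == 'JAN', and day '07'; with base 'BTC',
# strike '50000' and a call this renders the deribit instrument name:
#
#   'BTC-07JAN22-50000-C'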


def get_config() -> dict[str, Any]:

    conf, path = config.load()

    section = conf.get('deribit')

    # TODO: document why we send this; these are the logging
    # params expected by cryptofeed
    conf['log'] = {}
    conf['log']['disabled'] = True

    if section is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf


class Client:

    def __init__(self, json_rpc: Callable) -> None:
        self._pairs: Optional[dict[str, Any]] = None

        config = get_config().get('deribit', {})

        if ('key_id' in config) and ('key_secret' in config):
            self._key_id = config['key_id']
            self._key_secret = config['key_secret']

        else:
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        """Return the set of positions for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        """Return the set of asset balances for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float
    ) -> dict:
        """Place an order.
        """
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        """Send a cancel request for order id.
        """
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result
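
    # NOTE: (illustrative sketch, not part of the client) a hypothetical
    # order round-trip using the two methods above, where ``action`` maps
    # onto deribit's ``private/buy`` / ``private/sell`` endpoints and the
    # ``order_id`` key path is an assumption from deribit's docs:
    #
    #   order = await client.submit_limit(
    #       symbol='btc-perpetual', price=19000.0, action='buy', size=10.0)
    #   await client.submit_cancel(order['order']['order_id'])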

    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        """
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):

            row = [
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0
            ]

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array

    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)


@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:

    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and then
            refreshes the access token while the nursery isn't cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
            renew_time = 10
            access_scope = 'trade:read_write'
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                if _expiry_time - time.time() < renew_time:
                    # we are close to the token expiry time

                    if _refresh_token is not None:
                        # we already have a refresh token so we
                        # don't need to send the secret again
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # we don't have a refresh token yet, send the
                        # secret to initialize
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # the first time this loop runs we must indicate
                        # the task has started since we now have auth
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch the auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
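
# NOTE: (illustrative sketch) typical usage of the factory above; in
# practice callers normally go through
# ``piker._cacheables.open_cached_client('deribit')`` which wraps this:
#
#   async with get_client() as client:
#       pairs = await client.symbol_info(currency='btc')
#       bars = await client.bars('btc-27jan23-20000-c')  # hypothetical sym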


@acm
async def open_feed_handler():
    fh = FeedHandler(config=get_config())
    yield fh
    await to_asyncio.run_task(fh.stop_async)


@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (cache_hit, fh):
        yield fh


async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _trade(data: dict, receipt_timestamp):
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    async def _l1(data: dict, receipt_timestamp):
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                 'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                 'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_price_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={
            'instrument': instrument
        },
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
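
# NOTE: (illustrative sketch) the cache-hit branch above is what lets a
# second consumer task share one underlying cryptofeed connection instead
# of opening another ws:
#
#   async with (
#       maybe_open_price_feed(sym) as feed1,  # creates the relay task
#       maybe_open_price_feed(sym) as feed2,  # broadcast clone, cache hit
#   ):
#       async for typ, quote in feed2:
#           ...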


async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_order_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed

(five binary image assets deleted, diffs not shown: the ``assets/*.png``
wallet screenshots referenced above; 169 KiB, 106 KiB, 59 KiB, 70 KiB and
132 KiB)

@@ -1,185 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import Any, Optional, Callable
import time

import trio
from trio_typing import TaskStatus
import pendulum
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor

from piker._cacheables import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (
    BrokerError,
    DataUnavailable,
)

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)
from cryptofeed.symbols import Symbol

from .api import (
    Client, Trade,
    get_config,
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
    maybe_open_price_feed
)

_spawn_kwargs = {
    'infect_asyncio': True,
}


log = get_logger(__name__)


@acm
async def open_history_client(
    instrument: str,
) -> tuple[Callable, int]:

    # TODO: implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            array = await client.bars(
                instrument,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if len(array) == 0:
                raise DataUnavailable

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym = symbols[0]

    async with (
        open_cached_client('deribit') as client,
        send_chan as send_chan
    ):

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            sym: {
                'symbol_info': {
                    'asset_type': 'option',
                    'price_tick_size': 0.0005
                },
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        nsym = piker_sym_to_cb_sym(sym)

        async with maybe_open_price_feed(sym) as stream:

            cache = await client.cache_symbols()

            last_trades = (await client.last_trades(
                cb_sym_to_deribit_inst(nsym), count=1)).trades

            if len(last_trades) == 0:
                last_trade = None
                async for typ, quote in stream:
                    if typ == 'trade':
                        last_trade = Trade(**(quote['data']))
                        break

            else:
                last_trade = Trade(**(last_trades[0]))

            first_quote = {
                'symbol': sym,
                'last': last_trade.price,
                'brokerd_ts': last_trade.timestamp,
                'ticks': [{
                    'type': 'trade',
                    'price': last_trade.price,
                    'size': last_trade.amount,
                    'broker_ts': last_trade.timestamp
                }]
            }
            task_status.started((init_msgs, first_quote))

            feed_is_live.set()

            async for typ, quote in stream:
                topic = quote['symbol']
                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('deribit') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # repack in dict form
                await stream.send(
                    await client.search_symbols(pattern))

@@ -1,134 +0,0 @@
``ib`` backend
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_insync` project.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.

currently there is not yet full support for:
- options charting and trading
- paxos based crypto rt feeds and trading


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [ib]
    hosts = [
        "127.0.0.1",
    ]
    # TODO: when we eventually spawn gateways in our
    # container, we can just dynamically allocate these
    # using IBC.
    ports = [
        4002,
        4003,
        4006,
        4001,
        7497,
    ]

    # XXX: for a paper account the flex web query service
    # is not supported so you have to manually download
    # an XML report and put it in a location that can be
    # accessed by the ``brokerd.ib`` backend code for parsing.
    flex_token = '1111111111111111'
    flex_trades_query_id = '6969696'  # live accounts only?

    # 3rd party web-api token
    # (XXX: not sure if this works yet)
    trade_log_token = '111111111111111'

    # when clients are being scanned this determines
    # which clients are preferred to be used for data feeds
    # based on account names which are detected as active
    # on each client.
    prefer_data_account = [
        # this has to be first in order to make data work with dual paper + live
        'main',
        'algopaper',
    ]

    [ib.accounts]
    main = 'U69696969'
    algopaper = 'DU9696969'


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    ["0000e1a7.630f5e5a.01.01"]
    secType = "FUT"
    conId = 515416577
    symbol = "MNQ"
    lastTradeDateOrContractMonth = "20221216"
    strike = 0.0
    right = ""
    multiplier = "2"
    exchange = "GLOBEX"
    primaryExchange = ""
    currency = "USD"
    localSymbol = "MNQZ2"
    tradingClass = "MNQ"
    includeExpired = false
    secIdType = ""
    secId = ""
    comboLegsDescrip = ""
    comboLegs = []
    execId = "0000e1a7.630f5e5a.01.01"
    time = 1661972086.0
    acctNumber = "DU69696969"
    side = "BOT"
    shares = 1.0
    price = 12372.75
    permId = 441472655
    clientId = 6116
    orderId = 985
    liquidation = 0
    cumQty = 1.0
    avgPrice = 12372.75
    orderRef = ""
    evRule = ""
    evMultiplier = 0.0
    modelCode = ""
    lastLiquidity = 1
    broker_time = 1661972086.0
    name = "ib"
    commission = 0.57
    realizedPNL = 243.41
    yield_ = 0.0
    yieldRedemptionDate = 0
    listingExchange = "GLOBEX"
    date = "2022-08-31T18:54:46+00:00"


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [ib.algopaper."mnq.globex.20221216"]
    size = -1.0
    ppu = 12423.630576923071
    bsuid = 515416577
    expiry = "2022-12-16T00:00:00+00:00"
    clears = [
        { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
    ]

@@ -20,10 +20,15 @@ Interactive Brokers API backend.
 Sub-modules within break into the core functionalities:

 - ``broker.py`` part for orders / trading endpoints
-- ``feed.py`` for real-time data feed endpoints
-- ``api.py`` for the core API machinery which is ``trio``-ized
+- ``data.py`` for real-time data feed endpoints
+- ``client.py`` for the core API machinery which is ``trio``-ized
   wrapping around ``ib_insync``.
+- ``report.py`` for the hackery to build manual pp calcs
+  to avoid ib's absolute bullshit FIFO style position
+  tracking..

 """
 from .api import (
     get_client,

@@ -33,10 +38,7 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import (
-    trades_dialogue,
-    norm_trade_records,
-)
+from .broker import trades_dialogue

 __all__ = [
     'get_client',

@@ -1,187 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
``ib`` utilities and hacks suitable for use in the backend and/or as
runnable script-programs.

'''
from typing import Literal
import subprocess

import tractor

from piker.log import get_logger

log = get_logger(__name__)


_reset_tech: Literal[
    'vnc',
    'i3ipc_xdotool',

    # TODO: in theory we can use a different linux DE API or
    # some other type of similar window scanning/mgmt client
    # (on other OSs) to do the same.

] = 'vnc'


async def data_reset_hack(
    reset_type: str = 'data',

) -> bool:
    '''
    Run key combos for resetting data feeds and yield back to caller
    when complete.

    NOTE: this is a linux-only hack-around!

    There are multiple "techs" you can use depending on your infra setup:

    - if running ib-gw in a container with a VNC server running the most
      performant method is the `'vnc'` option.

    - if running ib-gw/tws locally, and you are using `i3` you can use
      the ``i3ipc`` lib and ``xdotool`` to send the appropriate click
      and key-combos automatically to your local desktop's java X-apps.

    https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations

    TODOs:
    - a return type that hopefully determines if the hack was
      successful.
    - other OS support?
    - integration with ``ib-gw`` run in docker + Xorg?
    - is it possible to offer a local server that can be accessed by
      a client? Would sure be handy for running native java blobs
      that need to be wrangled.

    '''
    global _reset_tech

    match _reset_tech:
        case 'vnc':
            try:
                await tractor.to_asyncio.run_task(vnc_click_hack)
            except OSError:
                _reset_tech = 'i3ipc_xdotool'
                try:
                    i3ipc_xdotool_manual_click_hack()
                    return True
                except OSError:
                    return False

        case 'i3ipc_xdotool':
            i3ipc_xdotool_manual_click_hack()

        case _ as tech:
            raise RuntimeError(f'{tech} is not supported for reset tech!?')

    # we don't really need the ``xdotool`` approach any more B)
    return True


async def vnc_click_hack(
    reset_type: str = 'data'
) -> None:
    '''
    Reset the data or network connection for the VNC attached
    ib gateway using magic combos.

    '''
    key = {'data': 'f', 'connection': 'r'}[reset_type]

    import asyncvnc

    async with asyncvnc.connect(
        'localhost',
        port=3003,
        # password='ibcansmbz',
    ) as client:

        # move to middle of screen
        # 640x1800
        client.mouse.move(
            x=500,
            y=500,
        )
        client.mouse.click()
        client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked


def i3ipc_xdotool_manual_click_hack() -> None:
    import i3ipc

    i3 = i3ipc.Connection()
    t = i3.get_tree()

    orig_win_id = t.find_focused().window

    # for tws
    win_names: list[str] = [
        'Interactive Brokers',  # tws running in i3
        'IB Gateway',  # gw running in i3
        # 'IB',  # gw running in i3 (newer version?)
    ]

    for name in win_names:
        results = t.find_titled(name)
        print(f'results for {name}: {results}')
        if results:
            con = results[0]
            print(f'Resetting data feed for {name}')
            win_id = str(con.window)
            w, h = con.rect.width, con.rect.height

            # TODO: seems to be a few libs for python but not sure
            # if they support all the sub commands we need, order of
            # most recent commit history:
            # https://github.com/rr-/pyxdotool
            # https://github.com/ShaneHutter/pyxdotool
            # https://github.com/cphyc/pyxdotool

            # TODO: only run the reconnect (2nd) kc on a detected
            # disconnect?
            for key_combo, timeout in [
                # only required if we need a connection reset.
                # ('ctrl+alt+r', 12),
                # data feed reset.
                ('ctrl+alt+f', 6)
            ]:
                subprocess.call([
                    'xdotool',
                    'windowactivate', '--sync', win_id,

                    # move mouse to bottom left of window (where there should
                    # be nothing to click).
                    'mousemove_relative', '--sync', str(w - 4), str(h - 4),

                    # NOTE: we may need to stick a `--retry 3` in here..
                    'click', '--window', win_id,
                    '--repeat', '3', '1',

                    # hackzorzes
                    'key', key_combo,
                ],
                    timeout=timeout,
                )

            # re-activate and focus original window
            try:
                subprocess.call([
                    'xdotool',
                    'windowactivate', '--sync', str(orig_win_id),
                    'click', '--window', str(orig_win_id), '1',
                ])
            except subprocess.TimeoutExpired:
                log.exception('xdotool timed out?')

(four additional file diffs suppressed because they are too large)

@@ -1,64 +0,0 @@
``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes they sure are
accommodating to those of us who appreciate a little ``xmr``.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [kraken]
    accounts.spot = 'spot'
    key_descr = "spot"
    api_key = "69696969696969696696969696969696969696969696969696969696"
    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    [TFJBKK-SMBZS-VJ4UWS]
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
    postxid = "SMBZSE-M7IF5-CFI7LT"
    pair = "XXMRZEUR"
    time = 1655691993.4133966
    type = "buy"
    ordertype = "limit"
    price = "103.97000000"
    cost = "499.99999977"
    fee = "0.80000000"
    vol = "4.80907954"
    margin = "0.00000000"
    misc = ""


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [kraken.spot."xmreur.kraken"]
    size = 4.80907954
    ppu = 103.97000000
    bsuid = "XXMRZEUR"
    clears = [
        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
    ]

@@ -1,61 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around the kraken web API.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    norm_trade_records,
)

__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

@@ -1,621 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
    Union,
)
import time

from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.data.types import Struct
from piker.data._source import Symbol
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log

# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()
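
# NOTE: (illustrative sketch, values made up) the result above is sent as
# the 'API-Sign' header next to 'API-Key', per kraken's documented
# HMAC-SHA512(path + SHA256(nonce + POST data), b64decode(secret)) recipe:
#
#   data = {'nonce': '1616492376594', 'ordertype': 'limit'}
#   sig = get_kraken_signature('/0/private/AddOrder', data, secret)
#   headers = {'API-Key': api_key, 'API-Sign': sig}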


class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float = 0
    long_position_limit: float = float('inf')
|
|
||||||
class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()
    _pairs: dict[str, Pair] = {}

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Pair]:
        if self._pairs is None:
            # retrieve and cache all symbols before use
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int | None = None,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if (
                fetch_limit
                and i >= fetch_limit
            ):
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # we can get at most 50 results per query, see:
            # https://docs.kraken.com/rest/#tag/User-Data/operation/getTradeHistory
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we received fewer than the max (50) trade results
                # so this must be the end of the history; catch the
                # total count and stop paginating.
                count = resp['result']['count']
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:

            # look up the normalized name and asset info
            asset_key = entry['asset']
            asset_info = self.assets[asset_key]
            asset = self._atable[asset_key].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..
            cost = float(entry['fee'])

            fqsn = asset + '.kraken'
            pairinfo = Symbol.from_fqsn(
                fqsn,
                info={
                    'asset_type': 'crypto',
                    'lot_tick_size': asset_info['decimals'],
                },
            )

            tran = Transaction(
                fqsn=fqsn,
                sym=pairinfo,
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the break-even price.
                price='NaN',

                # XXX: see note above
                cost=cost,
            )
            trans[tran.tid] = tran

        return trans

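    # ---- editor's sketch (not part of the original diff) ----
    # The sign convention above: a withdrawal is booked like a "sell"
    # whose size includes the fee, eg. for the example response in the
    # comment block above:
    #
    #   size = -1 * (0.00300726 + 0.00001000)  # == -0.00301726
    # ---------------------------------------------------------
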
    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True to test the call without a real submission

    ) -> dict:
        '''
        Place an order and return the integer request id provided by the client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, Pair] | Pair:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return Pair(**data)
        else:
            return {key: Pair(**data) for key, data in pairs.items()}

    async def cache_symbols(self) -> dict:
        '''
        Load all market pair info, then build and cache it for
        downstream use.

        A ``._ntable: dict[str, str]`` is available for mapping the
        websocket pair name-keys and their http endpoint API (smh)
        equivalents to the "alternative name" which is generally the one
        we actually want to use XD

        '''
        if not self._pairs:
            self._pairs.update(await self.symbol_info())

            # table of all ws and rest keys to their alt-name values.
            ntable: dict[str, str] = {}

            for rest_key in list(self._pairs.keys()):

                pair: Pair = self._pairs[rest_key]
                altname = pair.altname
                wsname = pair.wsname
                ntable[rest_key] = ntable[wsname] = altname

                # register the pair under all monikers, a giant flat
                # surjection of all possible names to each info obj.
                self._pairs[altname] = self._pairs[wsname] = pair

            self._ntable.update(ntable)

        return self._pairs

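    # ---- editor's sketch (not part of the original diff) ----
    # What the normalization table ends up holding, using hypothetical
    # Kraken names for the BTC/USD pair: every known moniker maps to
    # the "altname", and all monikers map to the same `Pair` object:
    #
    #   _ntable == {'XXBTZUSD': 'XBTUSD', 'XBT/USD': 'XBTUSD', ...}
    #   _pairs['XXBTZUSD'] is _pairs['XBT/USD'] is _pairs['XBTUSD']
    # ---------------------------------------------------------
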
    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,

    ) -> dict[str, Any]:
        '''
        Search for a symbol by "alt name".

        It is expected that the ``Client._pairs`` table
        gets populated before conducting the underlying fuzzy-search
        over the pair-key set.

        '''
        if not len(self._pairs):
            await self.cache_symbols()
            assert self._pairs, '`Client.cache_symbols()` was never called!?'

        matches = fuzzy.extractBests(
            pattern,
            self._pairs,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0].altname: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: int | datetime | None = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array

        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> tuple[str, Pair]:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        return ticker.lower(), cls._pairs[ticker]


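# ---- editor's sketch (not part of the original diff) ----
# The zero-vwap normalization inside `Client.bars()` above is just a
# forward-fill; on a toy column of vwap values it behaves like this:
def _demo_vwap_ffill(vwaps: list[float]) -> list[float]:
    last_nz = vwaps[0]  # seed with the first value, as `bars()` does
    out = []
    for v in vwaps:
        if v != 0:
            last_nz = v
        # a zero entry is replaced by the last non-zero vwap seen
        out.append(v or last_nz)
    return out

# _demo_vwap_ffill([10.0, 0, 0, 10.5, 0])
# -> [10.0, 10.0, 10.0, 10.5, 10.5]
# ---------------------------------------------------------
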
@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
File diff suppressed because it is too large

@@ -1,459 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
    Pair,
)


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: list[Any] = []

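# ---- editor's sketch (not part of the original diff) ----
# How a raw ws `ohlc-1` payload row (all strings, per the kraken docs
# schema linked above) maps onto the struct; the values are made up:
#
#   payload = ['1680000000.1', '1680000060.0', '28000.1', '28001.0',
#              '27999.9', '28000.5', '28000.4', '1.234', 7]
#   ohlc = OHLC(42, 'ohlc-1', 'XBT/USD', *payload)
#   ohlc.typecast()  # str fields -> float per the annotations
# ---------------------------------------------------------
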
async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg

def normalize(
    ohlc: OHLC,

) -> tuple[str, dict]:
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercase symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote

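# ---- editor's sketch (not part of the original diff) ----
# Given the struct above, `normalize()` returns a lowercased topic key
# plus the flattened quote dict, roughly:
#
#   topic, quote = normalize(ohlc)
#   # topic == 'xbtusd'
#   # quote['symbol'] == 'XBTUSD', quote['last'] == quote['close'],
#   # quote['bar_wap'] == ohlc.vwap, plus broker/brokerd timestamps
# ---------------------------------------------------------
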
@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if (
                queries > 0
                or timeframe != 60
            ):
                raise DataUnavailable(
                    'Only a single query for 1m bars supported')

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}

async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()
            si: Pair = await client.symbol_info(sym)
            # try:
            #     si = Pair(**sym_info)  # validation
            # except TypeError:
            #     fields_diff = set(sym_info) - set(Pair.__struct_fields__)
            #     raise TypeError(
            #         f'Missing msg fields {fields_diff}'
            #     )
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1. / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    'pair': list(ws_pairs.values()),
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send({
                    pair[0].altname: pair[0]
                    for pair in matches
                })
@@ -18,9 +18,3 @@
 Market machinery for order executions, book, management.

 """
-from ._client import open_ems
-
-
-__all__ = [
-    'open_ems',
-]
@@ -22,10 +22,54 @@ from enum import Enum
 from typing import Optional

 from bidict import bidict
+from pydantic import BaseModel, validator

 from ..data._source import Symbol
-from ..data.types import Struct
-from ..pp import Position
+from ._messages import BrokerdPosition, Status


+class Position(BaseModel):
+    '''
+    Basic pp (personal position) model with attached fills history.
+
+    This type should be IPC wire ready?
+
+    '''
+    symbol: Symbol
+
+    # last size and avg entry price
+    size: float
+    avg_price: float  # TODO: contextual pricing
+
+    # ordered record of known constituent trade messages
+    fills: list[Status] = []
+
+    def update_from_msg(
+        self,
+        msg: BrokerdPosition,
+
+    ) -> None:
+
+        # XXX: better place to do this?
+        symbol = self.symbol
+
+        lot_size_digits = symbol.lot_size_digits
+        avg_price, size = (
+            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
+            round(msg['size'], ndigits=lot_size_digits),
+        )
+
+        self.avg_price = avg_price
+        self.size = size
+
+    @property
+    def dsize(self) -> float:
+        '''
+        The "dollar" size of the pp, normally in trading (fiat) unit
+        terms.
+
+        '''
+        return self.avg_price * self.size
+
+
 _size_units = bidict({
@@ -40,9 +84,34 @@ SizeUnit = Enum(
 )


-class Allocator(Struct):
+class Allocator(BaseModel):
+
+    class Config:
+        validate_assignment = True
+        copy_on_model_validation = False
+        arbitrary_types_allowed = True
+
+        # required to get the account validator lookup working?
+        extra = 'allow'
+        underscore_attrs_are_private = False

     symbol: Symbol
+    account: Optional[str] = 'paper'
+    # TODO: for enums this clearly doesn't fucking work, you can't set
+    # a default at startup by passing in a `dict` but yet you can set
+    # that value through assignment..for wtv cucked reason.. honestly, pure
+    # unintuitive garbage.
+    size_unit: str = 'currency'
+    _size_units: dict[str, Optional[str]] = _size_units
+
+    @validator('size_unit', pre=True)
+    def maybe_lookup_key(cls, v):
+        # apply the corresponding enum key for the text "description" value
+        if v not in _size_units:
+            return _size_units.inverse[v]
+
+        assert v in _size_units
+        return v

     # TODO: if we ever want ot support non-uniform entry-slot-proportion
     # "sizes"
@@ -51,28 +120,6 @@ class Allocator(Struct):
     units_limit: float
     currency_limit: float
     slots: int
-    account: Optional[str] = 'paper'
-
-    _size_units: bidict[str, Optional[str]] = _size_units
-
-    # TODO: for enums this clearly doesn't fucking work, you can't set
-    # a default at startup by passing in a `dict` but yet you can set
-    # that value through assignment..for wtv cucked reason.. honestly, pure
-    # unintuitive garbage.
-    _size_unit: str = 'currency'
-
-    @property
-    def size_unit(self) -> str:
-        return self._size_unit
-
-    @size_unit.setter
-    def size_unit(self, v: str) -> Optional[str]:
-        if v not in _size_units:
-            v = _size_units.inverse[v]
-
-        assert v in _size_units
-        self._size_unit = v
-        return v
-
     def step_sizes(
         self,
@@ -93,13 +140,10 @@ class Allocator(Struct):
         else:
             return self.units_limit

-    def limit_info(self) -> tuple[str, float]:
-        return self.size_unit, self.limit()
-
     def next_order_info(
         self,

-        # we only need a startup size for exit calcs, we can then
+        # we only need a startup size for exit calcs, we can the
         # determine how large slots should be if the initial pp size was
         # larger then the current live one, and the live one is smaller
         # then the initial config settings.
@@ -129,7 +173,7 @@ class Allocator(Struct):
             l_sub_pp = self.units_limit - abs_live_size

         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.ppu
+            live_cost_basis = abs_live_size * live_pp.avg_price
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price

@@ -140,14 +184,12 @@ class Allocator(Struct):

         # an entry (adding-to or starting a pp)
         if (
+            action == 'buy' and live_size > 0 or
+            action == 'sell' and live_size < 0 or
             live_size == 0
-            or (action == 'buy' and live_size > 0)
-            or action == 'sell' and live_size < 0
         ):
-            order_size = min(
-                slot_size,
-                max(l_sub_pp, 0),
-            )
+            order_size = min(slot_size, l_sub_pp)

         # an exit (removing-from or going to net-zero pp)
         else:
@@ -163,7 +205,7 @@ class Allocator(Struct):
             if size_unit == 'currency':
                 # compute the "projected" limit's worth of units at the
                 # current pp (weighted) price:
-                slot_size = currency_per_slot / live_pp.ppu
+                slot_size = currency_per_slot / live_pp.avg_price

             else:
                 slot_size = u_per_slot
@@ -202,12 +244,7 @@ class Allocator(Struct):
         if order_size < slot_size:
             # compute a fractional slots size to display
             slots_used = self.slots_used(
-                Position(
-                    symbol=sym,
-                    size=order_size,
-                    ppu=price,
-                    bsuid=sym,
-                )
+                Position(symbol=sym, size=order_size, avg_price=price)
             )

         return {
@@ -234,8 +271,8 @@ class Allocator(Struct):
         abs_pp_size = abs(pp.size)

         if self.size_unit == 'currency':
-            # live_currency_size = size or (abs_pp_size * pp.ppu)
-            live_currency_size = abs_pp_size * pp.ppu
+            # live_currency_size = size or (abs_pp_size * pp.avg_price)
+            live_currency_size = abs_pp_size * pp.avg_price
             prop = live_currency_size / self.currency_limit

         else:
@@ -247,6 +284,14 @@ class Allocator(Struct):
         return round(prop * self.slots)


+_derivs = (
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+)
+
+
 def mk_allocator(

     symbol: Symbol,
@@ -255,7 +300,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        # 'size_unit': 'currency',
+        'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,
@@ -273,9 +318,42 @@ def mk_allocator(
         'currency_limit': 6e3,
         'slots': 6,
     }

     defaults.update(user_def)

-    return Allocator(
+    alloc = Allocator(
         symbol=symbol,
         **defaults,
     )
+
+    asset_type = symbol.type_key
+
+    # specific configs by asset class / type
+
+    if asset_type in _derivs:
+        # since it's harder to know how currency "applies" in this case
+        # given leverage properties
+        alloc.size_unit = '# units'
+
+        # set units limit to slots size thus making make the next
+        # entry step 1.0
+        alloc.units_limit = alloc.slots
+
+    # if the current position is already greater then the limit
+    # settings, increase the limit to the current position
+    if alloc.size_unit == 'currency':
+        startup_size = startup_pp.size * startup_pp.avg_price
+
+        if startup_size > alloc.currency_limit:
+            alloc.currency_limit = round(startup_size, ndigits=2)
+
+    else:
+        startup_size = abs(startup_pp.size)
+
+        if startup_size > alloc.units_limit:
+            alloc.units_limit = startup_size
+
+    if asset_type in _derivs:
+        alloc.slots = alloc.units_limit
+
+    return alloc
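# ---- editor's sketch (not part of the original diff) ----
# Both sides of the allocator hunks above rely on the same lookup:
# `_size_units` is a bidict, so a "description" value can be mapped
# back to its key. With a hypothetical table:
#
#   from bidict import bidict
#   _size_units = bidict({'currency': '$ size', 'units': '# units'})
#   _size_units.inverse['# units']  # -> 'units'
# ---------------------------------------------------------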
@@ -18,35 +18,26 @@
 Orders and execution client API.

 """
-from __future__ import annotations
 from contextlib import asynccontextmanager as acm
+from typing import Dict
 from pprint import pformat
-from typing import TYPE_CHECKING
+from dataclasses import dataclass, field

 import trio
 import tractor
 from tractor.trionics import broadcast_receiver

 from ..log import get_logger
-from ..data.types import Struct
-from ..service import maybe_open_emsd
-from ._messages import (
-    Order,
-    Cancel,
-)
-from ..brokers import get_brokermod
-
-if TYPE_CHECKING:
-    from ._messages import (
-        BrokerdPosition,
-        Status,
-    )
+from ._ems import _emsd_main
+from .._daemon import maybe_open_emsd
+from ._messages import Order, Cancel


 log = get_logger(__name__)


-class OrderBook(Struct):
+@dataclass
+class OrderBook:
     '''EMS-client-side order book ctl and tracking.

     A style similar to "model-view" is used here where this api is
@@ -61,18 +52,20 @@ class OrderBook(Struct):
     # mem channels used to relay order requests to the EMS daemon
     _to_ems: trio.abc.SendChannel
     _from_order_book: trio.abc.ReceiveChannel
-    _sent_orders: dict[str, Order] = {}
+    _sent_orders: Dict[str, Order] = field(default_factory=dict)
+    _ready_to_receive: trio.Event = trio.Event()

     def send(
         self,
-        msg: Order | dict,
+        msg: Order,

     ) -> dict:
         self._sent_orders[msg.oid] = msg
-        self._to_ems.send_nowait(msg)
+        self._to_ems.send_nowait(msg.dict())
         return msg

-    def send_update(
+    def update(
         self,

         uuid: str,
@@ -80,8 +73,9 @@ class OrderBook(Struct):

     ) -> dict:
         cmd = self._sent_orders[uuid]
-        msg = cmd.copy(update=data)
-        self._sent_orders[uuid] = msg
+        msg = cmd.dict()
+        msg.update(data)
+        self._sent_orders[uuid] = Order(**msg)
         self._to_ems.send_nowait(msg)
         return cmd

@@ -89,18 +83,12 @@ class OrderBook(Struct):
         """Cancel an order (or alert) in the EMS.

         """
-        cmd = self._sent_orders.get(uuid)
-        if not cmd:
-            log.error(
-                f'Unknown order {uuid}!?\n'
-                f'Maybe there is a stale entry or line?\n'
-                f'You should report this as a bug!'
-            )
+        cmd = self._sent_orders[uuid]
         msg = Cancel(
             oid=uuid,
             symbol=cmd.symbol,
         )
-        self._to_ems.send_nowait(msg)
+        self._to_ems.send_nowait(msg.dict())


 _orders: OrderBook = None
@@ -161,36 +149,21 @@ async def relay_order_cmds_from_sync_code(
     book = get_orders()
     async with book._from_order_book.subscribe() as orders_stream:
         async for cmd in orders_stream:
-            sym = cmd.symbol
-            msg = pformat(cmd)
-            if sym == symbol_key:
-                log.info(f'Send order cmd:\n{msg}')
+            if cmd['symbol'] == symbol_key:
+                log.info(f'Send order cmd:\n{pformat(cmd)}')
                 # send msg over IPC / wire
                 await to_ems_stream.send(cmd)
-            else:
-                log.warning(
-                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
-                    f'\n{msg}'
-                )


 @acm
 async def open_ems(
     fqsn: str,
-    mode: str = 'live',
-    loglevel: str = 'error',

-) -> tuple[
+) -> (
     OrderBook,
     tractor.MsgStream,
-    dict[
-        # brokername, acctid
-        tuple[str, str],
-        list[BrokerdPosition],
-    ],
-    list[str],
-    dict[str, Status],
-]:
+    dict,
+):
     '''
     Spawn an EMS daemon and begin sending orders and receiving
     alerts.
@@ -233,36 +206,18 @@ async def open_ems(

     async with maybe_open_emsd(broker) as portal:

-        mod = get_brokermod(broker)
-        if (
-            not getattr(mod, 'trades_dialogue', None)
-            or mode == 'paper'
-        ):
-            mode = 'paper'
-
-        from ._ems import _emsd_main
         async with (
             # connect to emsd
             portal.open_context(

                 _emsd_main,
                 fqsn=fqsn,
-                exec_mode=mode,
-                loglevel=loglevel,
-
-            ) as (
-                ctx,
-                (
-                    positions,
-                    accounts,
-                    dialogs,
-                )
-            ),
+            ) as (ctx, (positions, accounts)),

             # open 2-way trade command stream
             ctx.open_stream() as trades_stream,
         ):
-            # start sync code order msg delivery task
             async with trio.open_nursery() as n:
                 n.start_soon(
                     relay_order_cmds_from_sync_code,
@@ -270,10 +225,4 @@ async def open_ems(
                     trades_stream
                 )

-                yield (
-                    book,
-                    trades_stream,
-                    positions,
-                    accounts,
-                    dialogs,
-                )
+                yield book, trades_stream, positions, accounts
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -15,162 +15,108 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-Clearing sub-system message and protocols.
+Clearing system messagingn types and protocols.

 """
-# from collections import (
-#     ChainMap,
-#     deque,
-# )
-from typing import (
-    Optional,
-    Literal,
-)
-
-from msgspec import field
+from typing import Optional, Union
+
+# TODO: try out just encoding/send direction for now?
+# import msgspec
+from pydantic import BaseModel

 from ..data._source import Symbol
-from ..data.types import Struct
-
-
-# TODO: a composite for tracking msg flow on 2-legged
-# dialogs.
-# class Dialog(ChainMap):
-#     '''
-#     Msg collection abstraction to easily track the state changes of
-#     a msg flow in one high level, query-able and immutable construct.

-#     The main use case is to query data from a (long-running)
-#     msg-transaction-sequence


-#     '''
-#     def update(
-#         self,
-#         msg,
-#     ) -> None:
-#         self.maps.insert(0, msg.to_dict())

-#     def flatten(self) -> dict:
-#         return dict(self)


-# TODO: ``msgspec`` stuff worth paying attention to:
-# - schema evolution:
-#   https://jcristharif.com/msgspec/usage.html#schema-evolution
-# - for eg. ``BrokerdStatus``, instead just have separate messages?
-# - use literals for a common msg determined by diff keys?
-#   - https://jcristharif.com/msgspec/usage.html#literal

-# --------------
 # Client -> emsd
-# --------------

-class Order(Struct):
-
-    # TODO: ideally we can combine these 2 fields into
-    # 1 and just use the size polarity to determine a buy/sell.
-    # i would like to see this become more like
-    # https://jcristharif.com/msgspec/usage.html#literal
-    # action: Literal[
-    #     'live',
-    #     'dark',
-    #     'alert',
-    # ]
-
-    action: Literal[
-        'buy',
-        'sell',
-        'alert',
-    ]
-    # determines whether the create execution
-    # will be submitted to the ems or directly to
-    # the backend broker
-    exec_mode: Literal[
-        'dark',
-        'live',
-        # 'paper', no right?
-    ]
-
-    # internal ``emdsd`` unique "order id"
-    oid: str  # uuid4
-    symbol: str | Symbol
-    account: str  # should we set a default as '' ?
-
-    price: float
-    size: float  # -ve is "sell", +ve is "buy"
-
-    brokers: list[str] = []
-
-
-class Cancel(Struct):
-    '''
-    Cancel msg for removing a dark (ems triggered) or
+class Cancel(BaseModel):
+    '''Cancel msg for removing a dark (ems triggered) or
     broker-submitted (live) trigger/order.

     '''
+    action: str = 'cancel'
     oid: str  # uuid4
     symbol: str
-    action: str = 'cancel'


-# --------------
+class Order(BaseModel):
+
+    action: str  # {'buy', 'sell', 'alert'}
+    # internal ``emdsd`` unique "order id"
+    oid: str  # uuid4
+    symbol: Union[str, Symbol]
+    account: str  # should we set a default as '' ?
+
+    price: float
+    size: float
+    brokers: list[str]
+
+    # Assigned once initial ack is received
+    # ack_time_ns: Optional[int] = None
+
+    # determines whether the create execution
+    # will be submitted to the ems or directly to
+    # the backend broker
+    exec_mode: str  # {'dark', 'live', 'paper'}
+
+    class Config:
+        # just for pre-loading a ``Symbol`` when used
+        # in the order mode staging process
+        arbitrary_types_allowed = True
+        # don't copy this model instance when used in
+        # a recursive model
+        copy_on_model_validation = False

 # Client <- emsd
-# --------------
 # update msgs from ems which relay state change info
 # from the active clearing engine.

-class Status(Struct):
-
-    time_ns: int
-    oid: str  # uuid4 ems-order dialog id
-
-    resp: Literal[
-        'pending',  # acked by broker but not yet open
-        'open',
-        'dark_open',  # dark/algo triggered order is open in ems clearing loop
-        'triggered',  # above triggered order sent to brokerd, or an alert closed
-        'closed',  # fully cleared all size/units
-        'fill',  # partial execution
-        'canceled',
-        'error',
-    ]
-
+class Status(BaseModel):
+
     name: str = 'status'
+    oid: str  # uuid4
+    time_ns: int
+
+    # {
+    # 'dark_submitted',
+    # 'dark_cancelled',
+    # 'dark_triggered',
+
+    # 'broker_submitted',
+    # 'broker_cancelled',
+    # 'broker_executed',
+    # 'broker_filled',
+    # 'broker_errored',
+
+    # 'alert_submitted',
+    # 'alert_triggered',
+
+    # }
+    resp: str  # "response", see above
+
+    # symbol: str
+
+    # trigger info
+    trigger_price: Optional[float] = None
+    # price: float
+
+    # broker: Optional[str] = None

     # this maps normally to the ``BrokerdOrder.reqid`` below, an id
     # normally allocated internally by the backend broker routing system
-    reqid: Optional[int | str] = None
+    broker_reqid: Optional[Union[int, str]] = None

-    # the (last) source order/request msg if provided
-    # (eg. the Order/Cancel which causes this msg) and
-    # acts as a back-reference to the corresponding
-    # request message which was the source of this msg.
-    req: Order | None = None
-
-    # XXX: better design/name here?
-    # flag that can be set to indicate a message for an order
-    # event that wasn't originated by piker's emsd (eg. some external
-    # trading system which does it's own order control but that you
-    # might want to "track" using piker UIs/systems).
-    src: Optional[str] = None
-
-    # set when a cancel request msg was set for this order flow dialog
-    # but the brokerd dialog isn't yet in a cancelled state.
-    cancel_called: bool = False
-
-    # for relaying a boxed brokerd-dialog-side msg data "through" the
-    # ems layer to clients.
+    # for relaying backend msg data "through" the ems layer
     brokerd_msg: dict = {}


-# ---------------
 # emsd -> brokerd
-# ---------------
 # requests *sent* from ems to respective backend broker daemon

-class BrokerdCancel(Struct):
+class BrokerdCancel(BaseModel):

+    action: str = 'cancel'
     oid: str  # piker emsd order id
     time_ns: int

@@ -181,39 +127,34 @@ class BrokerdCancel(Struct):
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[int | str] = None
-    action: str = 'cancel'
+    reqid: Optional[Union[int, str]] = None


-class BrokerdOrder(Struct):
+class BrokerdOrder(BaseModel):

+    action: str  # {buy, sell}
     oid: str
     account: str
     time_ns: int

-    symbol: str  # fqsn
-    price: float
-    size: float
-
-    # TODO: if we instead rely on a +ve/-ve size to determine
-    # the action we more or less don't need this field right?
-    action: str = ''  # {buy, sell}
-
     # "broker request id": broker specific/internal order id if this is
     # None, creates a new order otherwise if the id is valid the backend
     # api must modify the existing matching order. If the broker allows
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[int | str] = None
+    reqid: Optional[Union[int, str]] = None
+
+    symbol: str  # symbol.<providername> ?
+    price: float
+    size: float


-# ---------------
 # emsd <- brokerd
-# ---------------
 # requests *received* to ems from broker backend

-class BrokerdOrderAck(Struct):
-
+class BrokerdOrderAck(BaseModel):
     '''
     Immediate reponse to a brokerd order request providing the broker
     specific unique order id so that the EMS can associate this
@@ -221,93 +162,102 @@ class BrokerdOrderAck(Struct):
     ``.oid`` (which is a uuid4).

     '''
+    name: str = 'ack'
+
     # defined and provided by backend
-    reqid: int | str
+    reqid: Union[int, str]

     # emsd id originally sent in matching request msg
     oid: str
     account: str = ''
-    name: str = 'ack'


-class BrokerdStatus(Struct):
+class BrokerdStatus(BaseModel):

-    reqid: int | str
-    time_ns: int
-    status: Literal[
-        'open',
-        'canceled',
-        'fill',
-        'pending',
-        'error',
-    ]
-
-    account: str
     name: str = 'status'
+    reqid: Union[int, str]
+    time_ns: int
+
+    # XXX: should be best effort set for every update
+    account: str = ''
+
+    # {
+    # 'submitted',
+    # 'cancelled',
+    # 'filled',
+    # }
+    status: str

     filled: float = 0.0
     reason: str = ''
     remaining: float = 0.0

-    # external: bool = False
|
# XXX: better design/name here?
|
||||||
|
# flag that can be set to indicate a message for an order
|
||||||
|
# event that wasn't originated by piker's emsd (eg. some external
|
||||||
|
# trading system which does it's own order control but that you
|
||||||
|
# might want to "track" using piker UIs/systems).
|
||||||
|
external: bool = False
|
||||||
|
|
||||||
# XXX: not required schema as of yet
|
# XXX: not required schema as of yet
|
||||||
broker_details: dict = field(default_factory=lambda: {
|
broker_details: dict = {
|
||||||
'name': '',
|
'name': '',
|
||||||
})
|
}
|
||||||
|
|
||||||
|
|
||||||
class BrokerdFill(Struct):
|
class BrokerdFill(BaseModel):
|
||||||
'''
|
'''
|
||||||
A single message indicating a "fill-details" event from the broker
|
A single message indicating a "fill-details" event from the broker
|
||||||
if avaiable.
|
if avaiable.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
name: str = 'fill'
|
||||||
|
reqid: Union[int, str]
|
||||||
|
time_ns: int
|
||||||
|
|
||||||
|
# order exeuction related
|
||||||
|
action: str
|
||||||
|
size: float
|
||||||
|
price: float
|
||||||
|
|
||||||
|
broker_details: dict = {} # meta-data (eg. commisions etc.)
|
||||||
|
|
||||||
# brokerd timestamp required for order mode arrow placement on x-axis
|
# brokerd timestamp required for order mode arrow placement on x-axis
|
||||||
|
|
||||||
# TODO: maybe int if we force ns?
|
# TODO: maybe int if we force ns?
|
||||||
# we need to normalize this somehow since backends will use their
|
# we need to normalize this somehow since backends will use their
|
||||||
# own format and likely across many disparate epoch clocks...
|
# own format and likely across many disparate epoch clocks...
|
||||||
broker_time: float
|
broker_time: float
|
||||||
reqid: int | str
|
|
||||||
time_ns: int
|
|
||||||
|
|
||||||
# order exeuction related
|
|
||||||
size: float
|
|
||||||
price: float
|
|
||||||
|
|
||||||
name: str = 'fill'
|
|
||||||
action: Optional[str] = None
|
|
||||||
broker_details: dict = {} # meta-data (eg. commisions etc.)
|
|
||||||
|
|
||||||
|
|
||||||
class BrokerdError(Struct):
|
class BrokerdError(BaseModel):
|
||||||
'''
|
'''
|
||||||
Optional error type that can be relayed to emsd for error handling.
|
Optional error type that can be relayed to emsd for error handling.
|
||||||
|
|
||||||
This is still a TODO thing since we're not sure how to employ it yet.
|
This is still a TODO thing since we're not sure how to employ it yet.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
name: str = 'error'
|
||||||
oid: str
|
oid: str
|
||||||
symbol: str
|
|
||||||
reason: str
|
|
||||||
|
|
||||||
# if no brokerd order request was actually submitted (eg. we errored
|
# if no brokerd order request was actually submitted (eg. we errored
|
||||||
# at the ``pikerd`` layer) then there will be ``reqid`` allocated.
|
# at the ``pikerd`` layer) then there will be ``reqid`` allocated.
|
||||||
reqid: Optional[int | str] = None
|
reqid: Optional[Union[int, str]] = None
|
||||||
|
|
||||||
name: str = 'error'
|
symbol: str
|
||||||
|
reason: str
|
||||||
broker_details: dict = {}
|
broker_details: dict = {}
|
||||||
|
|
||||||
|
|
||||||
class BrokerdPosition(Struct):
|
class BrokerdPosition(BaseModel):
|
||||||
'''Position update event from brokerd.
|
'''Position update event from brokerd.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
name: str = 'position'
|
||||||
|
|
||||||
broker: str
|
broker: str
|
||||||
account: str
|
account: str
|
||||||
symbol: str
|
symbol: str
|
||||||
|
currency: str
|
||||||
size: float
|
size: float
|
||||||
avg_price: float
|
avg_price: float
|
||||||
currency: str = ''
|
|
||||||
name: str = 'position'
|
|
||||||
|
|
|
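Aside from the base-class swap above (`Struct` vs pydantic `BaseModel`), the two branches differ in how these messages hit the IPC wire: the `Struct` side sends instances directly while the `BaseModel` side converts with `.dict()` first, as the `ems_trades_stream.send(...)` hunks below show. A minimal sketch contrasting the two styles, assuming a msgspec-backed `Struct` and pydantic v1 semantics:

from msgspec import Struct
from pydantic import BaseModel

class StatusStruct(Struct):
    name: str = 'status'
    reqid: int = 0

class StatusModel(BaseModel):
    name: str = 'status'
    reqid: int = 0

# Struct side: the stream codec serializes the instance itself,
# so callers write `await stream.send(msg)`.
msg = StatusStruct(reqid=1)

# BaseModel side: explicit conversion to a plain dict first,
# i.e. `await stream.send(msg.dict())`.
payload = StatusModel(reqid=1).dict()
assert payload == {'name': 'status', 'reqid': 1}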
@@ -18,75 +18,54 @@
 Fake trading for forward testing.

 """
-from collections import defaultdict
 from contextlib import asynccontextmanager
 from datetime import datetime
 from operator import itemgetter
-import itertools
 import time
-from typing import (
-    Any,
-    Optional,
-    Callable,
-)
+from typing import Tuple, Optional, Callable
 import uuid

 from bidict import bidict
-import pendulum
 import trio
 import tractor
+from dataclasses import dataclass

 from .. import data
-from ..data.types import Struct
-from ..data._source import Symbol
-from ..pp import (
-    Position,
-    Transaction,
-    open_trade_ledger,
-    open_pps,
-)
 from ..data._normalize import iterticks
 from ..data._source import unpack_fqsn
 from ..log import get_logger
 from ._messages import (
-    BrokerdCancel,
-    BrokerdOrder,
-    BrokerdOrderAck,
-    BrokerdStatus,
-    BrokerdFill,
-    BrokerdPosition,
-    BrokerdError,
+    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
+    BrokerdFill, BrokerdPosition, BrokerdError
 )

-from ..config import load

 log = get_logger(__name__)


-class PaperBoi(Struct):
-    '''
-    Emulates a broker order client providing approximately the same API
-    and delivering an order-event response stream but with methods for
+@dataclass
+class PaperBoi:
+    """
+    Emulates a broker order client providing the same API and
+    delivering an order-event response stream but with methods for
     triggering desired events based on forward testing engine
-    requirements (eg open, closed, fill msgs).
+    requirements.

-    '''
+    """
     broker: str

     ems_trades_stream: tractor.MsgStream

     # map of paper "live" orders which be used
     # to simulate fills based on paper engine settings
-    _buys: defaultdict[str, bidict]
-    _sells: defaultdict[str, bidict]
+    _buys: bidict
+    _sells: bidict
     _reqids: bidict
-    _positions: dict[str, Position]
-    _trade_ledger: dict[str, Any]
-    _syms: dict[str, Symbol] = {}
+    _positions: dict[str, BrokerdPosition]

     # init edge case L1 spread
-    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
-    last_bid: tuple[float, float] = (0, 0)
+    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
+    last_bid: Tuple[float, float] = (0, 0)

     async def submit_limit(
         self,
@@ -96,24 +75,27 @@ class PaperBoi(Struct):
         action: str,
         size: float,
         reqid: Optional[str],

     ) -> int:
-        '''
-        Place an order and return integer request id provided by client.
+        """Place an order and return integer request id provided by client.

-        '''
+        """
+        is_modify: bool = False
+        if reqid is None:
+            reqid = str(uuid.uuid4())
+
+        else:
+            # order is already existing, this is a modify
+            (oid, symbol, action, old_price) = self._reqids[reqid]
+            assert old_price != price
+            is_modify = True
+
+        # register order internally
+        self._reqids[reqid] = (oid, symbol, action, price)
+
         if action == 'alert':
             # bypass all fill simulation
             return reqid

-        entry = self._reqids.get(reqid)
-        if entry:
-            # order is already existing, this is a modify
-            (oid, symbol, action, old_price) = entry
-        else:
-            # register order internally
-            self._reqids[reqid] = (oid, symbol, action, price)
-
         # TODO: net latency model
         # we checkpoint here quickly particulalry
         # for dark orders since we want the dark_executed
@@ -125,18 +107,15 @@ class PaperBoi(Struct):
             size = -size

         msg = BrokerdStatus(
-            status='open',
-            # account=f'paper_{self.broker}',
-            account='paper',
+            status='submitted',
             reqid=reqid,
+            broker=self.broker,
             time_ns=time.time_ns(),
             filled=0.0,
             reason='paper_trigger',
             remaining=size,
-
-            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg)
+        await self.ems_trades_stream.send(msg.dict())

         # if we're already a clearing price simulate an immediate fill
         if (
@@ -144,28 +123,28 @@ class PaperBoi(Struct):
         ) or (
             action == 'sell' and (clear_price := self.last_bid[0]) >= price
         ):
-            await self.fake_fill(
-                symbol,
-                clear_price,
-                size,
-                action,
-                reqid,
-                oid,
-            )
+            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)

-        # register this submissions as a paper live order
         else:
-            # set the simulated order in the respective table for lookup
-            # and trigger by the simulated clearing task normally
-            # running ``simulate_fills()``.
+            # register this submissions as a paper live order
+
+            # submit order to book simulation fill loop
             if action == 'buy':
                 orders = self._buys

             elif action == 'sell':
                 orders = self._sells

-            # {symbol -> bidict[oid, (<price data>)]}
-            orders[symbol][oid] = (price, size, reqid, action)
+            # set the simulated order in the respective table for lookup
+            # and trigger by the simulated clearing task normally
+            # running ``simulate_fills()``.
+
+            if is_modify:
+                # remove any existing order for the old price
+                orders[symbol].pop((oid, old_price))
+
+            # buys/sells: (symbol -> (price -> order))
+            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)

         return reqid
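The `submit_limit()` hunk above turns on a change of keying scheme for the live-order tables: the `Struct` branch keys each per-symbol `bidict` by `oid` alone, while the `BaseModel` branch keys plain dicts by `(oid, price)` tuples and so must pop the stale entry on a price modify. A toy sketch of the difference (names and values are illustrative only):

# Keying by oid alone means a price modify just overwrites in place;
# keying by (oid, price) requires popping the stale entry first.
buys_by_oid: dict[str, tuple] = {}
buys_by_oid['oid-1'] = (101.0, 10, 'req-1', 'buy')      # modify: overwrite

buys_by_key: dict[tuple, tuple] = {}
buys_by_key[('oid-1', 100.0)] = (10, 'req-1', 'buy')
# modify to a new price: remove the old (oid, price) entry, insert the new
buys_by_key.pop(('oid-1', 100.0))
buys_by_key[('oid-1', 101.0)] = (10, 'req-1', 'buy')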
@@ -178,26 +157,26 @@ class PaperBoi(Struct):
         oid, symbol, action, price = self._reqids[reqid]

         if action == 'buy':
-            self._buys[symbol].pop(oid, None)
+            self._buys[symbol].pop((oid, price))
         elif action == 'sell':
-            self._sells[symbol].pop(oid, None)
+            self._sells[symbol].pop((oid, price))

         # TODO: net latency model
         await trio.sleep(0.05)

         msg = BrokerdStatus(
-            status='canceled',
-            account='paper',
+            status='cancelled',
+            oid=oid,
             reqid=reqid,
+            broker=self.broker,
             time_ns=time.time_ns(),
-            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg)
+        await self.ems_trades_stream.send(msg.dict())

     async def fake_fill(
         self,

-        fqsn: str,
+        symbol: str,
         price: float,
         size: float,
         action: str,  # one of {'buy', 'sell'}
@@ -211,21 +190,21 @@ class PaperBoi(Struct):
         remaining: float = 0,

     ) -> None:
-        '''
-        Pretend to fill a broker order @ price and size.
+        """Pretend to fill a broker order @ price and size.

-        '''
+        """
         # TODO: net latency model
         await trio.sleep(0.05)
-        fill_time_ns = time.time_ns()
-        fill_time_s = time.time()

-        fill_msg = BrokerdFill(
+        msg = BrokerdFill(

             reqid=reqid,
-            time_ns=fill_time_ns,
+            time_ns=time.time_ns(),

             action=action,
             size=size,
             price=price,

             broker_time=datetime.now().timestamp(),
             broker_details={
                 'paper_info': {
@@ -235,64 +214,79 @@ class PaperBoi(Struct):
                 'name': self.broker + '_paper',
             },
         )
-        log.info(f'Fake filling order:\n{fill_msg}')
-        await self.ems_trades_stream.send(fill_msg)
+        await self.ems_trades_stream.send(msg.dict())

         if order_complete:

             msg = BrokerdStatus(

                 reqid=reqid,
                 time_ns=time.time_ns(),
-                # account=f'paper_{self.broker}',
-                account='paper',
-                status='closed',
+                status='filled',
                 filled=size,
                 remaining=0 if order_complete else remaining,

+                action=action,
+                size=size,
+                price=price,
+
+                broker_details={
+                    'paper_info': {
+                        'oid': oid,
+                    },
+                    'name': self.broker,
+                },
             )
-            await self.ems_trades_stream.send(msg)
+            await self.ems_trades_stream.send(msg.dict())

         # lookup any existing position
-        key = fqsn.rstrip(f'.{self.broker}')
-        t = Transaction(
-            fqsn=fqsn,
-            sym=self._syms[fqsn],
-            tid=oid,
-            size=size,
-            price=price,
-            cost=0,  # TODO: cost model
-            dt=pendulum.from_timestamp(fill_time_s),
-            bsuid=key,
-        )
-
-        with (
-            open_trade_ledger(self.broker, 'paper') as ledger,
-            open_pps(self.broker, 'paper', write_on_exit=True) as table
-        ):
-            tx = t.to_dict()
-            tx.pop('sym')
-            ledger.update({oid: tx})
-            # Write to pps toml right now
-            table.update_from_trans({oid: t})
-
-            pp = table.pps[key]
-            pp_msg = BrokerdPosition(
+        token = f'{symbol}.{self.broker}'
+        pp_msg = self._positions.setdefault(
+            token,
+            BrokerdPosition(
                 broker=self.broker,
                 account='paper',
-                symbol=fqsn,
+                symbol=symbol,
                 # TODO: we need to look up the asset currency from
                 # broker info. i guess for crypto this can be
                 # inferred from the pair?
-                currency=key,
-                size=pp.size,
-                avg_price=pp.ppu,
+                currency='',
+                size=0.0,
+                avg_price=0,
             )
+        )

-            await self.ems_trades_stream.send(pp_msg)
+        # "avg position price" calcs
+        # TODO: eventually it'd be nice to have a small set of routines
+        # to do this stuff from a sequence of cleared orders to enable
+        # so called "contextual positions".
+        new_size = size + pp_msg.size
+
+        # old size minus the new size gives us size differential with
+        # +ve -> increase in pp size
+        # -ve -> decrease in pp size
+        size_diff = abs(new_size) - abs(pp_msg.size)
+
+        if new_size == 0:
+            pp_msg.avg_price = 0
+
+        elif size_diff > 0:
+            # only update the "average position price" when the position
+            # size increases not when it decreases (i.e. the position is
+            # being made smaller)
+            pp_msg.avg_price = (
+                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
+            ) / abs(new_size)
+
+        pp_msg.size = new_size
+
+        await self.ems_trades_stream.send(pp_msg.dict())
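The position update block just above (right-hand branch) re-averages the entry price only when the absolute position size grows. A worked example of that size-weighted update:

# Worked example of the size-weighted average price update (toy values).
old_size, old_avg = 10, 100.0   # existing long position
fill_size, fill_price = 5, 110.0

new_size = fill_size + old_size            # 15
size_diff = abs(new_size) - abs(old_size)  # +5 -> position grew

if size_diff > 0:
    new_avg = (
        abs(fill_size) * fill_price + old_avg * abs(old_size)
    ) / abs(new_size)
    assert round(new_avg, 4) == 103.3333   # (5*110 + 10*100) / 15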
 async def simulate_fills(
-    quote_stream: tractor.MsgStream,  # noqa
+    quote_stream: 'tractor.ReceiveStream',  # noqa
     client: PaperBoi,

 ) -> None:

     # TODO: more machinery to better simulate real-world market things:
@@ -312,116 +306,61 @@ async def simulate_fills(

     # this stream may eventually contain multiple symbols
     async for quotes in quote_stream:

         for sym, quote in quotes.items():

             for tick in iterticks(
                 quote,
                 # dark order price filter(s)
                 types=('ask', 'bid', 'trade', 'last')
             ):
-                tick_price = tick['price']
+                # print(tick)
+                tick_price = tick.get('price')
+                ttype = tick['type']

-                buys: bidict[str, tuple] = client._buys[sym]
-                iter_buys = reversed(sorted(
-                    buys.values(),
-                    key=itemgetter(0),
-                ))
-
-                def buy_on_ask(our_price):
-                    return tick_price <= our_price
-
-                sells: bidict[str, tuple] = client._sells[sym]
-                iter_sells = sorted(
-                    sells.values(),
-                    key=itemgetter(0)
-                )
-
-                def sell_on_bid(our_price):
-                    return tick_price >= our_price
-
-                match tick:
-
-                    # on an ask queue tick, only clear buy entries
-                    case {
-                        'price': tick_price,
-                        'type': 'ask',
-                    }:
-                        client.last_ask = (
-                            tick_price,
-                            tick.get('size', client.last_ask[1]),
-                        )
-
-                        iter_entries = zip(
-                            iter_buys,
-                            itertools.repeat(buy_on_ask)
-                        )
-
-                    # on a bid queue tick, only clear sell entries
-                    case {
-                        'price': tick_price,
-                        'type': 'bid',
-                    }:
-                        client.last_bid = (
-                            tick_price,
-                            tick.get('size', client.last_bid[1]),
-                        )
-
-                        iter_entries = zip(
-                            iter_sells,
-                            itertools.repeat(sell_on_bid)
-                        )
-
-                    # TODO: fix this block, though it definitely
-                    # costs a lot more CPU-wise
-                    # - doesn't seem like clears are happening still on
-                    #   "resting" limit orders?
-                    case {
-                        'price': tick_price,
-                        'type': ('trade' | 'last'),
-                    }:
-                        # in the clearing price / last price case we
-                        # want to iterate both sides of our book for
-                        # clears since we don't know which direction the
-                        # price is going to move (especially with HFT)
-                        # and thus we simply interleave both sides (buys
-                        # and sells) until one side clears and then
-                        # break until the next tick?
-                        def interleave():
-                            for pair in zip(
-                                iter_buys,
-                                iter_sells,
-                            ):
-                                for order_info, pred in zip(
-                                    pair,
-                                    itertools.cycle([buy_on_ask, sell_on_bid]),
-                                ):
-                                    yield order_info, pred
-
-                        iter_entries = interleave()
-
-                    # NOTE: all other (non-clearable) tick event types
-                    # - we don't want to sping the simulated clear loop
-                    # below unecessarily and further don't want to pop
-                    # simulated live orders prematurely.
-                    case _:
-                        continue
-
-                # iterate all potentially clearable book prices
-                # in FIFO order per side.
-                for order_info, pred in iter_entries:
-                    (our_price, size, reqid, action) = order_info
-
-                    # print(order_info)
-                    clearable = pred(our_price)
-                    if clearable:
-                        # pop and retreive order info
-                        oid = {
-                            'buy': buys,
-                            'sell': sells
-                        }[action].inverse.pop(order_info)
+                if ttype in ('ask',):
+
+                    client.last_ask = (
+                        tick_price,
+                        tick.get('size', client.last_ask[1]),
+                    )
+
+                    orders = client._buys.get(sym, {})
+
+                    book_sequence = reversed(
+                        sorted(orders.keys(), key=itemgetter(1)))
+
+                    def pred(our_price):
+                        return tick_price < our_price
+
+                elif ttype in ('bid',):
+
+                    client.last_bid = (
+                        tick_price,
+                        tick.get('size', client.last_bid[1]),
+                    )
+
+                    orders = client._sells.get(sym, {})
+                    book_sequence = sorted(orders.keys(), key=itemgetter(1))
+
+                    def pred(our_price):
+                        return tick_price > our_price
+
+                elif ttype in ('trade', 'last'):
+                    # TODO: simulate actual book queues and our orders
+                    # place in it, might require full L2 data?
+                    continue
+
+                # iterate book prices descending
+                for oid, our_price in book_sequence:
+                    if pred(our_price):
+
+                        # retreive order info
+                        (size, reqid, action) = orders.pop((oid, our_price))

                         # clearing price would have filled entirely
                         await client.fake_fill(
-                            fqsn=sym,
+                            symbol=sym,
                             # todo slippage to determine fill price
                             price=tick_price,
                             size=size,
@@ -429,6 +368,9 @@ async def simulate_fills(
                             reqid=reqid,
                             oid=oid,
                         )
+                    else:
+                        # prices are iterated in sorted order so we're done
+                        break
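The left branch's clearing loop above dispatches each tick through a `match` statement with an explicit wildcard case so non-clearable tick types never spin the book iteration. A condensed sketch of that dispatch shape (the handler strings stand in for the real clearing logic):

def route(tick: dict) -> str:
    # structural pattern match on the tick's 'type' field
    match tick:
        case {'price': price, 'type': 'ask'}:
            return f'check buys vs ask {price}'
        case {'price': price, 'type': 'bid'}:
            return f'check sells vs bid {price}'
        case {'price': price, 'type': ('trade' | 'last')}:
            return f'interleave both sides at {price}'
        case _:
            # all other (non-clearable) tick event types
            return 'skip'

assert route({'type': 'ask', 'price': 1.0}).startswith('check buys')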
 async def handle_order_requests(
@@ -438,81 +380,66 @@ async def handle_order_requests(

 ) -> None:

-    request_msg: dict
+    # order_request: dict
     async for request_msg in ems_order_stream:
-        match request_msg:
-            case {'action': ('buy' | 'sell')}:
-                order = BrokerdOrder(**request_msg)
-                account = order.account
-
-                # error on bad inputs
-                reason = None
-                if account != 'paper':
-                    reason = f'No account found:`{account}` (paper only)?'
-
-                elif order.size == 0:
-                    reason = 'Invalid size: 0'
-
-                if reason:
-                    log.error(reason)
-                    await ems_order_stream.send(BrokerdError(
-                        oid=order.oid,
-                        symbol=order.symbol,
-                        reason=reason,
-                    ))
-                    continue
-
-                reqid = order.reqid or str(uuid.uuid4())
-
-                # deliver ack that order has been submitted to broker routing
-                await ems_order_stream.send(
-                    BrokerdOrderAck(
-                        oid=order.oid,
-                        reqid=reqid,
-                    )
-                )
-
-                # call our client api to submit the order
-                reqid = await client.submit_limit(
-                    oid=order.oid,
-                    symbol=f'{order.symbol}.{client.broker}',
-                    price=order.price,
-                    action=order.action,
-                    size=order.size,
-                    # XXX: by default 0 tells ``ib_insync`` methods that
-                    # there is no existing order so ask the client to create
-                    # a new one (which it seems to do by allocating an int
-                    # counter - collision prone..)
-                    reqid=reqid,
-                )
-                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')
-
-            case {'action': 'cancel'}:
-                msg = BrokerdCancel(**request_msg)
-                await client.submit_cancel(
-                    reqid=msg.reqid
-                )
-
-            case _:
-                log.error(f'Unknown order command: {request_msg}')

+        action = request_msg['action']
+
+        if action in {'buy', 'sell'}:
+
+            account = request_msg['account']
+            if account != 'paper':
+                log.error(
+                    'This is a paper account, only a `paper` selection is valid'
+                )
+                await ems_order_stream.send(BrokerdError(
+                    oid=request_msg['oid'],
+                    symbol=request_msg['symbol'],
+                    reason=f'Paper only. No account found: `{account}` ?',
+                ).dict())
+                continue
+
+            # validate
+            order = BrokerdOrder(**request_msg)
+
+            # call our client api to submit the order
+            reqid = await client.submit_limit(
+
+                oid=order.oid,
+                symbol=order.symbol,
+                price=order.price,
+                action=order.action,
+                size=order.size,
+
+                # XXX: by default 0 tells ``ib_insync`` methods that
+                # there is no existing order so ask the client to create
+                # a new one (which it seems to do by allocating an int
+                # counter - collision prone..)
+                reqid=order.reqid,
+            )
+
+            # deliver ack that order has been submitted to broker routing
+            await ems_order_stream.send(
+                BrokerdOrderAck(
+
+                    # ems order request id
+                    oid=order.oid,
+
+                    # broker specific request id
+                    reqid=reqid,
+
+                ).dict()
+            )
+
+        elif action == 'cancel':
+            msg = BrokerdCancel(**request_msg)
+
+            await client.submit_cancel(
+                reqid=msg.reqid
+            )
+
+        else:
+            log.error(f'Unknown order command: {request_msg}')


-_reqids: bidict[str, tuple] = {}
-_buys: defaultdict[
-    str,  # symbol
-    bidict[
-        str,  # oid
-        tuple[float, float, str, str],  # order info
-    ]
-] = defaultdict(bidict)
-_sells: defaultdict[
-    str,  # symbol
-    bidict[
-        str,  # oid
-        tuple[float, float, str, str],  # order info
-    ]
-] = defaultdict(bidict)
-_positions: dict[str, Position] = {}
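The module-level tables the left branch adds (shown just above) rely on `bidict`'s reverse mapping so the clearing loop can recover an `oid` from a popped order-info tuple. A small sketch with illustrative values:

from collections import defaultdict
from bidict import bidict

# one bidict of live orders per symbol
_buys: defaultdict[str, bidict] = defaultdict(bidict)

# oid -> (price, size, reqid, action)
info = (100.0, 10.0, 'req-1', 'buy')
_buys['xbtusdt']['oid-1'] = info

# reverse lookup during a simulated clear: value -> key
oid = _buys['xbtusdt'].inverse.pop(info)
assert oid == 'oid-1'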
 @tractor.context
@@ -524,68 +451,42 @@ async def trades_dialogue(
     loglevel: str = None,

 ) -> None:

     tractor.log.get_console_log(loglevel)

     async with (

         data.open_feed(
             [fqsn],
             loglevel=loglevel,
         ) as feed,

     ):
-        with open_pps(broker, 'paper') as table:
-            # save pps in local state
-            _positions.update(table.pps)
-
-        pp_msgs: list[BrokerdPosition] = []
-        pos: Position
-        token: str  # f'{symbol}.{self.broker}'
-        for token, pos in _positions.items():
-            pp_msgs.append(BrokerdPosition(
-                broker=broker,
-                account='paper',
-                symbol=pos.symbol.front_fqsn(),
-                size=pos.size,
-                avg_price=pos.ppu,
-            ))
-
-        await ctx.started((
-            pp_msgs,
-            ['paper'],
-        ))
+        # TODO: load paper positions per broker from .toml config file
+        # and pass as symbol to position data mapping: ``dict[str, dict]``
+        # await ctx.started(all_positions)
+        await ctx.started(({}, {'paper',}))

         async with (
             ctx.open_stream() as ems_stream,
             trio.open_nursery() as n,
         ):

             client = PaperBoi(
                 broker,
                 ems_stream,
-                _buys=_buys,
-                _sells=_sells,
+                _buys={},
+                _sells={},

-                _reqids=_reqids,
+                _reqids={},

-                _positions=_positions,
-                # TODO: load postions from ledger file
-                _trade_ledger={},
-                _syms={
-                    fqsn: flume.symbol
-                    for fqsn, flume in feed.flumes.items()
-                }
+                # TODO: load paper positions from ``positions.toml``
+                _positions={},
             )

-            n.start_soon(
-                handle_order_requests,
-                client,
-                ems_stream,
-            )
+            n.start_soon(handle_order_requests, client, ems_stream)

             # paper engine simulator clearing task
-            await simulate_fills(feed.streams[broker], client)
+            await simulate_fills(feed.stream, client)


 @asynccontextmanager
@@ -610,17 +511,17 @@ async def open_paperboi(
     # (we likely don't need more then one proc for basic
     # simulated order clearing)
     if portal is None:
-        log.info('Starting new paper-engine actor')
         portal = await tn.start_actor(
             service_name,
             enable_modules=[__name__]
         )

     async with portal.open_context(
         trades_dialogue,
         broker=broker,
         fqsn=fqsn,
         loglevel=loglevel,

     ) as (ctx, first):

         yield ctx, first
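Both sides of the `trades_dialogue()` hunk share the same startup shape: deliver `(position msgs, account names)` through `ctx.started()` before opening the bidirectional order stream. A stripped-down sketch of that handshake (the echo body is a placeholder):

import tractor

@tractor.context
async def trades_dialogue(ctx: tractor.Context) -> None:
    pp_msgs: list[dict] = []            # one entry per held position
    # unblock the caller with the initial position/account snapshot
    await ctx.started((pp_msgs, ['paper']))
    async with ctx.open_stream() as ems_stream:
        async for msg in ems_stream:    # order requests from the ems
            await ems_stream.send(msg)  # echo placeholder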
@@ -19,58 +19,38 @@ CLI commons.

 '''
 import os
+from pprint import pformat

 import click
 import trio
 import tractor

-from ..log import (
-    get_console_log,
-    get_logger,
-    colorize_json,
-)
+from ..log import get_console_log, get_logger, colorize_json
 from ..brokers import get_brokermod
-from ..service import (
-    _default_registry_host,
-    _default_registry_port,
-)
+from .._daemon import _tractor_kwargs
 from .. import config


 log = get_logger('cli')
+DEFAULT_BROKER = 'questrade'


 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
+@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
 @click.option(
     '--tsdb',
     is_flag=True,
     help='Enable local ``marketstore`` instance'
 )
-@click.option(
-    '--es',
-    is_flag=True,
-    help='Enable local ``elasticsearch`` instance'
-)
-def pikerd(
-    loglevel: str,
-    host: str,
-    port: int,
-    tl: bool,
-    pdb: bool,
-    tsdb: bool,
-    es: bool,
-):
+def pikerd(loglevel, host, tl, pdb, tsdb):
     '''
     Spawn the piker broker-daemon.

     '''
-    from ..service import open_pikerd
+    from .._daemon import open_pikerd
     log = get_console_log(loglevel)

     if pdb:
@@ -82,25 +62,32 @@ def pikerd(
             "\n"
         ))

-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
-
     async def main():

         async with (
             open_pikerd(
-                tsdb=tsdb,
-                es=es,
                 loglevel=loglevel,
                 debug_mode=pdb,
-                registry_addr=reg_addr,

             ),  # normally delivers a ``Services`` handle
             trio.open_nursery() as n,
         ):
+            if tsdb:
+                from piker.data._ahab import start_ahab
+                from piker.data.marketstore import start_marketstore
+
+                log.info('Spawning `marketstore` supervisor')
+                ctn_ready, config, (cid, pid) = await n.start(
+                    start_ahab,
+                    'marketstored',
+                    start_marketstore,
+
+                )
+                log.info(
+                    f'`marketstore` up!\n'
+                    f'`marketstored` pid: {pid}\n'
+                    f'docker container id: {cid}\n'
+                    f'config: {pformat(config)}'
+                )
+
             await trio.sleep_forever()

@@ -110,46 +97,25 @@ def pikerd(
 @click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
-    default=None,
+    default=[DEFAULT_BROKER],
     multiple=True,
     help='Broker backend to use'
 )
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--configdir', '-c', help='Configuration directory')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.pass_context
-def cli(
-    ctx: click.Context,
-    brokers: list[str],
-    loglevel: str,
-    tl: bool,
-    configdir: str,
-    host: str,
-    port: int,
-
-) -> None:
+def cli(ctx, brokers, loglevel, tl, configdir):
     if configdir is not None:
         assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
         config._override_config_dir(configdir)

     ctx.ensure_object(dict)

-    if not brokers:
-        # (try to) load all (supposedly) supported data/broker backends
-        from piker.brokers import __brokers__
-        brokers = __brokers__
-
-    brokermods = [get_brokermod(broker) for broker in brokers]
-    assert brokermods
-
-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
+    if len(brokers) == 1:
+        brokermods = [get_brokermod(brokers[0])]
+    else:
+        brokermods = [get_brokermod(broker) for broker in brokers]

     ctx.obj.update({
         'brokers': brokers,
@@ -159,7 +125,6 @@ def cli(
         'log': get_console_log(loglevel),
         'confdir': config._config_dir,
         'wl_path': config._watchlists_data_path,
-        'registry_addr': reg_addr,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -169,45 +134,33 @@ def cli(

 @cli.command()
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.argument('ports', nargs=-1, required=False)
+@click.argument('names', nargs=-1, required=False)
 @click.pass_obj
-def services(config, tl, ports):
+def services(config, tl, names):

-    from ..service import (
-        open_piker_runtime,
-        _default_registry_port,
-        _default_registry_host,
-    )
-
-    host = _default_registry_host
-    if not ports:
-        ports = [_default_registry_port]
-
     async def list_services():
-        nonlocal host
-        async with (
-            open_piker_runtime(
-                name='service_query',
-                loglevel=config['loglevel'] if tl else None,
-            ),
-            tractor.get_arbiter(
-                host=host,
-                port=ports[0]
-            ) as portal
-        ):
+        async with tractor.get_arbiter(
+            *_tractor_kwargs['arbiter_addr']
+        ) as portal:
             registry = await portal.run_from_ns('self', 'get_registry')
             json_d = {}
             for key, socket in registry.items():
+                # name, uuid = uid
                 host, port = socket
                 json_d[key] = f'{host}:{port}'
             click.echo(f"{colorize_json(json_d)}")

-    trio.run(list_services)
+    tractor.run(
+        list_services,
+        name='service_query',
+        loglevel=config['loglevel'] if tl else None,
+        arbiter_addr=_tractor_kwargs['arbiter_addr'],
+    )


 def _load_clis() -> None:
-    from ..service import marketstore  # noqa
-    from ..service import elastic
+    from ..data import marketstore  # noqa
     from ..data import cli  # noqa
     from ..brokers import cli  # noqa
     from ..ui import cli  # noqa
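The registry-address plumbing the left branch adds to both `pikerd` and `cli` follows one rule: either `--host` or `--port` alone pulls the missing half from the service defaults. A sketch of that fallback (the default values here are assumptions, and `int(port or ...)` is reordered to sidestep `int(None)`):

_default_registry_host = '127.0.0.1'   # assumed default values
_default_registry_port = 6116

def resolve(host: str | None, port: int | None) -> tuple[str, int] | None:
    # no flags given -> use the runtime's own default address
    if host or port:
        return (
            host or _default_registry_host,
            int(port or _default_registry_port),
        )
    return None

assert resolve(None, None) is None
assert resolve('10.0.0.5', None) == ('10.0.0.5', 6116)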
@@ -15,17 +15,15 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-Platform configuration (files) mgmt.
+Broker configuration mgmt.

 """
 import platform
 import sys
 import os
-from os import path
 from os.path import dirname
 import shutil
 from typing import Optional
-from pathlib import Path

 from bidict import bidict
 import toml
@@ -35,16 +33,9 @@ from .log import get_logger
 log = get_logger('broker-config')


-# XXX NOTE: taken from ``click`` since apparently they have some
+# taken from ``click`` since apparently they have some
 # super weirdness with sigint and sudo..no clue
-# we're probably going to slowly just modify it to our own version over
-# time..
-def get_app_dir(
-    app_name: str,
-    roaming: bool = True,
-    force_posix: bool = False,
-
-) -> str:
+def get_app_dir(app_name, roaming=True, force_posix=False):
     r"""Returns the config folder for the application.  The default behavior
     is to return whatever is most appropriate for the operating system.

@@ -83,30 +74,7 @@ def get_app_dir(
     def _posixify(name):
         return "-".join(name.split()).lower()

-    # NOTE: for testing with `pytest` we leverage the `tmp_dir`
-    # fixture to generate (and clean up) a test-request-specific
-    # directory for isolated configuration files such that,
-    # - multiple tests can run (possibly in parallel) without data races
-    #   on the config state,
-    # - we don't need to ever worry about leaking configs into the
-    #   system thus avoiding needing to manage config cleaup fixtures or
-    #   other bothers (since obviously `tmp_dir` cleans up after itself).
-    #
-    # In order to "pass down" the test dir path to all (sub-)actors in
-    # the actor tree we preload the root actor's runtime vars state (an
-    # internal mechanism for inheriting state down an actor tree in
-    # `tractor`) with the testing dir and check for it whenever we
-    # detect `pytest` is being used (which it isn't under normal
-    # operation).
-    if "pytest" in sys.modules:
-        import tractor
-        actor = tractor.current_actor(err_on_no_runtime=False)
-        if actor:  # runtime is up
-            rvs = tractor._state._runtime_vars
-            testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
-            assert testdirpath.exists(), 'piker test harness might be borked!?'
-            app_name = str(testdirpath)
-
+    # if WIN:
     if platform.system() == 'Windows':
         key = "APPDATA" if roaming else "LOCALAPPDATA"
         folder = os.environ.get(key)
@@ -143,10 +111,8 @@ if _parent_user:

 _conf_names: set[str] = {
     'brokers',
-    'pps',
     'trades',
     'watchlists',
-    'paper_trades'
 }

 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
@@ -181,21 +147,19 @@ def get_conf_path(
     conf_name: str = 'brokers',

 ) -> str:
-    '''
-    Return the top-level default config path normally under
-    ``~/.config/piker`` on linux for a given ``conf_name``, the config
-    name.
+    """Return the default config path normally under
+    ``~/.config/piker`` on linux.

     Contains files such as:
     - brokers.toml
-    - pp.toml
     - watchlists.toml
+    - trades.toml

     # maybe coming soon ;)
     - signals.toml
     - strats.toml

-    '''
+    """
     assert conf_name in _conf_names
     fn = _conf_fn_w_ext(conf_name)
     return os.path.join(
@@ -209,7 +173,7 @@ def repodir():
     Return the abspath to the repo directory.

     '''
-    dirpath = path.abspath(
+    dirpath = os.path.abspath(
         # we're 3 levels down in **this** module file
         dirname(dirname(os.path.realpath(__file__)))
     )
@@ -218,9 +182,7 @@ def repodir():

 def load(
     conf_name: str = 'brokers',
-    path: str = None,
-
-    **tomlkws,
+    path: str = None

 ) -> (dict, str):
     '''
@@ -228,10 +190,6 @@ def load(

     '''
     path = path or get_conf_path(conf_name)
-
-    if not os.path.isdir(_config_dir):
-        Path(_config_dir).mkdir(parents=True, exist_ok=True)
-
     if not os.path.isfile(path):
         fn = _conf_fn_w_ext(conf_name)

@@ -244,15 +202,8 @@ def load(
         # if one exists.
         if os.path.isfile(template):
             shutil.copyfile(template, path)
-        else:
-            # create an empty file
-            with open(path, 'x'):
-                pass
-    else:
-        with open(path, 'r'):
-            pass  # touch it

-    config = toml.load(path, **tomlkws)
+    config = toml.load(path)
     log.debug(f"Read config file {path}")
     return config, path

@@ -261,8 +212,6 @@ def write(
     config: dict,  # toml config as dict
     name: str = 'brokers',
     path: str = None,
-    fail_empty: bool = True,
-    **toml_kwargs,

 ) -> None:
     ''''
@@ -277,7 +226,7 @@ def write(
         log.debug(f"Creating config dir {_config_dir}")
         os.makedirs(dirname)

-    if not config and fail_empty:
+    if not config:
         raise ValueError(
             "Watch out you're trying to write a blank config!")

@@ -286,14 +235,11 @@ def write(
         f"{path}"
     )
     with open(path, 'w') as cf:
-        return toml.dump(
-            config,
-            cf,
-            **toml_kwargs,
-        )
+        return toml.dump(config, cf)


 def load_accounts(

     providers: Optional[list[str]] = None

 ) -> bidict[str, Optional[str]]:
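The `load()`/`write()` pair above supports a simple read-modify-write round trip on any named config. A sketch, assuming the module imports as `piker.config` and using a hypothetical `kraken` section:

from piker import config

# load() returns (parsed toml as dict, resolved file path)
conf, path = config.load('brokers')   # ~/.config/piker/brokers.toml on linux
conf.setdefault('kraken', {})['key_descr'] = 'api_0'   # hypothetical key
config.write(conf, name='brokers', path=path)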
@@ -22,12 +22,6 @@ and storing data from your brokers as well as
 sharing live streams over a network.

 """
-import tractor
-import trio
-
-from ..log import (
-    get_console_log,
-)
 from ._normalize import iterticks
 from ._sharedmem import (
     maybe_open_shm_array,
@@ -38,6 +32,7 @@ from ._sharedmem import (
 )
 from .feed import (
     open_feed,
+    _setup_persistent_brokerd,
 )


@@ -49,40 +44,5 @@ __all__ = [
     'attach_shm_array',
     'open_shm_array',
     'get_shm_token',
+    '_setup_persistent_brokerd',
 ]
-
-
-@tractor.context
-async def _setup_persistent_brokerd(
-    ctx: tractor.Context,
-    brokername: str,
-
-) -> None:
-    '''
-    Allocate a actor-wide service nursery in ``brokerd``
-    such that feeds can be run in the background persistently by
-    the broker backend as needed.
-
-    '''
-    get_console_log(tractor.current_actor().loglevel)
-
-    from .feed import (
-        _bus,
-        get_feed_bus,
-    )
-    global _bus
-    assert not _bus
-
-    async with trio.open_nursery() as service_nursery:
-        # assign a nursery to the feeds bus for spawning
-        # background tasks from clients
-        get_feed_bus(brokername, service_nursery)
-
-        # unblock caller
-        await ctx.started()
-
-        # we pin this task to keep the feeds manager active until the
-        # parent actor decides to tear it down
-        await trio.sleep_forever()
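The `_setup_persistent_brokerd()` endpoint the left branch defines here boils down to one pattern: park the context task on a nursery so background feed tasks outlive any single request. A stripped-down sketch:

import tractor
import trio

@tractor.context
async def keep_services_up(ctx: tractor.Context) -> None:
    async with trio.open_nursery() as service_nursery:
        # hand `service_nursery` to whatever needs to spawn
        # long-lived background tasks here...
        await ctx.started()          # unblock the caller
        await trio.sleep_forever()   # pin until the parent cancels us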
@ -15,12 +15,9 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
Supervisor for ``docker`` with included async and SC wrapping
|
Supervisor for docker with included specific-image service helpers.
|
||||||
to ensure a cancellable container lifetime system.
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from collections import ChainMap
|
|
||||||
from functools import partial
|
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
from typing import (
|
from typing import (
|
||||||
|
@ -40,18 +37,10 @@ from docker.models.containers import Container as DockerContainer
|
||||||
from docker.errors import (
|
from docker.errors import (
|
||||||
DockerException,
|
DockerException,
|
||||||
APIError,
|
APIError,
|
||||||
# ContainerError,
|
|
||||||
)
|
|
||||||
import requests
|
|
||||||
from requests.exceptions import (
|
|
||||||
ConnectionError,
|
|
||||||
ReadTimeout,
|
|
||||||
)
|
)
|
||||||
|
from requests.exceptions import ConnectionError, ReadTimeout
|
||||||
|
|
||||||
from ..log import (
|
from ..log import get_logger, get_console_log
|
||||||
get_logger,
|
|
||||||
get_console_log,
|
|
||||||
)
|
|
||||||
from .. import config
|
from .. import config
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
@ -61,8 +50,8 @@ class DockerNotStarted(Exception):
|
||||||
'Prolly you dint start da daemon bruh'
|
'Prolly you dint start da daemon bruh'
|
||||||
|
|
||||||
|
|
||||||
class ApplicationLogError(Exception):
|
class ContainerError(RuntimeError):
|
||||||
'App in container reported an error in logs'
|
'Error reported via app-container logging level'
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
|
@ -107,9 +96,9 @@ async def open_docker(
|
||||||
# not perms?
|
# not perms?
|
||||||
raise
|
raise
|
||||||
|
|
||||||
# finally:
|
finally:
|
||||||
# if client:
|
if client:
|
||||||
# client.close()
|
client.close()
|
||||||
|
|
||||||
|
|
||||||
class Container:
|
class Container:
|
||||||
|
@@ -130,19 +119,8 @@ class Container:

     async def process_logs_until(
         self,
-        log_msg_key: str,
-
-        # this is a predicate func for matching log msgs emitted by the
-        # underlying containerized app
-        patt_matcher: Callable[[str], bool],
-
-        # XXX WARNING XXX: do not touch this sleep value unless
-        # you know what you are doing! the value is critical to
-        # making sure the caller code inside the startup context
-        # does not timeout BEFORE we receive a match on the
-        # ``patt_matcher()`` predicate above.
-        checkpoint_period: float = 0.001,
+        patt: str,
+        bp_on_msg: bool = False,

     ) -> bool:
         '''
         Attempt to capture container log messages and relay through our
@@ -153,15 +131,6 @@ class Container:

         while True:
             logs = self.cntr.logs()
-            try:
-                logs = self.cntr.logs()
-            except (
-                docker.errors.NotFound,
-                docker.errors.APIError
-            ):
-                log.exception('Failed to parse logs?')
-                return False
-
             entries = logs.decode().split('\n')
             for entry in entries:
@@ -169,44 +138,34 @@ class Container:
                 if not entry:
                     continue

-                entry = entry.strip()
                 try:
-                    record = json.loads(entry)
-                    msg = record[log_msg_key]
-                    level = record['level']
-
+                    record = json.loads(entry.strip())
                 except json.JSONDecodeError:
-                    msg = entry
-                    level = 'error'
+                    if 'Error' in entry:
+                        raise RuntimeError(entry)
+                    raise

-                # TODO: do we need a more general mechanism
-                # for these kinds of "log record entries"?
-                # if 'Error' in entry:
-                #     raise RuntimeError(entry)
-
-                if (
-                    msg
-                    and entry not in seen_so_far
-                ):
+                msg = record['msg']
+                level = record['level']
+                if msg and entry not in seen_so_far:
                     seen_so_far.add(entry)
-                    getattr(log, level.lower(), log.error)(f'{msg}')
+                    if bp_on_msg:
+                        await tractor.breakpoint()

-                    if level == 'fatal':
-                        raise ApplicationLogError(msg)
+                    getattr(log, level, log.error)(f'{msg}')

-                if await patt_matcher(msg):
+                # print(f'level: {level}')
+                if level in ('error', 'fatal'):
+                    raise ContainerError(msg)
+
+                if patt in msg:
                     return True

                 # do a checkpoint so we don't block if cancelled B)
-                await trio.sleep(checkpoint_period)
+                await trio.sleep(0.01)

         return False

-    @property
-    def cuid(self) -> str:
-        fqcn: str = self.cntr.attrs['Config']['Image']
-        return f'{fqcn}[{self.cntr.short_id}]'
-
     def try_signal(
         self,
         signal: str = 'SIGINT',
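Note the two matching conventions above: the right side tests a plain substring (``patt in msg``) while the left side awaits a caller-supplied predicate. A minimal sketch of such a predicate, assuming an async matcher as the ``await patt_matcher(msg)`` call implies; the matched text here is hypothetical:

async def startup_matcher(msg: str) -> bool:
    # match the (assumed) "ready" line emitted by the containerized app;
    # any substring or regex test works since only a bool is expected.
    return 'startup complete' in msg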
@@ -226,65 +185,29 @@ class Container:
             if 'is not running' in err.explanation:
                 return False

-    def hard_kill(self, start: float) -> None:
-        delay = time.time() - start
-        # get out the big guns, bc apparently marketstore
-        # doesn't actually know how to terminate gracefully
-        # :eyeroll:...
-        log.error(
-            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
-        )
-        self.try_signal('SIGKILL')
-        self.cntr.wait(
-            timeout=3,
-            condition='not-running',
-        )
-
     async def cancel(
         self,
-        log_msg_key: str,
-        stop_predicate: Callable[[str], bool],
-
-        hard_kill: bool = False,
+        stop_msg: str,

     ) -> None:
-        '''
-        Attempt to cancel this container gracefully, fail over to
-        a hard kill on timeout.
-
-        '''
         cid = self.cntr.id

         # first try a graceful cancel
         log.cancel(
-            f'SIGINT cancelling container: {self.cuid}\n'
-            'waiting on stop predicate...'
+            f'SIGINT cancelling container: {cid}\n'
+            f'waiting on stop msg: "{stop_msg}"'
         )
         self.try_signal('SIGINT')

         start = time.time()
-        for _ in range(6):
+        for _ in range(30):

             with trio.move_on_after(0.5) as cs:
-                log.cancel('polling for CNTR logs...')
+                cs.shield = True
+                await self.process_logs_until(stop_msg)

-                try:
-                    await self.process_logs_until(
-                        log_msg_key,
-                        stop_predicate,
-                    )
-                except ApplicationLogError:
-                    hard_kill = True
-                else:
-                    # if we aren't cancelled on above checkpoint then we
-                    # assume we read the expected stop msg and
-                    # terminated.
-                    break
-
-            if cs.cancelled_caught:
-                # on timeout just try a hard kill after
-                # a quick container sync-wait.
-                hard_kill = True
+            # if we aren't cancelled on above checkpoint then we
+            # assume we read the expected stop msg and terminated.
+            break

             try:
                 log.info(f'Polling for container shutdown:\n{cid}')
@@ -295,7 +218,6 @@ class Container:
                     condition='not-running',
                 )

-                # graceful exit if we didn't time out
                 break

             except (
@@ -307,39 +229,36 @@ class Container:
             except (
                 docker.errors.APIError,
                 ConnectionError,
-                requests.exceptions.ConnectionError,
-                trio.Cancelled,
             ):
                 log.exception('Docker connection failure')
-                self.hard_kill(start)
-                raise
-
-            except trio.Cancelled:
-                log.exception('trio cancelled...')
-                self.hard_kill(start)
+                break

         else:
-            hard_kill = True
+            delay = time.time() - start
+            log.error(
+                f'Failed to kill container {cid} after {delay}s\n'
+                'sending SIGKILL..'
+            )
+            # get out the big guns, bc apparently marketstore
+            # doesn't actually know how to terminate gracefully
+            # :eyeroll:...
+            self.try_signal('SIGKILL')
+            self.cntr.wait(
+                timeout=3,
+                condition='not-running',
+            )

-        if hard_kill:
-            self.hard_kill(start)
-        else:
-            log.cancel(f'Container stopped: {cid}')
+        log.cancel(f'Container stopped: {cid}')


 @tractor.context
 async def open_ahabd(
     ctx: tractor.Context,
     endpoint: str,  # ns-pointer str-msg-type
-    loglevel: str | None = 'cancel',

     **kwargs,

 ) -> None:
-    log = get_console_log(
-        loglevel,
-        name=__name__,
-    )
+    get_console_log('info', name=__name__)

     async with open_docker() as client:
@@ -350,84 +269,26 @@ async def open_ahabd(
         (
             dcntr,
             cntr_config,
-            start_pred,
-            stop_pred,
+            start_msg,
+            stop_msg,
         ) = ep_func(client)
         cntr = Container(dcntr)

-        conf: ChainMap[str, Any] = ChainMap(
-
-            # container specific
-            cntr_config,
-
-            # defaults
-            {
-                # startup time limit which is the max the supervisor
-                # will wait for the container to be registered in
-                # ``client.containers.list()``
-                'startup_timeout': 1.0,
-
-                # how fast to poll for the startup predicate by sleeping
-                # this amount incrementally thus yielding to the
-                # ``trio`` scheduler during sync polling execution.
-                'startup_query_period': 0.001,
-
-                # str-key value expected to contain log message body-contents
-                # when read using:
-                # ``json.loads(entry for entry in DockerContainer.logs())``
-                'log_msg_key': 'msg',
-
-                # startup sync func, like `Nursery.started()`
-                'started_afunc': None,
-            },
-        )
+        with trio.move_on_after(1):
+            found = await cntr.process_logs_until(start_msg)
+
+            if not found and cntr not in client.containers.list():
+                raise RuntimeError(
+                    'Failed to start `marketstore` check logs deats'
+                )
+
+        await ctx.started((
+            cntr.cntr.id,
+            os.getpid(),
+            cntr_config,
+        ))

         try:
-            with trio.move_on_after(conf['startup_timeout']) as cs:
-                async with trio.open_nursery() as tn:
-                    tn.start_soon(
-                        partial(
-                            cntr.process_logs_until,
-                            log_msg_key=conf['log_msg_key'],
-                            patt_matcher=start_pred,
-                            checkpoint_period=conf['startup_query_period'],
-                        )
-                    )
-
-                    # optional blocking routine
-                    started = conf['started_afunc']
-                    if started:
-                        await started()
-
-                    # poll for container startup or timeout
-                    while not cs.cancel_called:
-                        if dcntr in client.containers.list():
-                            break
-
-                        await trio.sleep(conf['startup_query_period'])
-
-            # sync with remote caller actor-task but allow log
-            # processing to continue running in bg.
-            await ctx.started((
-                cntr.cntr.id,
-                os.getpid(),
-                cntr_config,
-            ))
-
-            # XXX: if we timeout on finding the "startup msg" we
-            # expect then we want to FOR SURE raise an error
-            # upwards!
-            if cs.cancelled_caught:
-                # if dcntr not in client.containers.list():
-                for entry in cntr.seen_so_far:
-                    log.info(entry)
-
-                raise DockerNotStarted(
-                    f'Failed to start container: {cntr.cuid}\n'
-                    f'due to timeout={conf["startup_timeout"]}s\n\n'
-                    "check ur container's logs!"
-                )
-
             # TODO: we might eventually want a proxy-style msg-prot here
             # to allow remote control of containers without needing
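The removed ``ChainMap`` above layers the endpoint-supplied ``cntr_config`` over supervisor defaults, so per-container keys win without mutating either dict. A standalone sketch of that lookup behavior, with sample values only:

from collections import ChainMap

# per-container settings (first map) shadow supervisor defaults
# (second map); neither dict is mutated by lookups.
cntr_config = {'startup_timeout': 5.0}
defaults = {
    'startup_timeout': 1.0,
    'startup_query_period': 0.001,
    'log_msg_key': 'msg',
}
conf = ChainMap(cntr_config, defaults)

assert conf['startup_timeout'] == 5.0  # overridden by the container
assert conf['log_msg_key'] == 'msg'    # falls back to the defaults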
@@ -435,25 +296,13 @@ async def open_ahabd(
         await trio.sleep_forever()

     finally:
-        # TODO: ensure loglevel can be set and teardown logs are
-        # reported if possible on error or cancel..
-        # XXX WARNING: currently shielding here can result in hangs
-        # on ctl-c from user.. ideally we can avoid a cancel getting
-        # consumed and not propagating whilst still doing teardown
-        # logging..
         with trio.CancelScope(shield=True):
-            await cntr.cancel(
-                log_msg_key=conf['log_msg_key'],
-                stop_predicate=stop_pred,
-            )
+            await cntr.cancel(stop_msg)


 async def start_ahab(
     service_name: str,
     endpoint: Callable[docker.DockerClient, DockerContainer],
-    loglevel: str | None = 'cancel',
-    drop_root_perms: bool = True,

     task_status: TaskStatus[
         tuple[
             trio.Event,
@@ -474,12 +323,13 @@ async def start_ahab(
     '''
     cn_ready = trio.Event()
     try:
-        async with tractor.open_nursery() as an:
+        async with tractor.open_nursery(
+            loglevel='runtime',
+        ) as tn:

-            portal = await an.start_actor(
+            portal = await tn.start_actor(
                 service_name,
-                enable_modules=[__name__],
-                loglevel=loglevel,
+                enable_modules=[__name__]
             )

             # TODO: we have issues with this on teardown
@@ -489,10 +339,7 @@ async def start_ahab(

             # de-escalate root perms to the original user
             # after the docker supervisor actor is spawned.
-            if (
-                drop_root_perms
-                and config._parent_user
-            ):
+            if config._parent_user:
                 import pwd
                 os.setuid(
                     pwd.getpwnam(
@@ -503,7 +350,6 @@ async def start_ahab(
             async with portal.open_context(
                 open_ahabd,
                 endpoint=str(NamespacePath.from_ref(endpoint)),
-                loglevel='cancel',
             ) as (ctx, first):

                 cid, pid, cntr_config = first
@@ -1,827 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Pre-(path)-graphics formatted x/y nd/1d rendering subsystem.

"""
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import msgspec
from msgspec import field
import numpy as np
from numpy.lib import recfunctions as rfn

from ._sharedmem import (
    ShmArray,
)
from ._pathops import (
    path_arrays_from_ohlc,
)

if TYPE_CHECKING:
    from ._dataviz import (
        Viz,
    )
    from .._profile import Profiler


class IncrementalFormatter(msgspec.Struct):
    '''
    Incrementally updating, pre-path-graphics tracking, formatter.

    Allows tracking source data state in an updateable pre-graphics
    ``np.ndarray`` format (in local process memory) as well as
    incrementally rendering from that format **to** 1d x/y for path
    generation using ``pg.functions.arrayToQPath()``.

    '''
    shm: ShmArray
    viz: Viz

    # the value to be multiplied by any index into the x/y_1d arrays
    # given the input index is based on the original source data array.
    flat_index_ratio: float = 1

    @property
    def index_field(self) -> 'str':
        '''
        Value (``str``) used to look up the "index series" from the
        underlying source ``numpy`` struct-array; delegate directly to
        the managing ``Viz``.

        '''
        return self.viz.index_field

    # Incrementally updated xy ndarray formatted data, a pre-1d
    # format which is updated and cached independently of the final
    # pre-graphics-path 1d format.
    x_nd: Optional[np.ndarray] = None
    y_nd: Optional[np.ndarray] = None

    @property
    def xy_nd(self) -> tuple[np.ndarray, np.ndarray]:
        return (
            self.x_nd[self.xy_slice],
            self.y_nd[self.xy_slice],
        )

    @property
    def xy_slice(self) -> slice:
        return slice(
            self.xy_nd_start,
            self.xy_nd_stop,
        )

    # indexes which slice into the above arrays (which are allocated
    # based on source data shm input size) and allow retrieving
    # incrementally updated data.
    xy_nd_start: int | None = None
    xy_nd_stop: int | None = None

    # TODO: eventually incrementally update 1d-pre-graphics path data?
    x_1d: np.ndarray | None = None
    y_1d: np.ndarray | None = None

    # incremental view-change state(s) tracking
    _last_vr: tuple[float, float] | None = None
    _last_ivdr: tuple[float, float] | None = None

    @property
    def index_step_size(self) -> float:
        '''
        Readonly value computed on first ``.diff()`` call.

        '''
        return self.viz.index_step()

    def diff(
        self,
        new_read: tuple[np.ndarray],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        # TODO:
        # - can the renderer just call ``Viz.read()`` directly? unpack
        #   latest source data read
        # - eventually maybe we can implement some kind of
        #   transform on the ``QPainterPath`` that will more or less
        #   detect the diff in "elements" terms? update diff state since
        #   we've now rendered paths.
        (
            xfirst,
            xlast,
            array,
            ivl,
            ivr,
            in_view,
        ) = new_read

        index = array['index']

        # if the first index in the read array is 0 then
        # it means the source buffer has been completely backfilled to
        # available space.
        src_start = index[0]
        src_stop = index[-1] + 1

        # these are the "formatted output data" indices
        # for the pre-graphics arrays.
        nd_start = self.xy_nd_start
        nd_stop = self.xy_nd_stop

        if (
            nd_start is None
        ):
            assert nd_stop is None

            # setup to do a prepend of all existing src history
            nd_start = self.xy_nd_start = src_stop
            # set us in a zero-to-append state
            nd_stop = self.xy_nd_stop = src_stop

        # compute the length diffs between the first/last index entry in
        # the input data and the last indexes we have on record from the
        # last time we updated the curve index.
        prepend_length = int(nd_start - src_start)
        append_length = int(src_stop - nd_stop)

        # blah blah blah
        # do diffing for prepend, append and last entry
        return (
            slice(src_start, nd_start),
            prepend_length,
            append_length,
            slice(nd_stop, src_stop),
        )
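To make the ``.diff()`` index arithmetic concrete: if the formatter last tracked source rows 100..200 (``xy_nd_start=100``, ``xy_nd_stop=200``) and a fresh read reports rows 90..210, the result is a 10 row prepend and a 10 row append. A tiny standalone check with those sample numbers:

# sample values only: nd_* are the formatter's tracked bounds,
# src_* come from the latest shm read.
nd_start, nd_stop = 100, 200
src_start, src_stop = 90, 210

prepend_length = int(nd_start - src_start)  # 10 rows of new history
append_length = int(src_stop - nd_stop)     # 10 rows of new appends

assert (prepend_length, append_length) == (10, 10)
assert slice(src_start, nd_start) == slice(90, 100)  # prepend slice
assert slice(nd_stop, src_stop) == slice(200, 210)   # append slice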
    def _track_inview_range(
        self,
        view_range: tuple[int, int],

    ) -> bool:
        # if a view range is passed, plan to draw the
        # source output that's "in view" of the chart.
        vl, vr = view_range
        zoom_or_append = False
        last_vr = self._last_vr

        # incremental in-view data update.
        if last_vr:
            lvl, lvr = last_vr  # relative slice indices

            # TODO: detecting more specifically the interaction changes
            # last_ivr = self._last_ivdr or (vl, vr)
            # al, ar = last_ivr  # abs slice indices
            # left_change = abs(x_iv[0] - al) >= 1
            # right_change = abs(x_iv[-1] - ar) >= 1

            # likely a zoom/pan view change or data append update
            if (
                (vr - lvr) > 2
                or vl < lvl

                # append / prepend update
                # we had an append update where the view range
                # didn't change but the data-viewed (shifted)
                # underneath, so we need to redraw.
                # or left_change and right_change and last_vr == view_range

                # not (left_change and right_change) and ivr
                # (
                # or abs(x_iv[ivr] - livr) > 1
            ):
                zoom_or_append = True

        self._last_vr = view_range

        return zoom_or_append

    def format_to_1d(
        self,
        new_read: tuple,
        array_key: str,
        profiler: Profiler,

        slice_to_inview: bool = True,

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        shm = self.shm

        (
            _,
            _,
            array,
            ivl,
            ivr,
            in_view,

        ) = new_read

        (
            pre_slice,
            prepend_len,
            append_len,
            post_slice,
        ) = self.diff(new_read)

        # we first need to allocate xy data arrays
        # from the source data.
        if self.y_nd is None:
            self.xy_nd_start = shm._first.value
            self.xy_nd_stop = shm._last.value
            self.x_nd, self.y_nd = self.allocate_xy_nd(
                shm,
                array_key,
            )
            profiler('allocated xy history')

        # once allocated we do incremental pre/append
        # updates from the diff with the source buffer.
        else:
            if prepend_len:

                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    # this is the pre-sliced, "normally expected"
                    # new data that an updater would normally be
                    # expected to process, however in some cases (like
                    # step curves) the updater routine may want to do
                    # the source history-data reading itself, so we pass
                    # both here.
                    shm._array[pre_slice],
                    pre_slice,
                    prepend_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=False,
                )

                self.xy_nd_start -= prepend_len
                profiler('prepended xy history: {prepend_length}')

            if append_len:
                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    shm._array[post_slice],
                    post_slice,
                    append_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=True,
                )
                self.xy_nd_stop += append_len
                profiler('appended xy history: {append_length}')
                # sanity
                # slice_ln = post_slice.stop - post_slice.start
                # assert append_len == slice_ln

        view_changed: bool = False
        view_range: tuple[int, int] = (ivl, ivr)
        if slice_to_inview:
            view_changed = self._track_inview_range(view_range)
            array = in_view
            profiler(f'{self.viz.name} view range slice {view_range}')

        # TODO: we need to check if the last-datum-in-view is true and
        # if so only slice to the 2nd last datum.
        # hist = array[:slice_to_head]

        # XXX: WOA WTF TRACTOR DEBUGGING BUGGG
        # assert 0

        # xy-path data transform: convert source data to a format
        # able to be passed to a `QPainterPath` rendering routine.
        if not len(array):
            # XXX: this might be why the profiler only has exits?
            return

        # TODO: hist here should be the pre-sliced
        # x/y_data in the case where allocate_xy is
        # defined?
        x_1d, y_1d, connect = self.format_xy_nd_to_1d(
            array,
            array_key,
            view_range,
        )
        # cache/save last 1d outputs for use by other
        # readers (eg. `Viz.draw_last_datum()` in the
        # only-draw-last-uppx case).
        self.x_1d = x_1d
        self.y_1d = y_1d

        # app_tres = None
        # if append_len:
        #     appended = array[-append_len-1:slice_to_head]
        #     app_tres = self.format_xy_nd_to_1d(
        #         appended,
        #         array_key,
        #         (
        #             view_range[1] - append_len + slice_to_head,
        #             view_range[1]
        #         ),
        #     )
        #     # assert (len(appended) - 1) == append_len
        #     # assert len(appended) == append_len
        #     print(
        #         f'{self.viz.name} APPEND LEN: {append_len}\n'
        #         f'{self.viz.name} APPENDED: {appended}\n'
        #         f'{self.viz.name} app_tres: {app_tres}\n'
        #     )

        # update the last "in view data range"
        if len(x_1d):
            self._last_ivdr = x_1d[0], x_1d[-1]

        profiler('.format_to_1d()')

        return (
            x_1d,
            y_1d,
            connect,
            prepend_len,
            append_len,
            view_changed,
            # app_tres,
        )

    ###############################
    # Sub-type override interface #
    ###############################

    x_offset: np.ndarray = np.array([0])

    # optional pre-graphics xy formatted data which
    # is incrementally updated in sync with the source data.
    # XXX: was ``.allocate_xy()``
    def allocate_xy_nd(
        self,
        src_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert the structured-array ``src_shm`` format to
        an equivalently shaped (and field-less) ``np.ndarray``.

        Eg. a 4 field x N struct-array => (N, 4)

        '''
        y_nd = src_shm._array[data_field].copy()
        x_nd = (
            src_shm._array[self.index_field].copy()
            +
            self.x_offset
        )
        return x_nd, y_nd

    # XXX: was ``.update_xy()``
    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write pushed data to flattened copy
        y_nd_new = new_from_src[data_field]
        self.y_nd[read_slc] = y_nd_new

        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = (
            new_from_src[self.index_field]
            +
            self.x_offset
        )

        # x_nd = self.x_nd[self.xy_slice]
        # y_nd = self.y_nd[self.xy_slice]
        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     print(
        #         f'{name.upper()}:\n'
        #         'NEW_FROM_SRC:\n'
        #         f'new_from_src: {new_from_src}\n\n'

        #         f'PRE self.x_nd:'
        #         f'\n{list(x_nd[-s:])}\n'

        #         f'PRE self.y_nd:\n'
        #         f'{list(y_nd[-s:])}\n\n'

        #         f'TO WRITE:\n'

        #         f'x_nd_new:\n'
        #         f'{x_nd_new[0]}\n'

        #         f'y_nd_new:\n'
        #         f'{y_nd_new}\n'
        #     )

    # XXX: was ``.format_xy()``
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,  # 1d x
        np.ndarray,  # 1d y
        np.ndarray | str,  # connection array/style
    ]:
        '''
        Default xy-nd array to 1d pre-graphics-path render routine.

        Return single field column data verbatim

        '''
        # NOTE: we don't include the very last datum which is filled in
        # normally by another graphics object.
        x_1d = array[self.index_field][:-1]
        y_1d = array[array_key][:-1]

        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     x_nd = list(self.x_nd[self.xy_slice][-s:-1])
        #     y_nd = list(self.y_nd[self.xy_slice][-s:-1])
        #     print(
        #         f'{name}:\n'
        #         f'XY data:\n'
        #         f'x: {x_nd}\n'
        #         f'y: {y_nd}\n\n'
        #         f'x_1d: {list(x_1d[-s:])}\n'
        #         f'y_1d: {list(y_1d[-s:])}\n\n'
        #     )
        return (
            x_1d,
            y_1d,

            # 1d connection array or style-key to
            # ``pg.functions.arrayToQPath()``
            'all',
        )


class OHLCBarsFmtr(IncrementalFormatter):
    x_offset: np.ndarray = np.array([
        -0.5,
        0,
        0,
        0.5,
    ])

    fields: list[str] = field(
        default_factory=lambda: ['open', 'high', 'low', 'close']
    )
    flat_index_ratio: float = 4

    def allocate_xy_nd(
        self,

        ohlc_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input struct-array holding OHLC samples into a pair of
        flattened x, y arrays with the same size (datums wise) as the source
        data.

        '''
        y_nd = ohlc_shm.ustruct(self.fields)

        # generate a flat-interpolated x-domain
        x_nd = (
            np.broadcast_to(
                ohlc_shm._array[self.index_field][:, None],
                (
                    ohlc_shm._array.size,
                    # 4,  # only ohlc
                    y_nd.shape[1],
                ),
            )
            +
            self.x_offset
        )
        assert y_nd.any()

        # write pushed data to flattened copy
        return (
            x_nd,
            y_nd,
        )
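The x-domain allocation above duplicates each source index across one column per OHLC field and nudges the columns by ``x_offset`` so bar arms don't overlap. A small self-contained demo of that broadcast, using a toy 3-row index:

import numpy as np

index = np.array([10., 11., 12.])
x_offset = np.array([-0.5, 0, 0, 0.5])

# one row per datum, one column per OHLC field, each column
# shifted so bar arms land beside (not on) the index.
x_nd = np.broadcast_to(index[:, None], (index.size, 4)) + x_offset

assert x_nd.shape == (3, 4)
assert x_nd[0].tolist() == [9.5, 10.0, 10.0, 10.5]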
    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write newly pushed data to flattened copy
        # a struct-arr is always passed in.
        new_y_nd = rfn.structured_to_unstructured(
            new_from_src[self.fields]
        )
        self.y_nd[read_slc] = new_y_nd

        # generate same-valued-per-row x support based on y shape
        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = np.broadcast_to(
            new_from_src[self.index_field][:, None],
            new_y_nd.shape,
        ) + self.x_offset

    # TODO: can we drop this frame and just use the above?
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

        start: int = 0,  # XXX: do we need this?
        # 0.5 is no overlap between arms, 1.0 is full overlap
        w: float = 0.16,

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        np.ndarray,
    ]:
        '''
        More or less direct proxy to the ``numba``-fied
        ``path_arrays_from_ohlc()`` (above) but with closed in kwargs
        for line spacing.

        '''
        x, y, c = path_arrays_from_ohlc(
            array[:-1],
            start,
            bar_w=self.index_step_size,
            bar_gap=w * self.index_step_size,

            # XXX: don't ask, due to a ``numba`` bug..
            use_time_index=(self.index_field == 'time'),
        )
        return x, y, c


class OHLCBarsAsCurveFmtr(OHLCBarsFmtr):

    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        str,
    ]:
        # TODO: in the case of an existing ``.update_xy()``
        # should we be passing in array as an xy arrays tuple?

        # 2 more datum-indexes to capture zero at end
        x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop-1]
        y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop-1]

        # slice to view
        ivl, ivr = vr
        x_iv_flat = x_flat[ivl:ivr]
        y_iv_flat = y_flat[ivl:ivr]

        # reshape to 1d for graphics rendering
        y_iv = y_iv_flat.reshape(-1)
        x_iv = x_iv_flat.reshape(-1)

        return x_iv, y_iv, 'all'


class StepCurveFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        0,
        1,
    ])

    def allocate_xy_nd(
        self,

        shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input 1d shm array to a "step array" format
        for use by path graphics generation.

        '''
        i = shm._array[self.index_field].copy()
        out = shm._array[data_field].copy()

        x_out = (
            np.broadcast_to(
                i[:, None],
                (i.size, 2),
            )
            +
            self.x_offset
        )

        # fill out Nx2 array to hold each step's left + right vertices.
        y_out = np.empty(
            x_out.shape,
            dtype=out.dtype,
        )
        # fill in (current) values from source shm buffer
        y_out[:] = out[:, np.newaxis]

        # TODO: pretty sure we can drop this?
        # start y at origin level
        # y_out[0, 0] = 0
        # y_out[self.xy_nd_start] = 0
        return x_out, y_out
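The step allocation above turns every datum into a (left, right) vertex pair via the ``(0, 1)`` x-offset, with the y-level duplicated across both vertices. A toy demo of just that shaping:

import numpy as np

# each datum becomes a 2-vertex step: x spans [i, i+1],
# y holds the level at both endpoints.
i = np.array([0., 1., 2.])
vals = np.array([5., 6., 7.])

x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([0, 1])
y_out = np.empty(x_out.shape, dtype=vals.dtype)
y_out[:] = vals[:, np.newaxis]

assert x_out.tolist() == [[0., 1.], [1., 2.], [2., 3.]]
assert y_out.tolist() == [[5., 5.], [6., 6.], [7., 7.]]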
    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        array_key: str,

        new_from_src: np.ndarray,  # portion of source that was updated
        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> tuple[
        np.ndarray,
        slice,
    ]:
        # NOTE: for a step curve we slice from one datum prior
        # to the current "update slice" to get the previous
        # "level".
        #
        # why this is needed,
        # - the current new append slice will often have a zero
        #   value in the latest datum-step (at least for zero-on-new
        #   cases like vlm in the) as per configuration of the FSP
        #   engine.
        # - we need to look back a datum to get the last level which
        #   will be used to terminate/complete the last step x-width
        #   which will be set to pair with the last x-index THIS MEANS
        #
        # XXX: this means WE CAN'T USE the append slice since we need to
        # "look backward" one step to get the needed back-to-zero level
        # and the update data in ``new_from_src`` will only contain the
        # latest new data.
        back_1 = slice(
            read_slc.start - 1,
            read_slc.stop,
        )

        to_write = src_shm._array[back_1]
        y_nd_new = self.y_nd[back_1]
        y_nd_new[:] = to_write[array_key][:, None]

        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = (
            new_from_src[self.index_field][:, None]
            +
            self.x_offset
        )

        # XXX: uncomment for debugging
        # x_nd = self.x_nd[self.xy_slice]
        # y_nd = self.y_nd[self.xy_slice]
        # name = self.viz.name
        # if 'dolla_vlm' in name:
        #     s = 4
        #     print(
        #         f'{name}:\n'
        #         'NEW_FROM_SRC:\n'
        #         f'new_from_src: {new_from_src}\n\n'

        #         f'PRE self.x_nd:'
        #         f'\n{x_nd[-s:]}\n'
        #         f'PRE self.y_nd:\n'
        #         f'{y_nd[-s:]}\n\n'

        #         f'TO WRITE:\n'
        #         f'x_nd_new:\n'
        #         f'{x_nd_new}\n'
        #         f'y_nd_new:\n'
        #         f'{y_nd_new}\n'
        #     )

    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        str,
    ]:
        last_t, last = array[-1][[self.index_field, array_key]]

        start = self.xy_nd_start
        stop = self.xy_nd_stop

        x_step = self.x_nd[start:stop]
        y_step = self.y_nd[start:stop]

        # slice out in-view data
        ivl, ivr = vr

        # NOTE: add an extra step to get the vertical-line-down-to-zero
        # adjacent to the last-datum graphic (filled rect).
        x_step_iv = x_step[ivl:ivr+1]
        y_step_iv = y_step[ivl:ivr+1]

        # flatten to 1d
        x_1d = x_step_iv.reshape(x_step_iv.size)
        y_1d = y_step_iv.reshape(y_step_iv.size)

        # debugging
        # if y_1d.any():
        #     s = 6
        #     print(
        #         f'x_step_iv:\n{x_step_iv[-s:]}\n'
        #         f'y_step_iv:\n{y_step_iv[-s:]}\n\n'
        #         f'x_1d:\n{x_1d[-s:]}\n'
        #         f'y_1d:\n{y_1d[-s:]}\n'
        #     )

        return x_1d, y_1d, 'all'
@@ -56,7 +56,7 @@ def iterticks(
             sig = (
                 time,
                 tick['price'],
-                tick.get('size')
+                tick['size']
             )

             if ttype == 'dark_trade':
@@ -1,452 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Super fast ``QPainterPath`` generation related operator routines.

"""
from math import (
    ceil,
    floor,
)

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
    # types,
    njit,
    float64,
    int64,
    # optional,
)

# TODO: for ``numba`` typing..
# from ._source import numba_ohlc_dtype
from ._m4 import ds_m4
from .._profile import (
    Profiler,
    pg_profile_enabled,
    ms_slower_then,
)


def xy_downsample(
    x,
    y,
    uppx,

    x_spacer: float = 0.5,

) -> tuple[
    np.ndarray,
    np.ndarray,
    float,
    float,
]:
    '''
    Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
    ``uppx`` (units-per-pixel) and add space between discrete datums.

    '''
    # downsample whenever more than 1 pixel per datum can be shown.
    # always refresh data bounds until we get diffing
    # working properly, see above..
    m4_out = ds_m4(
        x,
        y,
        uppx,
    )

    if m4_out is not None:
        bins, x, y, ymn, ymx = m4_out
        # flatten output to 1d arrays suitable for path-graphics generation.
        x = np.broadcast_to(x[:, None], y.shape)
        x = (x + np.array(
            [-x_spacer, 0, 0, x_spacer]
        )).flatten()
        y = y.flatten()

        return x, y, ymn, ymx

    # XXX: we accept a None output for the case where the input range
    # to ``ds_m4()`` is bad (-ve) and we want to catch and debug
    # that (seemingly super rare) circumstance..
    return None
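After M4 reduces each pixel bin to (first, min, max, last), the flatten step above broadcasts the bin's x across its four y samples and spreads them by ``x_spacer``. A toy demo of just that flatten, assuming a hypothetical 2-bin M4 output:

import numpy as np

x_spacer = 0.5
# hypothetical M4 output: one x per bin, 4 y-samples per bin
x = np.array([0., 1.])
y = np.array([
    [1.0, 0.5, 2.0, 1.5],  # first, min, max, last for bin 0
    [1.5, 1.0, 3.0, 2.5],  # same for bin 1
])

x4 = np.broadcast_to(x[:, None], y.shape)
x_flat = (x4 + np.array([-x_spacer, 0, 0, x_spacer])).flatten()
y_flat = y.flatten()

assert x_flat.tolist() == [-0.5, 0.0, 0.0, 0.5, 0.5, 1.0, 1.0, 1.5]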
@njit(
    # NOTE: need to construct this manually for readonly
    # arrays, see https://github.com/numba/numba/issues/4511
    # (
    #     types.Array(
    #         numba_ohlc_dtype,
    #         1,
    #         'C',
    #         readonly=True,
    #     ),
    #     int64,
    #     types.unicode_type,
    #     optional(float64),
    # ),
    nogil=True
)
def path_arrays_from_ohlc(
    data: np.ndarray,
    start: int64,
    bar_w: float64,
    bar_gap: float64 = 0.16,
    use_time_index: bool = True,

    # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
    # index_field: str,

) -> tuple[
    np.ndarray,
    np.ndarray,
    np.ndarray,
]:
    '''
    Generate an array of lines objects from input ohlc data.

    '''
    size = int(data.shape[0] * 6)

    # XXX: see this for why the dtype might have to be defined outside
    # the routine.
    # https://github.com/numba/numba/issues/4098#issuecomment-493914533
    x = np.zeros(
        shape=size,
        dtype=float64,
    )
    y, c = x.copy(), x.copy()

    half_w: float = bar_w/2

    # TODO: report bug for assert @
    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    for i, q in enumerate(data[start:], start):

        open = q['open']
        high = q['high']
        low = q['low']
        close = q['close']

        if use_time_index:
            index = float64(q['time'])
        else:
            index = float64(q['index'])

        # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
        # index = float64(q[index_field])
        # AND this (probably)
        # open, high, low, close, index = q[
        #     ['open', 'high', 'low', 'close', 'index']]

        istart = i * 6
        istop = istart + 6

        # x,y detail the 6 points which connect all vertexes of an ohlc bar
        mid: float = index + half_w
        x[istart:istop] = (
            index + bar_gap,
            mid,
            mid,
            mid,
            mid,
            index + bar_w - bar_gap,
        )
        y[istart:istop] = (
            open,
            open,
            low,
            high,
            close,
            close,
        )

        # specifies that the first edge is never connected to the
        # prior bars last edge thus providing a small "gap"/"space"
        # between bars determined by ``bar_gap``.
        c[istart:istop] = (1, 1, 1, 1, 1, 0)

    return x, y, c
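Each bar consumes six (x, y) vertices: two for the open arm, two spanning the low-to-high wick, two for the close arm, with the connect array zeroing the link into the next bar. A quick pure-Python spelling of that layout for one hypothetical bar:

# one hypothetical bar at index=100 with bar_w=1, bar_gap=0.16
index, bar_w, bar_gap = 100.0, 1.0, 0.16
mid = index + bar_w / 2
open_, high, low, close = 10.0, 12.0, 9.0, 11.0

x = (index + bar_gap, mid, mid, mid, mid, index + bar_w - bar_gap)
y = (open_, open_, low, high, close, close)
c = (1, 1, 1, 1, 1, 0)  # last vertex not connected to the next bar

assert [round(v, 2) for v in x] == [100.16, 100.5, 100.5, 100.5, 100.5, 100.84]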
def hl2mxmn(
    ohlc: np.ndarray,
    index_field: str = 'index',

) -> np.ndarray:
    '''
    Convert an OHLC struct-array containing 'high'/'low' columns
    to a "joined" max/min 1-d array.

    '''
    index = ohlc[index_field]
    hls = ohlc[[
        'low',
        'high',
    ]]

    mxmn = np.empty(2*hls.size, dtype=np.float64)
    x = np.empty(2*hls.size, dtype=np.float64)
    trace_hl(hls, mxmn, x, index[0])
    x = x + index[0]

    return mxmn, x


@njit(
    # TODO: the type annots..
    # float64[:](float64[:],),
)
def trace_hl(
    hl: 'np.ndarray',
    out: np.ndarray,
    x: np.ndarray,
    start: int,

    # the "offset" values in the x-domain which
    # place the 2 output points around each ``int``
    # master index.
    margin: float = 0.43,

) -> None:
    '''
    "Trace" the outline of the high-low values of an ohlc sequence
    as a line such that the maximum deviation (aka dispersion) between
    bars is preserved.

    This routine is expected to modify input arrays in-place.

    '''
    last_l = hl['low'][0]
    last_h = hl['high'][0]

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i] = last_h

        last_l = l
        last_h = h

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin

    return out
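The core rule in ``trace_hl()`` is: per bar, keep whichever (previous-extreme to new-extreme) 2-point segment has the larger spread, so the maximum dispersion between adjacent bars survives the flatten. A toy check of that rule with made-up levels:

# prior bar's low/high vs the current bar's
last_l, last_h = 9.0, 11.0
l, h = 10.0, 14.0

up_diff = h - last_l    # 5.0: prev-low -> new-high
down_diff = last_h - l  # 1.0: prev-high -> new-low
seg = (last_l, h) if up_diff > down_diff else (last_h, l)
assert seg == (9.0, 14.0)  # the wider segment wins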
def ohlc_flatten(
    ohlc: np.ndarray,
    use_mxmn: bool = True,
    index_field: str = 'index',

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLCV struct-array into a flat ready-for-line-plotting
    1-d array that is 4 times the size with x-domain values distributed
    evenly (by 0.5 steps) over each index.

    '''
    index = ohlc[index_field]

    if use_mxmn:
        # traces a line optimally over highs to lows
        # using numba. NOTE: pretty sure this is faster
        # and looks about the same as the below output.
        flat, x = hl2mxmn(ohlc)

    else:
        flat = rfn.structured_to_unstructured(
            ohlc[['open', 'high', 'low', 'close']]
        ).flatten()

        x = np.linspace(
            start=index[0] - 0.5,
            stop=index[-1] + 0.5,
            num=len(flat),
        )
    return x, flat


def slice_from_time(
    arr: np.ndarray,
    start_t: float,
    stop_t: float,
    step: int | None = None,

) -> slice:
    '''
    Calculate array indices mapped from a time range and return them in
    a slice.

    Given an input array with an epoch `'time'` series entry, calculate
    the indices which span the time range and return in a slice. Presume
    each `'time'` step increment is uniform and when the time stamp
    series contains gaps (the uniform presumption is untrue) use
    ``np.searchsorted()`` binary search to look up the appropriate
    index.

    '''
    profiler = Profiler(
        msg='slice_from_time()',
        disabled=not pg_profile_enabled(),
        ms_threshold=ms_slower_then,
    )

    times = arr['time']
    t_first = floor(times[0])
    t_last = ceil(times[-1])

    # the greatest index we can return which slices to the
    # end of the input array.
    read_i_max = arr.shape[0]

    # TODO: require this is always passed in?
    if step is None:
        step = round(t_last - times[-2])
        if step == 0:
            step = 1

    # compute (presumed) uniform-time-step index offsets
    i_start_t = floor(start_t)
    read_i_start = floor(((i_start_t - t_first) // step)) - 1

    i_stop_t = ceil(stop_t)

    # XXX: edge case -> always set stop index to last in array whenever
    # the input stop time is detected to be greater than the equiv time
    # stamp at that last entry.
    if i_stop_t >= t_last:
        read_i_stop = read_i_max
    else:
        read_i_stop = ceil((i_stop_t - t_first) // step) + 1

    # always clip outputs to array support
    # for read start:
    # - never allow a start < the 0 index
    # - never allow an end index > the read array len
    read_i_start = min(
        max(0, read_i_start),
        read_i_max - 1,
    )
    read_i_stop = max(
        0,
        min(read_i_stop, read_i_max),
    )

    # check for larger-than-latest calculated index for given start
    # time, in which case we do a binary search for the correct index.
    # NOTE: this is usually the result of a time series with time gaps
    # where it is expected that each index step maps to a uniform step
    # in the time stamp series.
    t_iv_start = times[read_i_start]
    if (
        t_iv_start > i_start_t
    ):
        # do a binary search for the best index mapping to ``start_t``
        # given we measured an overshoot using the uniform-time-step
        # calculation from above.

        # TODO: once we start caching these per source-array,
        # we can just overwrite ``read_i_start`` directly.
        new_read_i_start = np.searchsorted(
            times,
            i_start_t,
            side='left',
        )

        # TODO: minimize binary search work as much as possible:
        # - cache these remap values which compensate for gaps in the
        #   uniform time step basis where we calc a later start
        #   index for the given input ``start_t``.
        # - can we shorten the input search sequence by heuristic?
        #   up_to_arith_start = index[:read_i_start]

        if (
            new_read_i_start <= read_i_start
        ):
            # t_diff = t_iv_start - start_t
            # print(
            #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
            #     f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
            #     f'diff: {t_diff}\n'
            #     f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
            # )
            read_i_start = new_read_i_start - 1

    t_iv_stop = times[read_i_stop - 1]
    if (
        t_iv_stop > i_stop_t
    ):
        # t_diff = stop_t - t_iv_stop
        # print(
        #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
        #     f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
        #     f'diff: {t_diff}\n'
        #     # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
        # )
        new_read_i_stop = np.searchsorted(
            times[read_i_start:],
            # times,
            i_stop_t,
            side='left',
        )

        if (
            new_read_i_stop <= read_i_stop
        ):
            read_i_stop = read_i_start + new_read_i_stop + 1

    # sanity checks for range size
    # samples = (i_stop_t - i_start_t) // step
    # index_diff = read_i_stop - read_i_start + 1
    # if index_diff > (samples + 3):
    #     breakpoint()

    # read-relative indexes: gives a slice where `shm.array[read_slc]`
    # will be the data spanning the input time range `start_t` ->
    # `stop_t`
    read_slc = slice(
        int(read_i_start),
        int(read_i_stop),
    )

    profiler(
        'slicing complete'
        # f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
        # f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
    )

    # NOTE: if caller needs absolute buffer indices they can
    # slice the buffer abs index like so:
    # index = arr['index']
    # abs_indx = index[read_slc]
    # abs_slc = slice(
    #     int(abs_indx[0]),
    #     int(abs_indx[-1]),
    # )

    return read_slc
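A toy mirror of the uniform-step fast path above (not the function itself), assuming a gapless 1s time series with made-up values:

import numpy as np

# toy 'time'-stamped struct array with a uniform 1s step
dtype = np.dtype([('time', 'f8'), ('close', 'f8')])
arr = np.zeros(100, dtype=dtype)
arr['time'] = 1_000 + np.arange(100)  # epochs 1000..1099

# ((start_t - t_first) // step) - 1 for the start,
# (... // step) + 1 for the stop, both clipped to the array support
start_t, stop_t, step = 1_010.0, 1_020.0, 1
read_i_start = max(0, int((start_t - arr['time'][0]) // step) - 1)
read_i_stop = min(len(arr), int((stop_t - arr['time'][0]) // step) + 1)

read_slc = slice(read_i_start, read_i_stop)
assert arr['time'][read_slc][0] <= start_t
assert arr['time'][read_slc][-1] >= stop_t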
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
 if _USE_POSIX:
     from _posixshmem import shm_unlink

-# import msgspec
-import numpy as np
-from numpy.lib import recfunctions as rfn
 import tractor
+import numpy as np
+from pydantic import BaseModel
+from numpy.lib import recfunctions as rfn

 from ..log import get_logger
 from ._source import base_iohlc_dtype
-from .types import Struct


 log = get_logger(__name__)
@@ -50,11 +49,7 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)


 def cuckoff_mantracker():
-    '''
-    Disable all ``multiprocessing`` "resource tracking" machinery since
-    it's an absolute multi-threaded mess of non-SC madness.

-    '''
     from multiprocessing import resource_tracker as mantracker

     # Tell the "resource tracker" thing to fuck off.
@ -112,39 +107,36 @@ class SharedInt:
|
||||||
log.warning(f'Shm for {name} already unlinked?')
|
log.warning(f'Shm for {name} already unlinked?')
|
||||||
|
|
||||||
|
|
||||||
class _Token(Struct, frozen=True):
|
class _Token(BaseModel):
|
||||||
'''
|
'''
|
||||||
Internal represenation of a shared memory "token"
|
Internal represenation of a shared memory "token"
|
||||||
which can be used to key a system wide post shm entry.
|
which can be used to key a system wide post shm entry.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
class Config:
|
||||||
|
frozen = True
|
||||||
|
|
||||||
shm_name: str # this servers as a "key" value
|
shm_name: str # this servers as a "key" value
|
||||||
shm_first_index_name: str
|
shm_first_index_name: str
|
||||||
shm_last_index_name: str
|
shm_last_index_name: str
|
||||||
dtype_descr: tuple
|
dtype_descr: tuple
|
||||||
size: int # in struct-array index / row terms
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def dtype(self) -> np.dtype:
|
def dtype(self) -> np.dtype:
|
||||||
return np.dtype(list(map(tuple, self.dtype_descr))).descr
|
return np.dtype(list(map(tuple, self.dtype_descr))).descr
|
||||||
|
|
||||||
def as_msg(self):
|
def as_msg(self):
|
||||||
return self.to_dict()
|
return self.dict()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_msg(cls, msg: dict) -> _Token:
|
def from_msg(cls, msg: dict) -> _Token:
|
||||||
if isinstance(msg, _Token):
|
if isinstance(msg, _Token):
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
# TODO: native struct decoding
|
|
||||||
# return _token_dec.decode(msg)
|
|
||||||
|
|
||||||
msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
|
msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
|
||||||
return _Token(**msg)
|
return _Token(**msg)
|
||||||
|
|
||||||
|
|
||||||
# _token_dec = msgspec.msgpack.Decoder(_Token)
|
|
||||||
|
|
||||||
# TODO: this api?
|
# TODO: this api?
|
||||||
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
|
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
|
||||||
# _known_tokens = tractor.ContextStack('_known_tokens', )
|
# _known_tokens = tractor.ContextStack('_known_tokens', )
|
||||||
|
@ -163,7 +155,6 @@ def get_shm_token(key: str) -> _Token:
|
||||||
|
|
||||||
def _make_token(
|
def _make_token(
|
||||||
key: str,
|
key: str,
|
||||||
size: int,
|
|
||||||
dtype: Optional[np.dtype] = None,
|
dtype: Optional[np.dtype] = None,
|
||||||
) -> _Token:
|
) -> _Token:
|
||||||
'''
|
'''
|
||||||
|
@ -176,8 +167,7 @@ def _make_token(
|
||||||
shm_name=key,
|
shm_name=key,
|
||||||
shm_first_index_name=key + "_first",
|
shm_first_index_name=key + "_first",
|
||||||
shm_last_index_name=key + "_last",
|
shm_last_index_name=key + "_last",
|
||||||
dtype_descr=tuple(np.dtype(dtype).descr),
|
dtype_descr=np.dtype(dtype).descr
|
||||||
size=size,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -229,7 +219,6 @@ class ShmArray:
|
||||||
shm_first_index_name=self._first._shm.name,
|
shm_first_index_name=self._first._shm.name,
|
||||||
shm_last_index_name=self._last._shm.name,
|
shm_last_index_name=self._last._shm.name,
|
||||||
dtype_descr=tuple(self._array.dtype.descr),
|
dtype_descr=tuple(self._array.dtype.descr),
|
||||||
size=self._len,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -444,7 +433,7 @@ class ShmArray:
|
||||||
def open_shm_array(
|
def open_shm_array(
|
||||||
|
|
||||||
key: Optional[str] = None,
|
key: Optional[str] = None,
|
||||||
size: int = _default_size, # see above
|
size: int = _default_size,
|
||||||
dtype: Optional[np.dtype] = None,
|
dtype: Optional[np.dtype] = None,
|
||||||
readonly: bool = False,
|
readonly: bool = False,
|
||||||
|
|
||||||
|
@ -475,8 +464,7 @@ def open_shm_array(
|
||||||
|
|
||||||
token = _make_token(
|
token = _make_token(
|
||||||
key=key,
|
key=key,
|
||||||
size=size,
|
dtype=dtype
|
||||||
dtype=dtype,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# create single entry arrays for storing an first and last indices
|
# create single entry arrays for storing an first and last indices
|
||||||
|
@ -528,15 +516,15 @@ def open_shm_array(
|
||||||
# "unlink" created shm on process teardown by
|
# "unlink" created shm on process teardown by
|
||||||
# pushing teardown calls onto actor context stack
|
# pushing teardown calls onto actor context stack
|
||||||
|
|
||||||
stack = tractor.current_actor().lifetime_stack
|
tractor._actor._lifetime_stack.callback(shmarr.close)
|
||||||
stack.callback(shmarr.close)
|
tractor._actor._lifetime_stack.callback(shmarr.destroy)
|
||||||
stack.callback(shmarr.destroy)
|
|
||||||
|
|
||||||
return shmarr
|
return shmarr
|
||||||
|
|
||||||
|
|
||||||
def attach_shm_array(
|
def attach_shm_array(
|
||||||
token: tuple[str, str, tuple[str, str]],
|
token: tuple[str, str, tuple[str, str]],
|
||||||
|
size: int = _default_size,
|
||||||
readonly: bool = True,
|
readonly: bool = True,
|
||||||
|
|
||||||
) -> ShmArray:
|
) -> ShmArray:
|
||||||
|
@ -575,7 +563,7 @@ def attach_shm_array(
|
||||||
raise _err
|
raise _err
|
||||||
|
|
||||||
shmarr = np.ndarray(
|
shmarr = np.ndarray(
|
||||||
(token.size,),
|
(size,),
|
||||||
dtype=token.dtype,
|
dtype=token.dtype,
|
||||||
buffer=shm.buf
|
buffer=shm.buf
|
||||||
)
|
)
|
||||||
|
@ -614,8 +602,8 @@ def attach_shm_array(
|
||||||
if key not in _known_tokens:
|
if key not in _known_tokens:
|
||||||
_known_tokens[key] = token
|
_known_tokens[key] = token
|
||||||
|
|
||||||
# "close" attached shm on actor teardown
|
# "close" attached shm on process teardown
|
||||||
tractor.current_actor().lifetime_stack.callback(sha.close)
|
tractor._actor._lifetime_stack.callback(sha.close)
|
||||||
|
|
||||||
return sha
|
return sha
|
||||||
|
|
||||||
|
@ -643,7 +631,6 @@ def maybe_open_shm_array(
|
||||||
use ``attach_shm_array``.
|
use ``attach_shm_array``.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
size = kwargs.pop('size', _default_size)
|
|
||||||
try:
|
try:
|
||||||
# see if we already know this key
|
# see if we already know this key
|
||||||
token = _known_tokens[key]
|
token = _known_tokens[key]
|
||||||
|
@ -651,11 +638,7 @@ def maybe_open_shm_array(
|
||||||
except KeyError:
|
except KeyError:
|
||||||
log.warning(f"Could not find {key} in shms cache")
|
log.warning(f"Could not find {key} in shms cache")
|
||||||
if dtype:
|
if dtype:
|
||||||
token = _make_token(
|
token = _make_token(key, dtype)
|
||||||
key,
|
|
||||||
size=size,
|
|
||||||
dtype=dtype,
|
|
||||||
)
|
|
||||||
try:
|
try:
|
||||||
return attach_shm_array(token=token, **kwargs), False
|
return attach_shm_array(token=token, **kwargs), False
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
|
|
|
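Both sides of the `_Token` change above encode the same idea: a token is
just the named-shm key plus the dtype description needed to re-attach a
``numpy`` view from any process. A simplified, stdlib-only sketch of that
open/attach roundtrip (segment name and fields are arbitrary; the real code
also tracks first/last index counters in companion segments)::

    import numpy as np
    from multiprocessing.shared_memory import SharedMemory

    dtype = np.dtype([('time', float), ('close', float)])

    # "open": allocate a named segment and wrap it in a struct-array view
    shm = SharedMemory(name='piker_demo', create=True, size=100 * dtype.itemsize)
    arr = np.ndarray((100,), dtype=dtype, buffer=shm.buf)

    # the "token" any other process needs to re-attach
    token = {
        'shm_name': shm.name,
        'dtype_descr': tuple(dtype.descr),
        'size': 100,
    }

    # "attach": rebuild an identical view from only the token
    shm2 = SharedMemory(name=token['shm_name'])
    arr2 = np.ndarray(
        (token['size'],),
        dtype=np.dtype(list(map(tuple, token['dtype_descr']))),
        buffer=shm2.buf,
    )

    arr[0] = (1.0, 2.0)
    assert arr2[0]['close'] == 2.0  # both views share the same memory

    shm2.close()
    shm.close()
    shm.unlink()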
@@ -18,16 +18,12 @@
 numpy data source coversion helpers.
 """
 from __future__ import annotations
-from decimal import (
-    Decimal,
-    ROUND_HALF_EVEN,
-)
 from typing import Any
+import decimal

 from bidict import bidict
 import numpy as np
+from pydantic import BaseModel

-from .types import Struct

 # from numba import from_dtype
@@ -80,14 +76,10 @@ def mk_fqsn(
 def float_digits(
     value: float,
 ) -> int:
-    '''
-    Return the number of precision digits read from a float value.
-
-    '''
     if value == 0:
         return 0

-    return int(-Decimal(str(value)).as_tuple().exponent)
+    return int(-decimal.Decimal(str(value)).as_tuple().exponent)


 def ohlc_zeros(length: int) -> np.ndarray:
@@ -134,57 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     )


-class MktPair(Struct, frozen=True):
-
-    src: str  # source asset name being used to buy
-    src_type: str  # source asset's financial type/classification name
-    # ^ specifies a "class" of financial instrument
-    # egs. stock, futer, option, bond etc.
-
-    dst: str  # destination asset name being bought
-    dst_type: str  # destination asset's financial type/classification name
-
-    price_tick: float  # minimum price increment value increment
-    price_tick_digits: int  # required decimal digits for above
-
-    size_tick: float  # minimum size (aka vlm) increment value increment
-    size_tick_digits: int  # required decimal digits for above
-
-    venue: str | None = None  # market venue provider name
-    expiry: str | None = None  # for derivs, expiry datetime parseable str
-
-    # for derivs, info describing contract, egs.
-    # strike price, call or put, swap type, exercise model, etc.
-    contract_info: str | None = None
-
-    @classmethod
-    def from_msg(
-        self,
-        msg: dict[str, Any],
-
-    ) -> MktPair:
-        '''
-        Constructor for a received msg-dict normally received over IPC.
-
-        '''
-        ...
-
-    # fqa, fqma, .. etc. see issue:
-    # https://github.com/pikers/piker/issues/467
-    @property
-    def fqsn(self) -> str:
-        '''
-        Return the fully qualified market (endpoint) name for the
-        pair of transacting assets.
-
-        '''
-        ...
-
-
-# TODO: rework the below `Symbol` (which was originally inspired and
-# derived from stuff in quantdom) into a simpler, ipc msg ready, market
-# endpoint meta-data container type as per the drafted interace above.
-class Symbol(Struct):
+class Symbol(BaseModel):
     '''
     I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
@@ -198,6 +140,10 @@ class Symbol(Struct):
     suffix: str = ''
     broker_info: dict[str, dict[str, Any]] = {}

+    # specifies a "class" of financial instrument
+    # ex. stock, futer, option, bond etc.
+
+    # @validate_arguments
     @classmethod
     def from_broker_info(
         cls,
@@ -206,17 +152,19 @@ class Symbol(Struct):
         info: dict[str, Any],
         suffix: str = '',

-    ) -> Symbol:
+    # XXX: like wtf..
+    # ) -> 'Symbol':
+    ) -> None:

         tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
+        lot_tick_size = info.get('lot_tick_size', 0.0)

         return Symbol(
             key=symbol,
             tick_size=tick_size,
-            lot_tick_size=lot_size,
+            lot_tick_size=lot_tick_size,
             tick_size_digits=float_digits(tick_size),
-            lot_size_digits=float_digits(lot_size),
+            lot_size_digits=float_digits(lot_tick_size),
             suffix=suffix,
             broker_info={broker: info},
         )
@@ -227,7 +175,9 @@ class Symbol(Struct):
         fqsn: str,
         info: dict[str, Any],

-    ) -> Symbol:
+    # XXX: like wtf..
+    # ) -> 'Symbol':
+    ) -> None:
         broker, key, suffix = unpack_fqsn(fqsn)
         return cls.from_broker_info(
             broker,
@@ -271,10 +221,6 @@ class Symbol(Struct):
         else:
             return (key, broker)

-    @property
-    def fqsn(self) -> str:
-        return '.'.join(self.tokens()).lower()
-
     def front_fqsn(self) -> str:
         '''
         fqsn = "fully qualified symbol name"
@@ -294,24 +240,18 @@ class Symbol(Struct):

         '''
         tokens = self.tokens()
-        fqsn = '.'.join(map(str.lower, tokens))
+        fqsn = '.'.join(tokens)
         return fqsn

-    def quantize_size(
-        self,
-        size: float,
-
-    ) -> Decimal:
-        '''
-        Truncate input ``size: float`` using ``Decimal``
-        and ``.lot_size_digits``.
-
-        '''
-        digits = self.lot_size_digits
-        return Decimal(size).quantize(
-            Decimal(f'1.{"0".ljust(digits, "0")}'),
-            rounding=ROUND_HALF_EVEN
-        )
+    def iterfqsns(self) -> list[str]:
+        keys = []
+        for broker in self.broker_info.keys():
+            fqsn = mk_fqsn(self.key, broker)
+            if self.suffix:
+                fqsn += f'.{self.suffix}'
+            keys.append(fqsn)
+
+        return keys


 def _nan_to_closest_num(array: np.ndarray):
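The precision helpers above both lean on `Decimal`'s tuple representation:
`float_digits()` reads the (negated) exponent and the removed
`quantize_size()` rounds a size to that many places. A quick worked demo of
both steps (values are illustrative)::

    from decimal import Decimal, ROUND_HALF_EVEN

    # number of decimal digits implied by a float's string repr
    assert int(-Decimal(str(0.001)).as_tuple().exponent) == 3
    assert int(-Decimal(str(0.25)).as_tuple().exponent) == 2

    # truncate a size to 3 "lot digits", banker's rounding
    digits = 3
    quantized = Decimal(str(0.123456)).quantize(
        Decimal(f'1.{"0".ljust(digits, "0")}'),
        rounding=ROUND_HALF_EVEN,
    )
    assert str(quantized) == '0.123'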
@@ -18,24 +18,13 @@
 ToOlS fOr CoPInG wITh "tHE wEB" protocols.

 """
-from contextlib import (
-    asynccontextmanager,
-    AsyncExitStack,
-)
-from itertools import count
+from contextlib import asynccontextmanager, AsyncExitStack
 from types import ModuleType
-from typing import (
-    Any,
-    Optional,
-    Callable,
-    AsyncGenerator,
-    Iterable,
-)
+from typing import Any, Callable, AsyncGenerator
 import json

 import trio
 import trio_websocket
-from wsproto.utilities import LocalProtocolError
 from trio_websocket._impl import (
     ConnectionClosed,
     DisconnectionTimeout,
@@ -46,53 +35,43 @@ from trio_websocket._impl import (

 from ..log import get_logger

-from .types import Struct

 log = get_logger(__name__)


 class NoBsWs:
-    '''
-    Make ``trio_websocket`` sockets stay up no matter the bs.
+    """Make ``trio_websocket`` sockets stay up no matter the bs.

-    You can provide a ``fixture`` async-context-manager which will be
-    enter/exitted around each reconnect operation.
-    '''
+    """
     recon_errors = (
         ConnectionClosed,
         DisconnectionTimeout,
         ConnectionRejected,
         HandshakeError,
         ConnectionTimeout,
-        LocalProtocolError,
     )

     def __init__(
         self,
         url: str,
+        token: str,
         stack: AsyncExitStack,
-        fixture: Optional[Callable] = None,
-        serializer: ModuleType = json
+        fixture: Callable,
+        serializer: ModuleType = json,
     ):
         self.url = url
+        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa

-        # TODO: is there some method we can call
-        # on the underlying `._ws` to get this?
-        self._connected: bool = False
-
     async def _connect(
         self,
         tries: int = 1000,
     ) -> None:

-        self._connected = False
         while True:
             try:
                 await self._stack.aclose()
-            except self.recon_errors:
+            except (DisconnectionTimeout, RuntimeError):
                 await trio.sleep(0.5)
             else:
                 break
@@ -103,18 +82,19 @@ class NoBsWs:
             self._ws = await self._stack.enter_async_context(
                 trio_websocket.open_websocket_url(self.url)
             )
-            if self.fixture is not None:
-                # rerun user code fixture
+            # rerun user code fixture
+            if self.token == '':
                 ret = await self._stack.enter_async_context(
                     self.fixture(self)
                 )
+            else:
+                ret = await self._stack.enter_async_context(
                     self.fixture(self, self.token)
+                )

             assert ret is None

             log.info(f'Connection success: {self.url}')

-            self._connected = True
             return self._ws

         except self.recon_errors as err:
@@ -124,15 +104,11 @@ class NoBsWs:
                     f'{type(err)}...retry attempt {i}'
                 )
                 await trio.sleep(0.5)
-                self._connected = False
                 continue
             else:
                 log.exception('ws connection fail...')
                 raise last_err

-    def connected(self) -> bool:
-        return self._connected
-
     async def send_msg(
         self,
         data: Any,
@@ -152,26 +128,21 @@ class NoBsWs:
         except self.recon_errors:
             await self._connect()

-    def __aiter__(self):
-        return self
-
-    async def __anext__(self):
-        return await self.recv_msg()
-

 @asynccontextmanager
 async def open_autorecon_ws(
     url: str,

-    # TODO: proper type cannot smh
-    fixture: Optional[Callable] = None,
+    # TODO: proper type annot smh
+    fixture: Callable,
+    # used for authenticated websockets
+    token: str = '',
 ) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, stack, fixture=fixture)
+        ws = NoBsWs(url, token, stack, fixture=fixture)
         await ws._connect()

         try:
@@ -179,114 +150,3 @@ async def open_autorecon_ws(

         finally:
             await stack.aclose()
-
-
-'''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
-
-'''
-
-
-class JSONRPCResult(Struct):
-    id: int
-    jsonrpc: str = '2.0'
-    result: Optional[dict] = None
-    error: Optional[dict] = None
-
-
-@asynccontextmanager
-async def open_jsonrpc_session(
-    url: str,
-    start_id: int = 0,
-    response_type: type = JSONRPCResult,
-    request_type: Optional[type] = None,
-    request_hook: Optional[Callable] = None,
-    error_hook: Optional[Callable] = None,
-) -> Callable[[str, dict], dict]:
-
-    async with (
-        trio.open_nursery() as n,
-        open_autorecon_ws(url) as ws
-    ):
-        rpc_id: Iterable = count(start_id)
-        rpc_results: dict[int, dict] = {}
-
-        async def json_rpc(method: str, params: dict) -> dict:
-            '''
-            perform a json rpc call and wait for the result, raise exception in
-            case of error field present on response
-            '''
-            msg = {
-                'jsonrpc': '2.0',
-                'id': next(rpc_id),
-                'method': method,
-                'params': params
-            }
-            _id = msg['id']
-
-            rpc_results[_id] = {
-                'result': None,
-                'event': trio.Event()
-            }
-
-            await ws.send_msg(msg)
-
-            await rpc_results[_id]['event'].wait()
-
-            ret = rpc_results[_id]['result']
-
-            del rpc_results[_id]
-
-            if ret.error is not None:
-                raise Exception(json.dumps(ret.error, indent=4))
-
-            return ret
-
-        async def recv_task():
-            '''
-            receives every ws message and stores it in its corresponding
-            result field, then sets the event to wakeup original sender
-            tasks. also recieves responses to requests originated from
-            the server side.
-
-            '''
-            async for msg in ws:
-                match msg:
-                    case {
-                        'result': _,
-                        'id': mid,
-                    } if res_entry := rpc_results.get(mid):
-
-                        res_entry['result'] = response_type(**msg)
-                        res_entry['event'].set()
-
-                    case {
-                        'result': _,
-                        'id': mid,
-                    } if not rpc_results.get(mid):
-                        log.warning(
-                            f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
-                        )
-
-                    case {
-                        'method': _,
-                        'params': _,
-                    }:
-                        log.debug(f'Recieved\n{msg}')
-                        if request_hook:
-                            await request_hook(request_type(**msg))
-
-                    case {
-                        'error': error
-                    }:
-                        log.warning(f'Recieved\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
-
-                    case _:
-                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
-
-        n.start_soon(recv_task)
-        yield json_rpc
-        n.cancel_scope.cancel()
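The removed `open_jsonrpc_session()` block multiplexes many in-flight
requests over one socket by parking each caller on a `trio.Event` keyed by
the msg ``id`` and waking it when a matching response arrives. A
stripped-down, runnable sketch of that correlation pattern (an in-memory
channel stands in for the websocket; names are hypothetical)::

    import trio

    async def main():
        send_chan, recv_chan = trio.open_memory_channel(10)  # fake "socket"
        results: dict[int, dict] = {}

        async def rpc(method: str, _id: int) -> dict:
            results[_id] = {'result': None, 'event': trio.Event()}
            await send_chan.send({'id': _id, 'method': method})
            await results[_id]['event'].wait()  # park until matched
            return results.pop(_id)['result']

        async def responder():
            # reply to every request; matching is purely on the ``id`` field
            async for msg in recv_chan:
                entry = results.get(msg['id'])
                if entry:
                    entry['result'] = {'id': msg['id'], 'ok': True}
                    entry['event'].set()

        async with trio.open_nursery() as n:
            n.start_soon(responder)
            print(await rpc('get_time', 0))
            print(await rpc('get_book', 1))
            n.cancel_scope.cancel()

    trio.run(main)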
@@ -18,22 +18,31 @@
 marketstore cli.

 """
+from functools import partial
+from pprint import pformat
+
+from anyio_marketstore import open_marketstore_client
 import trio
 import tractor
 import click
+import numpy as np

-from ..service.marketstore import (
-    # get_client,
+from .marketstore import (
+    get_client,
     # stream_quotes,
     ingest_quote_stream,
     # _url,
-    # _tick_tbk_ids,
-    # mk_tbk,
+    _tick_tbk_ids,
+    mk_tbk,
 )
 from ..cli import cli
 from .. import watchlists as wl
-from ..log import (
-    get_logger,
+from ..log import get_logger
+from ._sharedmem import (
+    maybe_open_shm_array,
+)
+from ._source import (
+    base_iohlc_dtype,
 )


@@ -104,11 +113,15 @@ def ms_stream(

 @cli.command()
 @click.option(
-    '--tsdb_host',
+    '--tl',
+    is_flag=True,
+    help='Enable tractor logging')
+@click.option(
+    '--host',
     default='localhost'
 )
 @click.option(
-    '--tsdb_port',
+    '--port',
     default=5993
 )
 @click.argument('symbols', nargs=-1)
@@ -124,93 +137,18 @@ def storesh(
     Start an IPython shell ready to query the local marketstore db.

     '''
-    from piker.data.marketstore import open_tsdb_client
-    from piker.service import open_piker_runtime
+    from piker.data.marketstore import tsdb_history_update
+    from piker._daemon import open_piker_runtime

     async def main():
         nonlocal symbols

         async with open_piker_runtime(
             'storesh',
-            enable_modules=['piker.service._ahab'],
+            enable_modules=['piker.data._ahab'],
         ):
             symbol = symbols[0]
-            async with open_tsdb_client(symbol):
-                # TODO: ask if user wants to write history for detected
-                # available shm buffers?
-                from tractor.trionics import ipython_embed
-                await ipython_embed()
-
-    trio.run(main)
-
-
-@cli.command()
-@click.option(
-    '--host',
-    default='localhost'
-)
-@click.option(
-    '--port',
-    default=5993
-)
-@click.option(
-    '--delete',
-    '-d',
-    is_flag=True,
-    help='Delete history (1 Min) for symbol(s)',
-)
-@click.argument('symbols', nargs=-1)
-@click.pass_obj
-def storage(
-    config,
-    host,
-    port,
-    symbols: list[str],
-    delete: bool,
-
-):
-    '''
-    Start an IPython shell ready to query the local marketstore db.
-
-    '''
-    from piker.service.marketstore import open_tsdb_client
-    from piker.service import open_piker_runtime
-
-    async def main():
-        nonlocal symbols
-
-        async with open_piker_runtime(
-            'tsdb_storage',
-            enable_modules=['piker.service._ahab'],
-        ):
-            symbol = symbols[0]
-            async with open_tsdb_client(symbol) as storage:
-                if delete:
-                    for fqsn in symbols:
-                        syms = await storage.client.list_symbols()
-
-                        resp60s = await storage.delete_ts(fqsn, 60)
-
-                        msgish = resp60s.ListFields()[0][1]
-                        if 'error' in str(msgish):
-
-                            # TODO: MEGA LOL, apparently the symbols don't
-                            # flush out until you refresh something or other
-                            # (maybe the WALFILE)... #lelandorlulzone, classic
-                            # alpaca(Rtm) design here ..
-                            # well, if we ever can make this work we
-                            # probably want to dogsplain the real reason
-                            # for the delete errurz..llululu
-                            if fqsn not in syms:
-                                log.error(f'Pair {fqsn} dne in DB')
-
-                            log.error(f'Deletion error: {fqsn}\n{msgish}')
-
-                        resp1s = await storage.delete_ts(fqsn, 1)
-                        msgish = resp1s.ListFields()[0][1]
-                        if 'error' in str(msgish):
-                            log.error(f'Deletion error: {fqsn}\n{msgish}')
+            await tsdb_history_update(symbol)

     trio.run(main)
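Both branches wire the marketstore connection params through stacked
`click` decorators plus a variadic ``symbols`` argument. A self-contained
sketch of that option pattern (command name and help strings are made up)::

    import click

    @click.command()
    @click.option('--host', default='localhost', help='marketstore host')
    @click.option('--port', default=5993, type=int, help='marketstore port')
    @click.argument('symbols', nargs=-1)
    def storeview(host: str, port: int, symbols: tuple[str, ...]) -> None:
        '''Echo what a storage subcommand would connect to.'''
        click.echo(f'would query {symbols or ("<all>",)} at {host}:{port}')

    if __name__ == '__main__':
        storeview()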
1925 piker/data/feed.py
File diff suppressed because it is too large
@@ -1,210 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-"""
-abstractions for organizing, managing and generally operating-on
-real-time data processing data-structures.
-
-"Streams, flumes, cascades and flows.."
-
-"""
-from __future__ import annotations
-from typing import (
-    TYPE_CHECKING,
-)
-
-import tractor
-import pendulum
-import numpy as np
-
-from .types import Struct
-from ._source import (
-    Symbol,
-)
-from ._sharedmem import (
-    attach_shm_array,
-    ShmArray,
-    _Token,
-)
-# from .._profile import (
-#     Profiler,
-#     pg_profile_enabled,
-# )
-
-if TYPE_CHECKING:
-    # from pyqtgraph import PlotItem
-    from .feed import Feed
-
-
-# TODO: ideas for further abstractions as per
-# https://github.com/pikers/piker/issues/216 and
-# https://github.com/pikers/piker/issues/270:
-# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
-#   as per circuit parlance:
-#   https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
-#   - could cover the combination of our `FspAdmin` and the
-#     backend `.fsp._engine` related machinery to "connect" one flume
-#     to another?
-# - a (financial signal) ``Flow`` would be the a "collection" of such
-#   minmial cascades. Some engineering based jargon concepts:
-#   - https://en.wikipedia.org/wiki/Signal_chain
-#   - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
-#   - https://en.wikipedia.org/wiki/Audio_signal_flow
-#   - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
-#   - https://en.wikipedia.org/wiki/Dataflow_programming
-#   - https://en.wikipedia.org/wiki/Signal_programming
-#   - https://en.wikipedia.org/wiki/Incremental_computing
-
-
-class Flume(Struct):
-    '''
-    Composite reference type which points to all the addressing handles
-    and other meta-data necessary for the read, measure and management
-    of a set of real-time updated data flows.
-
-    Can be thought of as a "flow descriptor" or "flow frame" which
-    describes the high level properties of a set of data flows that can
-    be used seamlessly across process-memory boundaries.
-
-    Each instance's sub-components normally includes:
-     - a msg oriented quote stream provided via an IPC transport
-     - history and real-time shm buffers which are both real-time
-       updated and backfilled.
-     - associated startup indexing information related to both buffer
-       real-time-append and historical prepend addresses.
-     - low level APIs to read and measure the updated data and manage
-       queuing properties.
-
-    '''
-    symbol: Symbol
-    first_quote: dict
-    _rt_shm_token: _Token
-
-    # optional since some data flows won't have a "downsampled" history
-    # buffer/stream (eg. FSPs).
-    _hist_shm_token: _Token | None = None
-
-    # private shm refs loaded dynamically from tokens
-    _hist_shm: ShmArray | None = None
-    _rt_shm: ShmArray | None = None
-
-    stream: tractor.MsgStream | None = None
-    izero_hist: int = 0
-    izero_rt: int = 0
-    throttle_rate: int | None = None
-
-    # TODO: do we need this really if we can pull the `Portal` from
-    # ``tractor``'s internals?
-    feed: Feed | None = None
-
-    @property
-    def rt_shm(self) -> ShmArray:
-
-        if self._rt_shm is None:
-            self._rt_shm = attach_shm_array(
-                token=self._rt_shm_token,
-                readonly=True,
-            )
-
-        return self._rt_shm
-
-    @property
-    def hist_shm(self) -> ShmArray:
-
-        if self._hist_shm_token is None:
-            raise RuntimeError(
-                'No shm token has been set for the history buffer?'
-            )
-
-        if (
-            self._hist_shm is None
-        ):
-            self._hist_shm = attach_shm_array(
-                token=self._hist_shm_token,
-                readonly=True,
-            )
-
-        return self._hist_shm
-
-    async def receive(self) -> dict:
-        return await self.stream.receive()
-
-    def get_ds_info(
-        self,
-    ) -> tuple[float, float, float]:
-        '''
-        Compute the "downsampling" ratio info between the historical shm
-        buffer and the real-time (HFT) one.
-
-        Return a tuple of the fast sample period, historical sample
-        period and ratio between them.
-
-        '''
-        times = self.hist_shm.array['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        hist_step_size_s = (end - start).seconds
-
-        times = self.rt_shm.array['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        rt_step_size_s = (end - start).seconds
-
-        ratio = hist_step_size_s / rt_step_size_s
-        return (
-            rt_step_size_s,
-            hist_step_size_s,
-            ratio,
-        )
-
-    # TODO: get native msgspec decoding for these workinn
-    def to_msg(self) -> dict:
-        msg = self.to_dict()
-        msg['symbol'] = msg['symbol'].to_dict()
-
-        # can't serialize the stream or feed objects, it's expected
-        # you'll have a ref to it since this msg should be rxed on
-        # a stream on whatever far end IPC..
-        msg.pop('stream')
-        msg.pop('feed')
-        return msg
-
-    @classmethod
-    def from_msg(cls, msg: dict) -> dict:
-        symbol = Symbol(**msg.pop('symbol'))
-        return cls(
-            symbol=symbol,
-            **msg,
-        )
-
-    def get_index(
-        self,
-        time_s: float,
-        array: np.ndarray,
-
-    ) -> int | float:
-        '''
-        Return array shm-buffer index for for epoch time.
-
-        '''
-        times = array['time']
-        first = np.searchsorted(
-            times,
-            time_s,
-            side='left',
-        )
-        imx = times.shape[0] - 1
-        return min(first, imx)
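`Flume.get_ds_info()` in the deleted module above infers each buffer's
sample period from the gap between the last timestamp and the most recent
*distinct* one before it, then takes the hist/rt ratio. The same
measurement with plain ``numpy`` and fabricated sample data::

    import numpy as np

    def step_size_s(times: np.ndarray) -> float:
        # seconds between the newest stamp and the previous distinct one
        last = times[-1]
        prev = times[times != last][-1]
        return float(last - prev)

    hist = np.arange(0, 600, 60, dtype=float)  # 1m bars
    rt = np.arange(540, 600, 1, dtype=float)   # 1s bars

    ratio = step_size_s(hist) / step_size_s(rt)
    assert ratio == 60.0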
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -25,49 +25,47 @@
 '''
 from __future__ import annotations
 from contextlib import asynccontextmanager as acm
+from datetime import datetime
+from pprint import pformat
 from typing import (
     Any,
     Optional,
+    Union,
     TYPE_CHECKING,
 )
 import time
 from math import isnan
-from pathlib import Path

 from bidict import bidict
-from msgspec.msgpack import (
-    encode,
-    decode,
-)
-# import pyqtgraph as pg
+import msgpack
+import pyqtgraph as pg
 import numpy as np
 import tractor
 from trio_websocket import open_websocket_url
-from anyio_marketstore import (  # noqa
+from anyio_marketstore import (
     open_marketstore_client,
     MarketstoreClient,
     Params,
 )
 import pendulum
-# TODO: import this for specific error set expected by mkts client
-# import purerpc
-
-from ..data.feed import maybe_open_feed
-from ..log import get_logger, get_console_log
+import purerpc

 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer

+from .feed import maybe_open_feed
+from ..log import get_logger, get_console_log


 log = get_logger(__name__)


-# ahabd-supervisor and container level config
+# container level config
 _config = {
     'grpc_listen_port': 5995,
     'ws_listen_port': 5993,
     'log_level': 'debug',
-    'startup_timeout': 2,
 }

 _yaml_config = '''
@@ -133,10 +131,7 @@ def start_marketstore(
     mktsdir = os.path.join(config._config_dir, 'marketstore')

-    # create dirs when dne
-    if not os.path.isdir(config._config_dir):
-        Path(config._config_dir).mkdir(parents=True, exist_ok=True)
-
+    # create when dne
     if not os.path.isdir(mktsdir):
         os.mkdir(mktsdir)

@@ -185,29 +180,18 @@ def start_marketstore(
             config_dir_mnt,
             data_dir_mnt,
         ],

-        # XXX: this must be set to allow backgrounding/non-blocking
-        # usage interaction with the container's process.
         detach=True,

         # stop_signal='SIGINT',
         init=True,
         # remove=True,
     )

-    async def start_matcher(msg: str):
-        return "launching tcp listener for all services..." in msg
-
-    async def stop_matcher(msg: str):
-        return "exiting..." in msg
-
     return (
         dcntr,
         _config,

         # expected startup and stop msgs
-        start_matcher,
-        stop_matcher,
+        "launching tcp listener for all services...",
+        "exiting...",
     )


@@ -328,7 +312,7 @@ def quote_to_marketstore_structarray(
 @acm
 async def get_client(
     host: str = 'localhost',
-    port: int = _config['grpc_listen_port'],
+    port: int = 5995

 ) -> MarketstoreClient:
     '''
@@ -370,6 +354,340 @@ tf_in_1s = bidict({
 })


+class Storage:
+    '''
+    High level storage api for both real-time and historical ingest.
+
+    '''
+    def __init__(
+        self,
+        client: MarketstoreClient,
+
+    ) -> None:
+        # TODO: eventually this should be an api/interface type that
+        # ensures we can support multiple tsdb backends.
+        self.client = client
+
+        # series' cache from tsdb reads
+        self._arrays: dict[str, np.ndarray] = {}
+
+    async def list_keys(self) -> list[str]:
+        return await self.client.list_symbols()
+
+    async def search_keys(self, pattern: str) -> list[str]:
+        '''
+        Search for time series key in the storage backend.
+
+        '''
+        ...
+
+    async def write_ticks(self, ticks: list) -> None:
+        ...
+
+    async def load(
+        self,
+        fqsn: str,
+
+    ) -> tuple[
+        dict[int, np.ndarray],  # timeframe (in secs) to series
+        Optional[datetime],  # first dt
+        Optional[datetime],  # last dt
+    ]:
+
+        first_tsdb_dt, last_tsdb_dt = None, None
+        tsdb_arrays = await self.read_ohlcv(
+            fqsn,
+            # on first load we don't need to pull the max
+            # history per request size worth.
+            limit=3000,
+        )
+        log.info(f'Loaded tsdb history {tsdb_arrays}')
+
+        if tsdb_arrays:
+            fastest = list(tsdb_arrays.values())[0]
+            times = fastest['Epoch']
+            first, last = times[0], times[-1]
+            first_tsdb_dt, last_tsdb_dt = map(
+                pendulum.from_timestamp, [first, last]
+            )
+
+        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
+
+    async def read_ohlcv(
+        self,
+        fqsn: str,
+        timeframe: Optional[Union[int, str]] = None,
+        end: Optional[int] = None,
+        limit: int = int(800e3),
+
+    ) -> tuple[
+        MarketstoreClient,
+        Union[dict, np.ndarray]
+    ]:
+        client = self.client
+        syms = await client.list_symbols()
+
+        if fqsn not in syms:
+            return {}
+
+        tfstr = tf_in_1s[1]
+
+        params = Params(
+            symbols=fqsn,
+            timeframe=tfstr,
+            attrgroup='OHLCV',
+            end=end,
+            # limit_from_start=True,
+
+            # TODO: figure the max limit here given the
+            # ``purepc`` msg size limit of purerpc: 33554432
+            limit=limit,
+        )
+
+        if timeframe is None:
+            log.info(f'starting {fqsn} tsdb granularity scan..')
+            # loop through and try to find highest granularity
+            for tfstr in tf_in_1s.values():
+                try:
+                    log.info(f'querying for {tfstr}@{fqsn}')
+                    params.set('timeframe', tfstr)
+                    result = await client.query(params)
+                    break
+
+                except purerpc.grpclib.exceptions.UnknownError:
+                    # XXX: this is already logged by the container and
+                    # thus shows up through `marketstored` logs relay.
+                    # log.warning(f'{tfstr}@{fqsn} not found')
+                    continue
+            else:
+                return {}
+
+        else:
+            result = await client.query(params)
+
+        # TODO: it turns out column access on recarrays is actually slower:
+        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
+        # it might make sense to make these structured arrays?
+        # Fill out a `numpy` array-results map
+        arrays = {}
+        for fqsn, data_set in result.by_symbols().items():
+            arrays.setdefault(fqsn, {})[
+                tf_in_1s.inverse[data_set.timeframe]
+            ] = data_set.array
+
+        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
+
+    async def delete_ts(
+        self,
+        key: str,
+        timeframe: Optional[Union[int, str]] = None,
+
+    ) -> bool:
+
+        client = self.client
+        syms = await client.list_symbols()
+        print(syms)
+        # if key not in syms:
+        #     raise KeyError(f'`{fqsn}` table key not found?')
+
+        return await client.destroy(tbk=key)
+
+    async def write_ohlcv(
+        self,
+        fqsn: str,
+        ohlcv: np.ndarray,
+        append_and_duplicate: bool = True,
+        limit: int = int(800e3),
+
+    ) -> None:
+        # build mkts schema compat array for writing
+        mkts_dt = np.dtype(_ohlcv_dt)
+        mkts_array = np.zeros(
+            len(ohlcv),
+            dtype=mkts_dt,
+        )
+        # copy from shm array (yes it's this easy):
+        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
+        mkts_array[:] = ohlcv[[
+            'time',
+            'open',
+            'high',
+            'low',
+            'close',
+            'volume',
+        ]]
+
+        m, r = divmod(len(mkts_array), limit)
+
+        for i in range(m, 1):
+            to_push = mkts_array[i-1:i*limit]
+
+            # write to db
+            resp = await self.client.write(
+                to_push,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: will will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=append_and_duplicate,
+            )
+
+            log.info(
+                f'Wrote {mkts_array.size} datums to tsdb\n'
+            )
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)
+
+        if r:
+            to_push = mkts_array[m*limit:]
+
+            # write to db
+            resp = await self.client.write(
+                to_push,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: will will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=append_and_duplicate,
+            )
+
+            log.info(
+                f'Wrote {mkts_array.size} datums to tsdb\n'
+            )
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)
+
+    # XXX: currently the only way to do this is through the CLI:
+
+    # sudo ./marketstore connect --dir ~/.config/piker/data
+    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
+    # and this seems to block and use up mem..
+    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
+
+    # relevant source code for this is here:
+    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
+    # def delete_range(self, start_dt, end_dt) -> None:
+    #     ...
+
+
+@acm
+async def open_storage_client(
+    fqsn: str,
+    period: Optional[Union[int, str]] = None,  # in seconds
+
+) -> tuple[Storage, dict[str, np.ndarray]]:
+    '''
+    Load a series by key and deliver in ``numpy`` struct array format.
+
+    '''
+    async with (
+        # eventually a storage backend endpoint
+        get_client() as client,
+    ):
+        # slap on our wrapper api
+        yield Storage(client)
+
+
+async def tsdb_history_update(
+    fqsn: Optional[str] = None,
+
+) -> list[str]:
+
+    # TODO: real-time dedicated task for ensuring
+    # history consistency between the tsdb, shm and real-time feed..
+
+    # update sequence design notes:
+
+    # - load existing highest frequency data from mkts
+    #   * how do we want to offer this to the UI?
+    #    - lazy loading?
+    #   - try to load it all and expect graphics caching/diffing
+    #     to hide extra bits that aren't in view?
+
+    # - compute the diff between latest data from broker and shm
+    #   * use sql api in mkts to determine where the backend should
+    #     start querying for data?
+    #   * append any diff with new shm length
+    #   * determine missing (gapped) history by scanning
+    #   * how far back do we look?
+
+    # - begin rt update ingest and aggregation
+    #   * could start by always writing ticks to mkts instead of
+    #     worrying about a shm queue for now.
+    #   * we have a short list of shm queues worth groking:
+    #     - https://github.com/pikers/piker/issues/107
+    #   * the original data feed arch blurb:
+    #     - https://github.com/pikers/piker/issues/98
+    #
+    profiler = pg.debug.Profiler(
+        disabled=False,  # not pg_profile_enabled(),
+        delayed=False,
+    )
+
+    async with (
+        open_storage_client(fqsn) as storage,
+
+        maybe_open_feed(
+            [fqsn],
+            start_stream=False,
+
+        ) as (feed, stream),
+    ):
+        profiler(f'opened feed for {fqsn}')
+
+        to_append = feed.shm.array
+        to_prepend = None
+
+        if fqsn:
+            symbol = feed.symbols.get(fqsn)
+            if symbol:
+                fqsn = symbol.front_fqsn()
+
+            # diff db history with shm and only write the missing portions
+            ohlcv = feed.shm.array
+
+            # TODO: use pg profiler
+            tsdb_arrays = await storage.read_ohlcv(fqsn)
+            # hist diffing
+            if tsdb_arrays:
+                for secs in (1, 60):
+                    ts = tsdb_arrays.get(secs)
+                    if ts is not None and len(ts):
+                        # these aren't currently used but can be referenced from
+                        # within the embedded ipython shell below.
+                        to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
+                        to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
+
+            profiler('Finished db arrays diffs')
+
+        syms = await storage.client.list_symbols()
+        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
+        profiler(f'listed symbols {syms}')
+
+        # TODO: ask if user wants to write history for detected
+        # available shm buffers?
+        from tractor.trionics import ipython_embed
+        await ipython_embed()
+
+        # for array in [to_append, to_prepend]:
+        #     if array is None:
+        #         continue
+
+        #     log.info(
+        #         f'Writing datums {array.size} -> to tsdb from shm\n'
+        #     )
+        #     await storage.write_ohlcv(fqsn, array)
+
+        # profiler('Finished db writes')
+
+
 async def ingest_quote_stream(
     symbols: list[str],
     brokername: str,
@@ -456,13 +774,12 @@ async def stream_quotes(
     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
         # send subs topics to server
         resp = await ws.send_message(
-            encode({'streams': list(tbks.values())})
+            msgpack.dumps({'streams': list(tbks.values())})
         )
         log.info(resp)

         async def recv() -> dict[str, Any]:
-            return decode((await ws.get_message()), encoding='utf-8')
+            return msgpack.loads((await ws.get_message()), encoding='utf-8')

         streams = (await recv())['streams']
         log.info(f"Subscribed to {streams}")
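One thing worth flagging in `Storage.write_ohlcv()` above: it batches with
``m, r = divmod(len(mkts_array), limit)`` but then loops ``for i in
range(m, 1)``, whose body never runs once ``m >= 1``, so only the remainder
branch ever writes. A corrected sketch of limit-sized batching (helper name
is hypothetical)::

    import numpy as np

    def iter_batches(arr: np.ndarray, limit: int):
        # yield successive ``limit``-sized chunks, then any remainder
        m, r = divmod(len(arr), limit)
        for i in range(m):
            yield arr[i * limit:(i + 1) * limit]
        if r:
            yield arr[m * limit:]

    data = np.arange(25)
    sizes = [len(chunk) for chunk in iter_batches(data, limit=10)]
    assert sizes == [10, 10, 5]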
@ -1,88 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Built-in (extension) types.
|
|
||||||
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
from typing import Optional
|
|
||||||
from pprint import pformat
|
|
||||||
|
|
||||||
import msgspec
|
|
||||||
|
|
||||||
|
|
||||||
class Struct(
|
|
||||||
msgspec.Struct,
|
|
||||||
|
|
||||||
# https://jcristharif.com/msgspec/structs.html#tagged-unions
|
|
||||||
# tag='pikerstruct',
|
|
||||||
# tag=True,
|
|
||||||
):
|
|
||||||
'''
|
|
||||||
A "human friendlier" (aka repl buddy) struct subtype.
|
|
||||||
|
|
||||||
'''
|
|
||||||
def to_dict(self) -> dict:
|
|
||||||
return {
|
|
||||||
f: getattr(self, f)
|
|
||||||
for f in self.__struct_fields__
|
|
||||||
}
|
|
||||||
|
|
||||||
# Lul, doesn't seem to work that well..
|
|
||||||
# def __repr__(self):
|
|
||||||
# # only turn on pprint when we detect a python REPL
|
|
||||||
# # at runtime B)
|
|
||||||
# if (
|
|
||||||
# hasattr(sys, 'ps1')
|
|
||||||
# # TODO: check if we're in pdb
|
|
||||||
# ):
|
|
||||||
# return self.pformat()
|
|
||||||
|
|
||||||
# return super().__repr__()
|
|
||||||
|
|
||||||
def pformat(self) -> str:
|
|
||||||
return f'Struct({pformat(self.to_dict())})'
|
|
||||||
|
|
||||||
def copy(
|
|
||||||
self,
|
|
||||||
update: Optional[dict] = None,
|
|
||||||
|
|
||||||
) -> msgspec.Struct:
|
|
||||||
'''
|
|
||||||
Validate-typecast all self defined fields, return a copy of us
|
|
||||||
with all such fields.
|
|
||||||
|
|
||||||
This is kinda like the default behaviour in `pydantic.BaseModel`.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if update:
|
|
||||||
for k, v in update.items():
|
|
||||||
setattr(self, k, v)
|
|
||||||
|
|
||||||
# roundtrip serialize to validate
|
|
||||||
return msgspec.msgpack.Decoder(
|
|
||||||
type=type(self)
|
|
||||||
).decode(
|
|
||||||
msgspec.msgpack.Encoder().encode(self)
|
|
||||||
)
|
|
||||||
|
|
||||||
def typecast(
|
|
||||||
self,
|
|
||||||
# fields: Optional[list[str]] = None,
|
|
||||||
) -> None:
|
|
||||||
for fname, ftype in self.__annotations__.items():
|
|
||||||
setattr(self, fname, ftype(getattr(self, fname)))
|
|
|
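``copy()`` above validates by serializing through msgspec's msgpack
codec and decoding back against the struct's own type. The same trick
in isolation, as a minimal sketch::

    import msgspec

    class Point(msgspec.Struct):
        x: int
        y: float

    def validated_copy(obj: msgspec.Struct) -> msgspec.Struct:
        # encode then decode against the same type: any field that
        # no longer matches its annotation raises on the way back in
        return msgspec.msgpack.Decoder(type=type(obj)).decode(
            msgspec.msgpack.Encoder().encode(obj)
        )

    assert validated_copy(Point(x=1, y=2.0)) == Point(x=1, y=2.0)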
@ -78,8 +78,7 @@ class Fsp:
|
||||||
# + the consuming fsp *to* the consumers output
|
# + the consuming fsp *to* the consumers output
|
||||||
# shm flow.
|
# shm flow.
|
||||||
_flow_registry: dict[
|
_flow_registry: dict[
|
||||||
tuple[_Token, str],
|
tuple[_Token, str], _Token,
|
||||||
tuple[_Token, Optional[ShmArray]],
|
|
||||||
] = {}
|
] = {}
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
|
@ -121,6 +120,7 @@ class Fsp:
|
||||||
):
|
):
|
||||||
return self.func(*args, **kwargs)
|
return self.func(*args, **kwargs)
|
||||||
|
|
||||||
|
# TODO: lru_cache this? prettty sure it'll work?
|
||||||
def get_shm(
|
def get_shm(
|
||||||
self,
|
self,
|
||||||
src_shm: ShmArray,
|
src_shm: ShmArray,
|
||||||
|
@ -131,27 +131,12 @@ class Fsp:
|
||||||
for this "instance" of a signal processor for
|
for this "instance" of a signal processor for
|
||||||
the given ``key``.
|
the given ``key``.
|
||||||
|
|
||||||
The destination shm "token" and array are cached if possible to
|
|
||||||
minimize multiple stdlib/system calls.
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
dst_token, maybe_array = self._flow_registry[
|
dst_token = self._flow_registry[
|
||||||
(src_shm._token, self.name)
|
(src_shm._token, self.name)
|
||||||
]
|
]
|
||||||
if maybe_array is None:
|
shm = attach_shm_array(dst_token)
|
||||||
self._flow_registry[
|
return shm
|
||||||
(src_shm._token, self.name)
|
|
||||||
] = (
|
|
||||||
dst_token,
|
|
||||||
# "cache" the ``ShmArray`` such that
|
|
||||||
# we call the underlying "attach" code as few
|
|
||||||
# times as possible as per:
|
|
||||||
# - https://github.com/pikers/piker/issues/359
|
|
||||||
# - https://github.com/pikers/piker/issues/332
|
|
||||||
maybe_array := attach_shm_array(dst_token)
|
|
||||||
)
|
|
||||||
|
|
||||||
return maybe_array
|
|
||||||
|
|
||||||
|
|
||||||
def fsp(
|
def fsp(
|
||||||
|
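The left column above adds a one-shot cache so the underlying attach
call runs at most once per (source, fsp) flow, per the linked issues
#359 and #332. The shape of that memoization, sketched with a plain
dict and a hypothetical ``attach()`` stand-in::

    from typing import Optional

    _registry: dict[str, tuple[str, Optional[str]]] = {}

    def attach(token: str) -> str:
        # hypothetical stand-in for ``attach_shm_array()``: pretend
        # the token itself resolves to an attached handle
        return f'<shm:{token}>'

    def get_cached(key: str) -> str:
        token, maybe_array = _registry[key]
        if maybe_array is None:
            # first lookup: attach once and memoize the handle
            _registry[key] = (token, maybe_array := attach(token))
        return maybe_array

    _registry['ohlcv.rsi'] = ('tok-1', None)
    assert get_cached('ohlcv.rsi') == get_cached('ohlcv.rsi')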
@ -199,10 +184,7 @@ def maybe_mk_fsp_shm(
|
||||||
# TODO: load output types from `Fsp`
|
# TODO: load output types from `Fsp`
|
||||||
# - should `index` be a required internal field?
|
# - should `index` be a required internal field?
|
||||||
fsp_dtype = np.dtype(
|
fsp_dtype = np.dtype(
|
||||||
[('index', int)]
|
[('index', int)] +
|
||||||
+
|
|
||||||
[('time', float)]
|
|
||||||
+
|
|
||||||
[(field_name, float) for field_name in target.outputs]
|
[(field_name, float) for field_name in target.outputs]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
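``maybe_mk_fsp_shm()`` above just concatenates field lists into a
numpy structured dtype; the left side additionally threads a ``time``
column through. Building such a dtype, with a hypothetical output
name::

    import numpy as np

    outputs = ['rsi']  # hypothetical fsp output field names

    fsp_dtype = np.dtype(
        [('index', int)]
        + [('time', float)]
        + [(field_name, float) for field_name in outputs]
    )

    arr = np.zeros(4, dtype=fsp_dtype)
    assert arr.dtype.names == ('index', 'time', 'rsi')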
@ -21,13 +21,12 @@ core task logic for processing chains
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from typing import (
|
from typing import (
|
||||||
AsyncIterator,
|
AsyncIterator, Callable, Optional,
|
||||||
Callable,
|
|
||||||
Optional,
|
|
||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
import pyqtgraph as pg
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
import tractor
|
import tractor
|
||||||
|
@ -36,22 +35,14 @@ from tractor.msg import NamespacePath
|
||||||
from ..log import get_logger, get_console_log
|
from ..log import get_logger, get_console_log
|
||||||
from .. import data
|
from .. import data
|
||||||
from ..data import attach_shm_array
|
from ..data import attach_shm_array
|
||||||
from ..data.feed import (
|
from ..data.feed import Feed
|
||||||
Flume,
|
|
||||||
Feed,
|
|
||||||
)
|
|
||||||
from ..data._sharedmem import ShmArray
|
from ..data._sharedmem import ShmArray
|
||||||
from ..data._sampling import (
|
|
||||||
_default_delay_s,
|
|
||||||
open_sample_stream,
|
|
||||||
)
|
|
||||||
from ..data._source import Symbol
|
from ..data._source import Symbol
|
||||||
from ._api import (
|
from ._api import (
|
||||||
Fsp,
|
Fsp,
|
||||||
_load_builtins,
|
_load_builtins,
|
||||||
_Token,
|
_Token,
|
||||||
)
|
)
|
||||||
from .._profile import Profiler
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
@ -86,7 +77,7 @@ async def filter_quotes_by_sym(
|
||||||
async def fsp_compute(
|
async def fsp_compute(
|
||||||
|
|
||||||
symbol: Symbol,
|
symbol: Symbol,
|
||||||
flume: Flume,
|
feed: Feed,
|
||||||
quote_stream: trio.abc.ReceiveChannel,
|
quote_stream: trio.abc.ReceiveChannel,
|
||||||
|
|
||||||
src: ShmArray,
|
src: ShmArray,
|
||||||
|
@ -99,7 +90,7 @@ async def fsp_compute(
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
profiler = Profiler(
|
profiler = pg.debug.Profiler(
|
||||||
delayed=False,
|
delayed=False,
|
||||||
disabled=True
|
disabled=True
|
||||||
)
|
)
|
||||||
|
@ -114,17 +105,16 @@ async def fsp_compute(
|
||||||
filter_quotes_by_sym(fqsn, quote_stream),
|
filter_quotes_by_sym(fqsn, quote_stream),
|
||||||
|
|
||||||
# XXX: currently the ``ohlcv`` arg
|
# XXX: currently the ``ohlcv`` arg
|
||||||
flume.rt_shm,
|
feed.shm,
|
||||||
)
|
)
|
||||||
|
|
||||||
# HISTORY COMPUTE PHASE
|
# Conduct a single iteration of fsp with historical bars input
|
||||||
# conduct a single iteration of fsp with historical bars input
|
# and get historical output
|
||||||
# and get historical output.
|
|
||||||
history_output: Union[
|
history_output: Union[
|
||||||
dict[str, np.ndarray], # multi-output case
|
dict[str, np.ndarray], # multi-output case
|
||||||
np.ndarray, # single output case
|
np.ndarray, # single output case
|
||||||
]
|
]
|
||||||
history_output = await anext(out_stream)
|
history_output = await out_stream.__anext__()
|
||||||
|
|
||||||
func_name = func.__name__
|
func_name = func.__name__
|
||||||
profiler(f'{func_name} generated history')
|
profiler(f'{func_name} generated history')
|
||||||
|
@ -136,13 +126,9 @@ async def fsp_compute(
|
||||||
# each respective field.
|
# each respective field.
|
||||||
fields = getattr(dst.array.dtype, 'fields', None).copy()
|
fields = getattr(dst.array.dtype, 'fields', None).copy()
|
||||||
fields.pop('index')
|
fields.pop('index')
|
||||||
history_by_field: Optional[np.ndarray] = None
|
history: Optional[np.ndarray] = None # TODO: nptyping here!
|
||||||
src_time = src.array['time']
|
|
||||||
|
|
||||||
if (
|
if fields and len(fields) > 1 and fields:
|
||||||
fields and
|
|
||||||
len(fields) > 1
|
|
||||||
):
|
|
||||||
if not isinstance(history_output, dict):
|
if not isinstance(history_output, dict):
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
f'`{func_name}` is a multi-output FSP and should yield a '
|
f'`{func_name}` is a multi-output FSP and should yield a '
|
||||||
|
@ -153,7 +139,7 @@ async def fsp_compute(
|
||||||
if key in history_output:
|
if key in history_output:
|
||||||
output = history_output[key]
|
output = history_output[key]
|
||||||
|
|
||||||
if history_by_field is None:
|
if history is None:
|
||||||
|
|
||||||
if output is None:
|
if output is None:
|
||||||
length = len(src.array)
|
length = len(src.array)
|
||||||
|
@ -163,7 +149,7 @@ async def fsp_compute(
|
||||||
# using the first output, determine
|
# using the first output, determine
|
||||||
# the length of the struct-array that
|
# the length of the struct-array that
|
||||||
# will be pushed to shm.
|
# will be pushed to shm.
|
||||||
history_by_field = np.zeros(
|
history = np.zeros(
|
||||||
length,
|
length,
|
||||||
dtype=dst.array.dtype
|
dtype=dst.array.dtype
|
||||||
)
|
)
|
||||||
|
@ -171,7 +157,7 @@ async def fsp_compute(
|
||||||
if output is None:
|
if output is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
history_by_field[key] = output
|
history[key] = output
|
||||||
|
|
||||||
# single-key output stream
|
# single-key output stream
|
||||||
else:
|
else:
|
||||||
|
@ -180,15 +166,11 @@ async def fsp_compute(
|
||||||
f'`{func_name}` is a single output FSP and should yield an '
|
f'`{func_name}` is a single output FSP and should yield an '
|
||||||
'`np.ndarray` for history'
|
'`np.ndarray` for history'
|
||||||
)
|
)
|
||||||
history_by_field = np.zeros(
|
history = np.zeros(
|
||||||
len(history_output),
|
len(history_output),
|
||||||
dtype=dst.array.dtype
|
dtype=dst.array.dtype
|
||||||
)
|
)
|
||||||
history_by_field[func_name] = history_output
|
history[func_name] = history_output
|
||||||
|
|
||||||
history_by_field['time'] = src_time[-len(history_by_field):]
|
|
||||||
|
|
||||||
history_output['time'] = src.array['time']
|
|
||||||
|
|
||||||
# TODO: XXX:
|
# TODO: XXX:
|
||||||
# THERE'S A BIG BUG HERE WITH THE `index` field since we're
|
# THERE'S A BIG BUG HERE WITH THE `index` field since we're
|
||||||
|
@ -205,10 +187,7 @@ async def fsp_compute(
|
||||||
|
|
||||||
# TODO: can we use this `start` flag instead of the manual
|
# TODO: can we use this `start` flag instead of the manual
|
||||||
# setting above?
|
# setting above?
|
||||||
index = dst.push(
|
index = dst.push(history, start=first)
|
||||||
history_by_field,
|
|
||||||
start=first,
|
|
||||||
)
|
|
||||||
|
|
||||||
profiler(f'{func_name} pushed history')
|
profiler(f'{func_name} pushed history')
|
||||||
profiler.finish()
|
profiler.finish()
|
||||||
|
@ -234,14 +213,8 @@ async def fsp_compute(
|
||||||
|
|
||||||
log.debug(f"{func_name}: {processed}")
|
log.debug(f"{func_name}: {processed}")
|
||||||
key, output = processed
|
key, output = processed
|
||||||
# dst.array[-1][key] = output
|
index = src.index
|
||||||
dst.array[[key, 'time']][-1] = (
|
dst.array[-1][key] = output
|
||||||
output,
|
|
||||||
# TODO: what about pushing ``time.time_ns()``
|
|
||||||
# in which case we'll need to round at the graphics
|
|
||||||
# processing / sampling layer?
|
|
||||||
src.array[-1]['time']
|
|
||||||
)
|
|
||||||
|
|
||||||
# NOTE: for now we aren't streaming this to the consumer
|
# NOTE: for now we aren't streaming this to the consumer
|
||||||
# stream latest array index entry which basically just acts
|
# stream latest array index entry which basically just acts
|
||||||
|
@ -252,7 +225,6 @@ async def fsp_compute(
|
||||||
# N-consumers who subscribe for the real-time output,
|
# N-consumers who subscribe for the real-time output,
|
||||||
# which we'll likely want to implement using local-mem
|
# which we'll likely want to implement using local-mem
|
||||||
# chans for the fan out?
|
# chans for the fan out?
|
||||||
# index = src.index
|
|
||||||
# if attach_stream:
|
# if attach_stream:
|
||||||
# await client_stream.send(index)
|
# await client_stream.send(index)
|
||||||
|
|
||||||
|
@ -289,7 +261,7 @@ async def cascade(
|
||||||
destination shm array buffer.
|
destination shm array buffer.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
profiler = Profiler(
|
profiler = pg.debug.Profiler(
|
||||||
delayed=False,
|
delayed=False,
|
||||||
disabled=False
|
disabled=False
|
||||||
)
|
)
|
||||||
|
@ -312,10 +284,9 @@ async def cascade(
|
||||||
# TODO: ugh i hate this wind/unwind to list over the wire
|
# TODO: ugh i hate this wind/unwind to list over the wire
|
||||||
# but not sure how else to do it.
|
# but not sure how else to do it.
|
||||||
for (token, fsp_name, dst_token) in shm_registry:
|
for (token, fsp_name, dst_token) in shm_registry:
|
||||||
Fsp._flow_registry[(
|
Fsp._flow_registry[
|
||||||
_Token.from_msg(token),
|
(_Token.from_msg(token), fsp_name)
|
||||||
fsp_name,
|
] = _Token.from_msg(dst_token)
|
||||||
)] = _Token.from_msg(dst_token), None
|
|
||||||
|
|
||||||
fsp: Fsp = reg.get(
|
fsp: Fsp = reg.get(
|
||||||
NamespacePath(ns_path)
|
NamespacePath(ns_path)
|
||||||
|
@ -327,7 +298,6 @@ async def cascade(
|
||||||
raise ValueError(f'Unknown fsp target: {ns_path}')
|
raise ValueError(f'Unknown fsp target: {ns_path}')
|
||||||
|
|
||||||
# open a data feed stream with requested broker
|
# open a data feed stream with requested broker
|
||||||
feed: Feed
|
|
||||||
async with data.feed.maybe_open_feed(
|
async with data.feed.maybe_open_feed(
|
||||||
[fqsn],
|
[fqsn],
|
||||||
|
|
||||||
|
@ -337,13 +307,14 @@ async def cascade(
|
||||||
# needs to get throttled the ticks we generate.
|
# needs to get throttled the ticks we generate.
|
||||||
# tick_throttle=60,
|
# tick_throttle=60,
|
||||||
|
|
||||||
) as feed:
|
) as (feed, quote_stream):
|
||||||
|
symbol = feed.symbols[fqsn]
|
||||||
|
|
||||||
flume = feed.flumes[fqsn]
|
|
||||||
symbol = flume.symbol
|
|
||||||
assert src.token == flume.rt_shm.token
|
|
||||||
profiler(f'{func}: feed up')
|
profiler(f'{func}: feed up')
|
||||||
|
|
||||||
|
assert src.token == feed.shm.token
|
||||||
|
# last_len = new_len = len(src.array)
|
||||||
|
|
||||||
func_name = func.__name__
|
func_name = func.__name__
|
||||||
async with (
|
async with (
|
||||||
trio.open_nursery() as n,
|
trio.open_nursery() as n,
|
||||||
|
@ -353,8 +324,8 @@ async def cascade(
|
||||||
|
|
||||||
fsp_compute,
|
fsp_compute,
|
||||||
symbol=symbol,
|
symbol=symbol,
|
||||||
flume=flume,
|
feed=feed,
|
||||||
quote_stream=flume.stream,
|
quote_stream=quote_stream,
|
||||||
|
|
||||||
# shm
|
# shm
|
||||||
src=src,
|
src=src,
|
||||||
|
@ -390,7 +361,7 @@ async def cascade(
|
||||||
) -> tuple[TaskTracker, int]:
|
) -> tuple[TaskTracker, int]:
|
||||||
# TODO: adopt an incremental update engine/approach
|
# TODO: adopt an incremental update engine/approach
|
||||||
# where possible here eventually!
|
# where possible here eventually!
|
||||||
log.info(f're-syncing fsp {func_name} to source')
|
log.debug(f're-syncing fsp {func_name} to source')
|
||||||
tracker.cs.cancel()
|
tracker.cs.cancel()
|
||||||
await tracker.complete.wait()
|
await tracker.complete.wait()
|
||||||
tracker, index = await n.start(fsp_target)
|
tracker, index = await n.start(fsp_target)
|
||||||
|
@ -403,16 +374,14 @@ async def cascade(
|
||||||
'key': dst_shm_token,
|
'key': dst_shm_token,
|
||||||
'first': dst._first.value,
|
'first': dst._first.value,
|
||||||
'last': dst._last.value,
|
'last': dst._last.value,
|
||||||
}
|
}})
|
||||||
})
|
|
||||||
return tracker, index
|
return tracker, index
|
||||||
|
|
||||||
def is_synced(
|
def is_synced(
|
||||||
src: ShmArray,
|
src: ShmArray,
|
||||||
dst: ShmArray
|
dst: ShmArray
|
||||||
) -> tuple[bool, int, int]:
|
) -> tuple[bool, int, int]:
|
||||||
'''
|
'''Predicate to determine if a destination FSP
|
||||||
Predicate to determine if a destination FSP
|
|
||||||
output array is aligned to its source array.
|
output array is aligned to its source array.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
@ -421,15 +390,16 @@ async def cascade(
|
||||||
return not (
|
return not (
|
||||||
# the source is likely backfilling and we must
|
# the source is likely backfilling and we must
|
||||||
# sync history calculations
|
# sync history calculations
|
||||||
len_diff > 2
|
len_diff > 2 or
|
||||||
|
|
||||||
# we aren't step synced to the source and may be
|
# we aren't step synced to the source and may be
|
||||||
# leading/lagging by a step
|
# leading/lagging by a step
|
||||||
or step_diff > 1
|
step_diff > 1 or
|
||||||
or step_diff < 0
|
step_diff < 0
|
||||||
), step_diff, len_diff
|
), step_diff, len_diff
|
||||||
|
|
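Both columns of ``is_synced()`` compute the same predicate; only the
boolean operator placement differs. A self-contained rendition over
plain integers, assuming the lengths and step offsets are read off the
shm arrays as in the real code::

    def is_synced(
        src_len: int,
        dst_len: int,
        step_diff: int,
    ) -> bool:
        len_diff = abs(src_len - dst_len)
        return not (
            # the source is likely backfilling and we must
            # sync history calculations
            len_diff > 2

            # we aren't step synced to the source and may be
            # leading/lagging by a step
            or step_diff > 1
            or step_diff < 0
        )

    assert is_synced(100, 100, 0)
    assert not is_synced(100, 90, 0)  # backfill in progress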
||||||
async def poll_and_sync_to_step(
|
async def poll_and_sync_to_step(
|
||||||
|
|
||||||
tracker: TaskTracker,
|
tracker: TaskTracker,
|
||||||
src: ShmArray,
|
src: ShmArray,
|
||||||
dst: ShmArray,
|
dst: ShmArray,
|
||||||
|
@ -448,23 +418,18 @@ async def cascade(
|
||||||
# detect sample period step for subscription to increment
|
# detect sample period step for subscription to increment
|
||||||
# signal
|
# signal
|
||||||
times = src.array['time']
|
times = src.array['time']
|
||||||
if len(times) > 1:
|
delay_s = times[-1] - times[times != times[-1]][-1]
|
||||||
last_ts = times[-1]
|
|
||||||
delay_s = float(last_ts - times[times != last_ts][-1])
|
|
||||||
else:
|
|
||||||
# our default "HFT" sample rate.
|
|
||||||
delay_s = _default_delay_s
|
|
||||||
|
|
||||||
# sub and increment the underlying shared memory buffer
|
# Increment the underlying shared memory buffer on every
|
||||||
# on every step msg received from the global `samplerd`
|
# "increment" msg received from the underlying data feed.
|
||||||
# service.
|
async with feed.index_stream(
|
||||||
async with open_sample_stream(float(delay_s)) as istream:
|
int(delay_s)
|
||||||
|
) as istream:
|
||||||
|
|
||||||
profiler(f'{func_name}: sample stream up')
|
profiler(f'{func_name}: sample stream up')
|
||||||
profiler.finish()
|
profiler.finish()
|
||||||
|
|
||||||
async for i in istream:
|
async for _ in istream:
|
||||||
# print(f'FSP incrementing {i}')
|
|
||||||
|
|
||||||
# respawn the compute task if the source
|
# respawn the compute task if the source
|
||||||
# array has been updated such that we compute
|
# array has been updated such that we compute
|
||||||
|
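The left column derives the sample period from the gap between the
newest timestamp and the newest *different* timestamp, falling back to
a default when the buffer holds only one row. The arithmetic over a
plain numpy time column::

    import numpy as np

    _default_delay_s: float = 1.0  # assumption: stand-in for the real default

    def detect_delay_s(times: np.ndarray) -> float:
        if len(times) > 1:
            last_ts = times[-1]
            # newest stamp minus the newest stamp that differs from it
            return float(last_ts - times[times != last_ts][-1])
        return _default_delay_s

    assert detect_delay_s(np.array([0., 1., 2., 2.])) == 1.0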
@ -493,23 +458,3 @@ async def cascade(
|
||||||
last = array[-1:].copy()
|
last = array[-1:].copy()
|
||||||
|
|
||||||
dst.push(last)
|
dst.push(last)
|
||||||
|
|
||||||
# sync with source buffer's time step
|
|
||||||
src_l2 = src.array[-2:]
|
|
||||||
src_li, src_lt = src_l2[-1][['index', 'time']]
|
|
||||||
src_2li, src_2lt = src_l2[-2][['index', 'time']]
|
|
||||||
dst._array['time'][src_li] = src_lt
|
|
||||||
dst._array['time'][src_2li] = src_2lt
|
|
||||||
|
|
||||||
# last2 = dst.array[-2:]
|
|
||||||
# if (
|
|
||||||
# last2[-1]['index'] != src_li
|
|
||||||
# or last2[-2]['index'] != src_2li
|
|
||||||
# ):
|
|
||||||
# dstl2 = list(last2)
|
|
||||||
# srcl2 = list(src_l2)
|
|
||||||
# print(
|
|
||||||
# # f'{dst.token}\n'
|
|
||||||
# f'src: {srcl2}\n'
|
|
||||||
# f'dst: {dstl2}\n'
|
|
||||||
# )
|
|
||||||
|
|
|
@ -234,7 +234,7 @@ async def flow_rates(
|
||||||
# FSPs, user input, and possibly any general event stream in
|
# FSPs, user input, and possibly any general event stream in
|
||||||
# real-time. Hint: ideally implemented with caching until mutated
|
# real-time. Hint: ideally implemented with caching until mutated
|
||||||
# ;)
|
# ;)
|
||||||
period: 'Param[int]' = 1, # noqa
|
period: 'Param[int]' = 6, # noqa
|
||||||
|
|
||||||
# TODO: support other means by providing a map
|
# TODO: support other means by providing a map
|
||||||
# to weights `partial()`-ed with `wma()`?
|
# to weights `partial()`-ed with `wma()`?
|
||||||
|
@ -268,7 +268,8 @@ async def flow_rates(
|
||||||
'dark_dvlm_rate': None,
|
'dark_dvlm_rate': None,
|
||||||
}
|
}
|
||||||
|
|
||||||
quote = await anext(source)
|
# TODO: 3.10 do ``anext()``
|
||||||
|
quote = await source.__anext__()
|
||||||
|
|
||||||
# ltr = 0
|
# ltr = 0
|
||||||
# lvr = 0
|
# lvr = 0
|
||||||
|
|
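Python 3.10 made ``anext()`` a builtin, which is why the left column
drops the dunder spelling used on the right. A quick check of the two
spellings::

    import trio

    async def quotes():
        yield {'symbol': 'xyz'}  # hypothetical quote payload

    async def main():
        source = quotes()
        quote = await anext(source)  # 3.10+ builtin
        # pre-3.10 spelling: quote = await source.__anext__()
        assert quote['symbol'] == 'xyz'

    trio.run(main)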
46 piker/log.py
|
@ -21,11 +21,7 @@ import logging
|
||||||
import json
|
import json
|
||||||
|
|
||||||
import tractor
|
import tractor
|
||||||
from pygments import (
|
from pygments import highlight, lexers, formatters
|
||||||
highlight,
|
|
||||||
lexers,
|
|
||||||
formatters,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Makes it so we only see the full module name when using ``__name__``
|
# Makes it so we only see the full module name when using ``__name__``
|
||||||
# without the extra "piker." prefix.
|
# without the extra "piker." prefix.
|
||||||
|
@ -36,48 +32,26 @@ def get_logger(
|
||||||
name: str = None,
|
name: str = None,
|
||||||
|
|
||||||
) -> logging.Logger:
|
) -> logging.Logger:
|
||||||
'''
|
'''Return the package log or a sub-log for `name` if provided.
|
||||||
Return the package log or a sub-log for `name` if provided.
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
return tractor.log.get_logger(name=name, _root_name=_proj_name)
|
return tractor.log.get_logger(name=name, _root_name=_proj_name)
|
||||||
|
|
||||||
|
|
||||||
def get_console_log(
|
def get_console_log(level: str = None, name: str = None) -> logging.Logger:
|
||||||
level: str | None = None,
|
'''Get the package logger and enable a handler which writes to stderr.
|
||||||
name: str | None = None,
|
|
||||||
|
|
||||||
) -> logging.Logger:
|
|
||||||
'''
|
|
||||||
Get the package logger and enable a handler which writes to stderr.
|
|
||||||
|
|
||||||
Yeah yeah, i know we can use ``DictConfig``. You do it...
|
Yeah yeah, i know we can use ``DictConfig``. You do it...
|
||||||
|
|
||||||
'''
|
'''
|
||||||
return tractor.log.get_console_log(
|
return tractor.log.get_console_log(
|
||||||
level,
|
level, name=name, _root_name=_proj_name) # our root logger
|
||||||
name=name,
|
|
||||||
_root_name=_proj_name,
|
|
||||||
) # our root logger
|
|
||||||
|
|
||||||
|
|
||||||
def colorize_json(
|
def colorize_json(data, style='algol_nu'):
|
||||||
data: dict,
|
"""Colorize json output using ``pygments``.
|
||||||
style='algol_nu',
|
"""
|
||||||
):
|
formatted_json = json.dumps(data, sort_keys=True, indent=4)
|
||||||
'''
|
|
||||||
Colorize json output using ``pygments``.
|
|
||||||
|
|
||||||
'''
|
|
||||||
formatted_json = json.dumps(
|
|
||||||
data,
|
|
||||||
sort_keys=True,
|
|
||||||
indent=4,
|
|
||||||
)
|
|
||||||
return highlight(
|
return highlight(
|
||||||
formatted_json,
|
formatted_json, lexers.JsonLexer(),
|
||||||
lexers.JsonLexer(),
|
|
||||||
|
|
||||||
# likeable styles: algol_nu, tango, monokai
|
# likeable styles: algol_nu, tango, monokai
|
||||||
formatters.TerminalTrueColorFormatter(style=style)
|
formatters.TerminalTrueColorFormatter(style=style)
|
||||||
)
|
)
|
||||||
|
|
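The reshaped ``colorize_json()`` behaves identically; a self-contained
version against the public ``pygments`` API::

    import json
    from pygments import highlight, lexers, formatters

    def colorize_json(data: dict, style: str = 'algol_nu') -> str:
        formatted_json = json.dumps(data, sort_keys=True, indent=4)
        return highlight(
            formatted_json,
            lexers.JsonLexer(),
            # likeable styles: algol_nu, tango, monokai
            formatters.TerminalTrueColorFormatter(style=style),
        )

    print(colorize_json({'status': 'green'}))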
1047 piker/pp.py
File diff suppressed because it is too large
|
@ -1,60 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Actor-runtime service orchestration machinery.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from ._mngr import Services
|
|
||||||
from ._registry import ( # noqa
|
|
||||||
_tractor_kwargs,
|
|
||||||
_default_reg_addr,
|
|
||||||
_default_registry_host,
|
|
||||||
_default_registry_port,
|
|
||||||
open_registry,
|
|
||||||
find_service,
|
|
||||||
check_for_service,
|
|
||||||
)
|
|
||||||
from ._daemon import ( # noqa
|
|
||||||
maybe_spawn_daemon,
|
|
||||||
spawn_brokerd,
|
|
||||||
maybe_spawn_brokerd,
|
|
||||||
spawn_emsd,
|
|
||||||
maybe_open_emsd,
|
|
||||||
)
|
|
||||||
from ._actor_runtime import (
|
|
||||||
open_piker_runtime,
|
|
||||||
maybe_open_pikerd,
|
|
||||||
open_pikerd,
|
|
||||||
get_tractor_runtime_kwargs,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
'check_for_service',
|
|
||||||
'Services',
|
|
||||||
'maybe_spawn_daemon',
|
|
||||||
'spawn_brokerd',
|
|
||||||
'maybe_spawn_brokerd',
|
|
||||||
'spawn_emsd',
|
|
||||||
'maybe_open_emsd',
|
|
||||||
'open_piker_runtime',
|
|
||||||
'maybe_open_pikerd',
|
|
||||||
'open_pikerd',
|
|
||||||
'get_tractor_runtime_kwargs',
|
|
||||||
]
|
|
|
@ -1,347 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
``tractor`` wrapping + default config to bootstrap the `pikerd`.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
from pprint import pformat
|
|
||||||
from functools import partial
|
|
||||||
import os
|
|
||||||
from typing import (
|
|
||||||
Optional,
|
|
||||||
Any,
|
|
||||||
ClassVar,
|
|
||||||
)
|
|
||||||
from contextlib import (
|
|
||||||
asynccontextmanager as acm,
|
|
||||||
)
|
|
||||||
|
|
||||||
import tractor
|
|
||||||
import trio
|
|
||||||
|
|
||||||
from ..log import (
|
|
||||||
get_logger,
|
|
||||||
get_console_log,
|
|
||||||
)
|
|
||||||
from ._mngr import (
|
|
||||||
Services,
|
|
||||||
)
|
|
||||||
from ._registry import ( # noqa
|
|
||||||
_tractor_kwargs,
|
|
||||||
_default_reg_addr,
|
|
||||||
open_registry,
|
|
||||||
)
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def get_tractor_runtime_kwargs() -> dict[str, Any]:
|
|
||||||
'''
|
|
||||||
Deliver ``tractor`` related runtime variables in a `dict`.
|
|
||||||
|
|
||||||
'''
|
|
||||||
return _tractor_kwargs
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def open_piker_runtime(
|
|
||||||
name: str,
|
|
||||||
enable_modules: list[str] = [],
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
|
|
||||||
# XXX NOTE XXX: you should pretty much never want debug mode
|
|
||||||
# for data daemons when running in production.
|
|
||||||
debug_mode: bool = False,
|
|
||||||
|
|
||||||
registry_addr: None | tuple[str, int] = None,
|
|
||||||
|
|
||||||
# TODO: once we have `rsyscall` support we will read a config
|
|
||||||
# and spawn the service tree distributed per that.
|
|
||||||
start_method: str = 'trio',
|
|
||||||
|
|
||||||
tractor_runtime_overrides: dict | None = None,
|
|
||||||
**tractor_kwargs,
|
|
||||||
|
|
||||||
) -> tuple[
|
|
||||||
tractor.Actor,
|
|
||||||
tuple[str, int],
|
|
||||||
]:
|
|
||||||
'''
|
|
||||||
Start a piker actor whose runtime will automatically sync with
|
|
||||||
existing piker actors on the local link based on configuration.
|
|
||||||
|
|
||||||
Can be called from a subactor or any program that needs to start
|
|
||||||
a root actor.
|
|
||||||
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
# check for existing runtime
|
|
||||||
actor = tractor.current_actor().uid
|
|
||||||
|
|
||||||
except tractor._exceptions.NoRuntime:
|
|
||||||
tractor._state._runtime_vars[
|
|
||||||
'piker_vars'] = tractor_runtime_overrides
|
|
||||||
|
|
||||||
registry_addr = registry_addr or _default_reg_addr
|
|
||||||
|
|
||||||
async with (
|
|
||||||
tractor.open_root_actor(
|
|
||||||
|
|
||||||
# passed through to ``open_root_actor``
|
|
||||||
arbiter_addr=registry_addr,
|
|
||||||
name=name,
|
|
||||||
loglevel=loglevel,
|
|
||||||
debug_mode=debug_mode,
|
|
||||||
start_method=start_method,
|
|
||||||
|
|
||||||
# TODO: eventually we should be able to avoid
|
|
||||||
# having the root have more then permissions to
|
|
||||||
# spawn other specialized daemons I think?
|
|
||||||
enable_modules=enable_modules,
|
|
||||||
|
|
||||||
**tractor_kwargs,
|
|
||||||
) as _,
|
|
||||||
|
|
||||||
open_registry(registry_addr, ensure_exists=False) as addr,
|
|
||||||
):
|
|
||||||
yield (
|
|
||||||
tractor.current_actor(),
|
|
||||||
addr,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
async with open_registry(registry_addr) as addr:
|
|
||||||
yield (
|
|
||||||
actor,
|
|
||||||
addr,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_root_dname = 'pikerd'
|
|
||||||
_root_modules = [
|
|
||||||
__name__,
|
|
||||||
'piker.service._daemon',
|
|
||||||
'piker.clearing._ems',
|
|
||||||
'piker.clearing._client',
|
|
||||||
'piker.data._sampling',
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def open_pikerd(
|
|
||||||
|
|
||||||
loglevel: str | None = None,
|
|
||||||
|
|
||||||
# XXX: you should pretty much never want debug mode
|
|
||||||
# for data daemons when running in production.
|
|
||||||
debug_mode: bool = False,
|
|
||||||
registry_addr: None | tuple[str, int] = None,
|
|
||||||
|
|
||||||
# db init flags
|
|
||||||
tsdb: bool = False,
|
|
||||||
es: bool = False,
|
|
||||||
drop_root_perms_for_ahab: bool = True,
|
|
||||||
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> Services:
|
|
||||||
'''
|
|
||||||
Start a root piker daemon with an indefinite lifetime.
|
|
||||||
|
|
||||||
A root actor nursery is created which can be used to create and keep
|
|
||||||
alive underlying services (see below).
|
|
||||||
|
|
||||||
'''
|
|
||||||
async with (
|
|
||||||
open_piker_runtime(
|
|
||||||
|
|
||||||
name=_root_dname,
|
|
||||||
# TODO: eventually we should be able to avoid
|
|
||||||
# having the root have more than the permissions to
|
|
||||||
# spawn other specialized daemons I think?
|
|
||||||
enable_modules=_root_modules,
|
|
||||||
loglevel=loglevel,
|
|
||||||
debug_mode=debug_mode,
|
|
||||||
registry_addr=registry_addr,
|
|
||||||
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as (root_actor, reg_addr),
|
|
||||||
tractor.open_nursery() as actor_nursery,
|
|
||||||
trio.open_nursery() as service_nursery,
|
|
||||||
):
|
|
||||||
if root_actor.accept_addr != reg_addr:
|
|
||||||
raise RuntimeError(f'Daemon failed to bind on {reg_addr}!?')
|
|
||||||
|
|
||||||
# assign globally for future daemon/task creation
|
|
||||||
Services.actor_n = actor_nursery
|
|
||||||
Services.service_n = service_nursery
|
|
||||||
Services.debug_mode = debug_mode
|
|
||||||
|
|
||||||
if tsdb:
|
|
||||||
from ._ahab import start_ahab
|
|
||||||
from .marketstore import start_marketstore
|
|
||||||
|
|
||||||
log.info('Spawning `marketstore` supervisor')
|
|
||||||
ctn_ready, config, (cid, pid) = await service_nursery.start(
|
|
||||||
partial(
|
|
||||||
start_ahab,
|
|
||||||
'marketstored',
|
|
||||||
start_marketstore,
|
|
||||||
loglevel=loglevel,
|
|
||||||
drop_root_perms=drop_root_perms_for_ahab,
|
|
||||||
)
|
|
||||||
|
|
||||||
)
|
|
||||||
log.info(
|
|
||||||
f'`marketstored` up!\n'
|
|
||||||
f'pid: {pid}\n'
|
|
||||||
f'container id: {cid[:12]}\n'
|
|
||||||
f'config: {pformat(config)}'
|
|
||||||
)
|
|
||||||
|
|
||||||
if es:
|
|
||||||
from ._ahab import start_ahab
|
|
||||||
from .elastic import start_elasticsearch
|
|
||||||
|
|
||||||
log.info('Spawning `elasticsearch` supervisor')
|
|
||||||
ctn_ready, config, (cid, pid) = await service_nursery.start(
|
|
||||||
partial(
|
|
||||||
start_ahab,
|
|
||||||
'elasticsearch',
|
|
||||||
start_elasticsearch,
|
|
||||||
loglevel=loglevel,
|
|
||||||
drop_root_perms=drop_root_perms_for_ahab,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
log.info(
|
|
||||||
f'`elasticsearch` up!\n'
|
|
||||||
f'pid: {pid}\n'
|
|
||||||
f'container id: {cid[:12]}\n'
|
|
||||||
f'config: {pformat(config)}'
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield Services
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# TODO: is this more clever/efficient?
|
|
||||||
# if 'samplerd' in Services.service_tasks:
|
|
||||||
# await Services.cancel_service('samplerd')
|
|
||||||
service_nursery.cancel_scope.cancel()
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: do we even need this?
|
|
||||||
# @acm
|
|
||||||
# async def maybe_open_runtime(
|
|
||||||
# loglevel: Optional[str] = None,
|
|
||||||
# **kwargs,
|
|
||||||
|
|
||||||
# ) -> None:
|
|
||||||
# '''
|
|
||||||
# Start the ``tractor`` runtime (a root actor) if none exists.
|
|
||||||
|
|
||||||
# '''
|
|
||||||
# name = kwargs.pop('name')
|
|
||||||
|
|
||||||
# if not tractor.current_actor(err_on_no_runtime=False):
|
|
||||||
# async with open_piker_runtime(
|
|
||||||
# name,
|
|
||||||
# loglevel=loglevel,
|
|
||||||
# **kwargs,
|
|
||||||
# ) as (_, addr):
|
|
||||||
# yield addr,
|
|
||||||
# else:
|
|
||||||
# async with open_registry() as addr:
|
|
||||||
# yield addr
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def maybe_open_pikerd(
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
registry_addr: None | tuple = None,
|
|
||||||
tsdb: bool = False,
|
|
||||||
es: bool = False,
|
|
||||||
drop_root_perms_for_ahab: bool = True,
|
|
||||||
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> tractor._portal.Portal | ClassVar[Services]:
|
|
||||||
'''
|
|
||||||
If no ``pikerd`` daemon-root-actor can be found, start it and
|
|
||||||
yield up (we should probably figure out returning a portal to self
|
|
||||||
though).
|
|
||||||
|
|
||||||
'''
|
|
||||||
if loglevel:
|
|
||||||
get_console_log(loglevel)
|
|
||||||
|
|
||||||
# subtle, we must have the runtime up here or portal lookup will fail
|
|
||||||
query_name = kwargs.pop(
|
|
||||||
'name',
|
|
||||||
f'piker_query_{os.getpid()}',
|
|
||||||
)
|
|
||||||
|
|
||||||
# TODO: if we need to make the query part faster we could not init
|
|
||||||
# an actor runtime and instead just hit the socket?
|
|
||||||
# from tractor._ipc import _connect_chan, Channel
|
|
||||||
# async with _connect_chan(host, port) as chan:
|
|
||||||
# async with open_portal(chan) as arb_portal:
|
|
||||||
# yield arb_portal
|
|
||||||
|
|
||||||
async with (
|
|
||||||
open_piker_runtime(
|
|
||||||
name=query_name,
|
|
||||||
registry_addr=registry_addr,
|
|
||||||
loglevel=loglevel,
|
|
||||||
**kwargs,
|
|
||||||
) as _,
|
|
||||||
|
|
||||||
tractor.find_actor(
|
|
||||||
_root_dname,
|
|
||||||
arbiter_sockaddr=registry_addr,
|
|
||||||
) as portal
|
|
||||||
):
|
|
||||||
# connect to any existing daemon presuming
|
|
||||||
# its registry socket was selected.
|
|
||||||
if (
|
|
||||||
portal is not None
|
|
||||||
):
|
|
||||||
yield portal
|
|
||||||
return
|
|
||||||
|
|
||||||
# presume pikerd role since no daemon could be found at
|
|
||||||
# configured address
|
|
||||||
async with open_pikerd(
|
|
||||||
loglevel=loglevel,
|
|
||||||
registry_addr=registry_addr,
|
|
||||||
|
|
||||||
# ahabd (docker super) specific controls
|
|
||||||
tsdb=tsdb,
|
|
||||||
es=es,
|
|
||||||
drop_root_perms_for_ahab=drop_root_perms_for_ahab,
|
|
||||||
|
|
||||||
# passthrough to ``tractor`` init
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as service_manager:
|
|
||||||
# in the case where we're starting up the
|
|
||||||
# tractor-piker runtime stack in **this** process
|
|
||||||
# we return no portal to self.
|
|
||||||
assert service_manager
|
|
||||||
yield service_manager
|
|
|
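``maybe_open_pikerd()`` above implements a find-or-become dance: query
the registry for an existing root daemon, yield its portal if found,
otherwise assume the role locally. The shape of that control flow,
sketched with hypothetical ``find()``/``become_root()`` context
managers::

    from contextlib import asynccontextmanager as acm

    import trio

    @acm
    async def find(name):
        # hypothetical registry lookup: None => no daemon found
        yield None

    @acm
    async def become_root():
        # hypothetical local startup of the root service tree
        yield 'service-manager'

    @acm
    async def maybe_open(name='pikerd'):
        async with find(name) as portal:
            if portal is not None:
                # connect to the existing daemon
                yield portal
                return
        # no daemon at the configured address: assume the role ourselves
        async with become_root() as mngr:
            yield mngr

    async def main():
        async with maybe_open() as who:
            assert who == 'service-manager'

    trio.run(main)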
@ -1,271 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Daemon-actor spawning "endpoint-hooks".
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
from typing import (
|
|
||||||
Optional,
|
|
||||||
Callable,
|
|
||||||
Any,
|
|
||||||
)
|
|
||||||
from contextlib import (
|
|
||||||
asynccontextmanager as acm,
|
|
||||||
)
|
|
||||||
|
|
||||||
import tractor
|
|
||||||
|
|
||||||
from ..log import (
|
|
||||||
get_logger,
|
|
||||||
get_console_log,
|
|
||||||
)
|
|
||||||
from ..brokers import get_brokermod
|
|
||||||
from ._mngr import (
|
|
||||||
Services,
|
|
||||||
)
|
|
||||||
from ._actor_runtime import maybe_open_pikerd
|
|
||||||
from ._registry import find_service
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
# `brokerd` enabled modules
|
|
||||||
# NOTE: keeping this list as small as possible is part of our caps-sec
|
|
||||||
# model and should be treated with utmost care!
|
|
||||||
_data_mods = [
|
|
||||||
'piker.brokers.core',
|
|
||||||
'piker.brokers.data',
|
|
||||||
'piker.data',
|
|
||||||
'piker.data.feed',
|
|
||||||
'piker.data._sampling'
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def maybe_spawn_daemon(
|
|
||||||
|
|
||||||
service_name: str,
|
|
||||||
service_task_target: Callable,
|
|
||||||
spawn_args: dict[str, Any],
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
|
|
||||||
singleton: bool = False,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> tractor.Portal:
|
|
||||||
'''
|
|
||||||
If no ``service_name`` daemon-actor can be found,
|
|
||||||
spawn one in a local subactor and return a portal to it.
|
|
||||||
|
|
||||||
If this function is called from a non-pikerd actor, the
|
|
||||||
spawned service will persist as long as pikerd does, or until
|
|
||||||
it is requested to be cancelled.
|
|
||||||
|
|
||||||
This can be seen as a service starting api for remote-actor
|
|
||||||
clients.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if loglevel:
|
|
||||||
get_console_log(loglevel)
|
|
||||||
|
|
||||||
# serialize access to this section to avoid
|
|
||||||
# 2 or more tasks racing to create a daemon
|
|
||||||
lock = Services.locks[service_name]
|
|
||||||
await lock.acquire()
|
|
||||||
|
|
||||||
async with find_service(service_name) as portal:
|
|
||||||
if portal is not None:
|
|
||||||
lock.release()
|
|
||||||
yield portal
|
|
||||||
return
|
|
||||||
|
|
||||||
log.warning(f"Couldn't find any existing {service_name}")
|
|
||||||
|
|
||||||
# TODO: really shouldn't the actor spawning be part of the service
|
|
||||||
# starting method `Services.start_service()` ?
|
|
||||||
|
|
||||||
# ask root ``pikerd`` daemon to spawn the daemon we need if
|
|
||||||
# pikerd is not live we now become the root of the
|
|
||||||
# process tree
|
|
||||||
async with maybe_open_pikerd(
|
|
||||||
|
|
||||||
loglevel=loglevel,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as pikerd_portal:
|
|
||||||
|
|
||||||
# we are the root and thus are `pikerd`
|
|
||||||
# so spawn the target service directly by calling
|
|
||||||
# the provided target routine.
|
|
||||||
# XXX: this assumes that the target is well formed and will
|
|
||||||
# do the right things to setup both a sub-actor **and** call
|
|
||||||
# the ``_Services`` api from above to start the top level
|
|
||||||
# service task for that actor.
|
|
||||||
started: bool
|
|
||||||
if pikerd_portal is None:
|
|
||||||
started = await service_task_target(**spawn_args)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# tell the remote `pikerd` to start the target,
|
|
||||||
# the target can't return a non-serializable value
|
|
||||||
# since it is expected that service starting is
|
|
||||||
# non-blocking and the target task will persist running
|
|
||||||
# on `pikerd` after the client requesting it's start
|
|
||||||
# disconnects.
|
|
||||||
started = await pikerd_portal.run(
|
|
||||||
service_task_target,
|
|
||||||
**spawn_args,
|
|
||||||
)
|
|
||||||
|
|
||||||
if started:
|
|
||||||
log.info(f'Service {service_name} started!')
|
|
||||||
|
|
||||||
async with tractor.wait_for_actor(service_name) as portal:
|
|
||||||
lock.release()
|
|
||||||
yield portal
|
|
||||||
await portal.cancel_actor()
|
|
||||||
|
|
||||||
|
|
||||||
async def spawn_brokerd(
|
|
||||||
|
|
||||||
brokername: str,
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
**tractor_kwargs,
|
|
||||||
|
|
||||||
) -> bool:
|
|
||||||
|
|
||||||
log.info(f'Spawning {brokername} broker daemon')
|
|
||||||
|
|
||||||
brokermod = get_brokermod(brokername)
|
|
||||||
dname = f'brokerd.{brokername}'
|
|
||||||
|
|
||||||
extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
|
|
||||||
tractor_kwargs.update(extra_tractor_kwargs)
|
|
||||||
|
|
||||||
# ask `pikerd` to spawn a new sub-actor and manage it under its
|
|
||||||
# actor nursery
|
|
||||||
modpath = brokermod.__name__
|
|
||||||
broker_enable = [modpath]
|
|
||||||
for submodname in getattr(
|
|
||||||
brokermod,
|
|
||||||
'__enable_modules__',
|
|
||||||
[],
|
|
||||||
):
|
|
||||||
subpath = f'{modpath}.{submodname}'
|
|
||||||
broker_enable.append(subpath)
|
|
||||||
|
|
||||||
portal = await Services.actor_n.start_actor(
|
|
||||||
dname,
|
|
||||||
enable_modules=_data_mods + broker_enable,
|
|
||||||
loglevel=loglevel,
|
|
||||||
debug_mode=Services.debug_mode,
|
|
||||||
**tractor_kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
# non-blocking setup of brokerd service nursery
|
|
||||||
from ..data import _setup_persistent_brokerd
|
|
||||||
|
|
||||||
await Services.start_service_task(
|
|
||||||
dname,
|
|
||||||
portal,
|
|
||||||
_setup_persistent_brokerd,
|
|
||||||
brokername=brokername,
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def maybe_spawn_brokerd(
|
|
||||||
|
|
||||||
brokername: str,
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> tractor.Portal:
|
|
||||||
'''
|
|
||||||
Helper to spawn a brokerd service *from* a client
|
|
||||||
who wishes to use the sub-actor-daemon.
|
|
||||||
|
|
||||||
'''
|
|
||||||
async with maybe_spawn_daemon(
|
|
||||||
|
|
||||||
f'brokerd.{brokername}',
|
|
||||||
service_task_target=spawn_brokerd,
|
|
||||||
spawn_args={
|
|
||||||
'brokername': brokername,
|
|
||||||
'loglevel': loglevel,
|
|
||||||
},
|
|
||||||
loglevel=loglevel,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as portal:
|
|
||||||
yield portal
|
|
||||||
|
|
||||||
|
|
||||||
async def spawn_emsd(
|
|
||||||
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
**extra_tractor_kwargs
|
|
||||||
|
|
||||||
) -> bool:
|
|
||||||
"""
|
|
||||||
Start the clearing engine under ``pikerd``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
log.info('Spawning emsd')
|
|
||||||
|
|
||||||
portal = await Services.actor_n.start_actor(
|
|
||||||
'emsd',
|
|
||||||
enable_modules=[
|
|
||||||
'piker.clearing._ems',
|
|
||||||
'piker.clearing._client',
|
|
||||||
],
|
|
||||||
loglevel=loglevel,
|
|
||||||
debug_mode=Services.debug_mode, # set by pikerd flag
|
|
||||||
**extra_tractor_kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
# non-blocking setup of clearing service
|
|
||||||
from ..clearing._ems import _setup_persistent_emsd
|
|
||||||
|
|
||||||
await Services.start_service_task(
|
|
||||||
'emsd',
|
|
||||||
portal,
|
|
||||||
_setup_persistent_emsd,
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def maybe_open_emsd(
|
|
||||||
|
|
||||||
brokername: str,
|
|
||||||
loglevel: Optional[str] = None,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> tractor._portal.Portal: # noqa
|
|
||||||
|
|
||||||
async with maybe_spawn_daemon(
|
|
||||||
|
|
||||||
'emsd',
|
|
||||||
service_task_target=spawn_emsd,
|
|
||||||
spawn_args={'loglevel': loglevel},
|
|
||||||
loglevel=loglevel,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as portal:
|
|
||||||
yield portal
|
|
|
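``maybe_spawn_daemon()`` above guards the find-else-spawn window with
a per-service ``trio.Lock`` pulled from a ``defaultdict``, so two
tasks can't race to spawn the same daemon. The locking skeleton, with
the actual find/spawn elided::

    from collections import defaultdict

    import trio

    locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)

    async def maybe_spawn(name: str) -> str:
        # one lock per service name: tasks racing on the same daemon
        # serialize here, different daemons proceed in parallel
        lock = locks[name]
        await lock.acquire()
        try:
            # find-or-spawn would happen here
            return f'portal-to-{name}'  # hypothetical handle
        finally:
            lock.release()

    async def main():
        async with trio.open_nursery() as n:
            for _ in range(2):
                n.start_soon(maybe_spawn, 'brokerd.kraken')

    trio.run(main)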
@ -1,136 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
daemon-service management API.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from collections import defaultdict
|
|
||||||
from typing import (
|
|
||||||
Callable,
|
|
||||||
Any,
|
|
||||||
)
|
|
||||||
|
|
||||||
import trio
|
|
||||||
from trio_typing import TaskStatus
|
|
||||||
import tractor
|
|
||||||
|
|
||||||
from ..log import (
|
|
||||||
get_logger,
|
|
||||||
)
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: factor this into a ``tractor.highlevel`` extension
|
|
||||||
# pack for the library.
|
|
||||||
class Services:
|
|
||||||
|
|
||||||
actor_n: tractor._supervise.ActorNursery
|
|
||||||
service_n: trio.Nursery
|
|
||||||
debug_mode: bool # tractor sub-actor debug mode flag
|
|
||||||
service_tasks: dict[
|
|
||||||
str,
|
|
||||||
tuple[
|
|
||||||
trio.CancelScope,
|
|
||||||
tractor.Portal,
|
|
||||||
trio.Event,
|
|
||||||
]
|
|
||||||
] = {}
|
|
||||||
locks = defaultdict(trio.Lock)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def start_service_task(
|
|
||||||
self,
|
|
||||||
name: str,
|
|
||||||
portal: tractor.Portal,
|
|
||||||
target: Callable,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> (trio.CancelScope, tractor.Context):
|
|
||||||
'''
|
|
||||||
Open a context in a service sub-actor, add to a stack
|
|
||||||
that gets unwound at ``pikerd`` teardown.
|
|
||||||
|
|
||||||
This allows for allocating long-running sub-services in our main
|
|
||||||
daemon and explicitly controlling their lifetimes.
|
|
||||||
|
|
||||||
'''
|
|
||||||
async def open_context_in_task(
|
|
||||||
task_status: TaskStatus[
|
|
||||||
tuple[
|
|
||||||
trio.CancelScope,
|
|
||||||
trio.Event,
|
|
||||||
Any,
|
|
||||||
]
|
|
||||||
] = trio.TASK_STATUS_IGNORED,
|
|
||||||
|
|
||||||
) -> Any:
|
|
||||||
|
|
||||||
with trio.CancelScope() as cs:
|
|
||||||
async with portal.open_context(
|
|
||||||
target,
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) as (ctx, first):
|
|
||||||
|
|
||||||
# unblock once the remote context has started
|
|
||||||
complete = trio.Event()
|
|
||||||
task_status.started((cs, complete, first))
|
|
||||||
log.info(
|
|
||||||
f'`pikerd` service {name} started with value {first}'
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
# wait on any context's return value
|
|
||||||
# and any final portal result from the
|
|
||||||
# sub-actor.
|
|
||||||
ctx_res = await ctx.result()
|
|
||||||
|
|
||||||
# NOTE: blocks indefinitely until cancelled
|
|
||||||
# either by error from the target context
|
|
||||||
# function or by being cancelled here by the
|
|
||||||
# surrounding cancel scope.
|
|
||||||
return (await portal.result(), ctx_res)
|
|
||||||
|
|
||||||
finally:
|
|
||||||
await portal.cancel_actor()
|
|
||||||
complete.set()
|
|
||||||
self.service_tasks.pop(name)
|
|
||||||
|
|
||||||
cs, complete, first = await self.service_n.start(open_context_in_task)
|
|
||||||
|
|
||||||
# store the cancel scope and portal for later cancellation or
|
|
||||||
# restart if needed.
|
|
||||||
self.service_tasks[name] = (cs, portal, complete)
|
|
||||||
|
|
||||||
return cs, first
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def cancel_service(
|
|
||||||
self,
|
|
||||||
name: str,
|
|
||||||
|
|
||||||
) -> Any:
|
|
||||||
'''
|
|
||||||
Cancel the service task and actor for the given ``name``.
|
|
||||||
|
|
||||||
'''
|
|
||||||
log.info(f'Cancelling `pikerd` service {name}')
|
|
||||||
cs, portal, complete = self.service_tasks[name]
|
|
||||||
cs.cancel()
|
|
||||||
await complete.wait()
|
|
||||||
assert name not in self.service_tasks, \
|
|
||||||
f'Service task for {name} not terminated?'
|
|
|
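``Services`` pairs each long-lived task with a ``trio.CancelScope``
and a completion ``trio.Event`` so ``cancel_service()`` can request
shutdown and then wait for teardown to finish. That lifecycle in
miniature::

    import trio

    async def service(task_status=trio.TASK_STATUS_IGNORED):
        with trio.CancelScope() as cs:
            complete = trio.Event()
            task_status.started((cs, complete))
            try:
                await trio.sleep_forever()  # the "service" body
            finally:
                complete.set()              # signal teardown done

    async def main():
        async with trio.open_nursery() as n:
            cs, complete = await n.start(service)
            cs.cancel()            # request shutdown...
            await complete.wait()  # ...and wait for it

    trio.run(main)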
@ -1,144 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Inter-actor "discovery" (protocol) layer.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
from contextlib import (
|
|
||||||
asynccontextmanager as acm,
|
|
||||||
)
|
|
||||||
from typing import (
|
|
||||||
Any,
|
|
||||||
)
|
|
||||||
|
|
||||||
import tractor
|
|
||||||
|
|
||||||
|
|
||||||
from ..log import (
|
|
||||||
get_logger,
|
|
||||||
)
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
_default_registry_host: str = '127.0.0.1'
|
|
||||||
_default_registry_port: int = 6116
|
|
||||||
_default_reg_addr: tuple[str, int] = (
|
|
||||||
_default_registry_host,
|
|
||||||
_default_registry_port,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# NOTE: this value is set as an actor-global once the first endpoint
|
|
||||||
# that is capable spawns a `pikerd` service tree.
|
|
||||||
_registry: Registry | None = None
|
|
||||||
|
|
||||||
|
|
||||||
class Registry:
|
|
||||||
addr: None | tuple[str, int] = None
|
|
||||||
|
|
||||||
# TODO: table of uids to sockaddrs
|
|
||||||
peers: dict[
|
|
||||||
tuple[str, str],
|
|
||||||
tuple[str, int],
|
|
||||||
] = {}
|
|
||||||
|
|
||||||
|
|
||||||
_tractor_kwargs: dict[str, Any] = {}
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def open_registry(
|
|
||||||
addr: None | tuple[str, int] = None,
|
|
||||||
ensure_exists: bool = True,
|
|
||||||
|
|
||||||
) -> tuple[str, int]:
|
|
||||||
|
|
||||||
global _tractor_kwargs
|
|
||||||
actor = tractor.current_actor()
|
|
||||||
uid = actor.uid
|
|
||||||
if (
|
|
||||||
Registry.addr is not None
|
|
||||||
and addr
|
|
||||||
):
|
|
||||||
raise RuntimeError(
|
|
||||||
f'`{uid}` registry addr already bound @ {Registry.addr}'
|
|
||||||
)
|
|
||||||
|
|
||||||
was_set: bool = False
|
|
||||||
|
|
||||||
if (
|
|
||||||
not tractor.is_root_process()
|
|
||||||
and Registry.addr is None
|
|
||||||
):
|
|
||||||
Registry.addr = actor._arb_addr
|
|
||||||
|
|
||||||
if (
|
|
||||||
ensure_exists
|
|
||||||
and Registry.addr is None
|
|
||||||
):
|
|
||||||
raise RuntimeError(
|
|
||||||
f"`{uid}` registry should already exist bug doesn't?"
|
|
||||||
)
|
|
||||||
|
|
||||||
if (
|
|
||||||
Registry.addr is None
|
|
||||||
):
|
|
||||||
was_set = True
|
|
||||||
Registry.addr = addr or _default_reg_addr
|
|
||||||
|
|
||||||
_tractor_kwargs['arbiter_addr'] = Registry.addr
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield Registry.addr
|
|
||||||
finally:
|
|
||||||
# XXX: always clear the global addr if we set it so that the
|
|
||||||
# next (set of) calls will apply whatever new one is passed
|
|
||||||
# in.
|
|
||||||
if was_set:
|
|
||||||
Registry.addr = None
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def find_service(
|
|
||||||
service_name: str,
|
|
||||||
) -> tractor.Portal | None:
|
|
||||||
|
|
||||||
async with open_registry() as reg_addr:
|
|
||||||
log.info(f'Scanning for service `{service_name}`')
|
|
||||||
# attach to existing daemon by name if possible
|
|
||||||
async with tractor.find_actor(
|
|
||||||
service_name,
|
|
||||||
arbiter_sockaddr=reg_addr,
|
|
||||||
) as maybe_portal:
|
|
||||||
yield maybe_portal
|
|
||||||
|
|
||||||
|
|
||||||
async def check_for_service(
|
|
||||||
service_name: str,
|
|
||||||
|
|
||||||
) -> None | tuple[str, int]:
|
|
||||||
'''
|
|
||||||
Service daemon "liveness" predicate.
|
|
||||||
|
|
||||||
'''
|
|
||||||
async with open_registry(ensure_exists=False) as reg_addr:
|
|
||||||
async with tractor.query_actor(
|
|
||||||
service_name,
|
|
||||||
arbiter_sockaddr=reg_addr,
|
|
||||||
) as sockaddr:
|
|
||||||
return sockaddr
|
|
|
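``open_registry()`` treats the registry address as actor-global state:
the first caller binds it, and a ``finally`` clears it only if this
call was the one that set it. The set-once/reset-on-exit idiom,
stripped to its core::

    from contextlib import contextmanager

    _addr: tuple[str, int] | None = None

    @contextmanager
    def open_registry(addr: tuple[str, int] | None = None):
        global _addr
        was_set = False
        if _addr is None:
            was_set = True
            _addr = addr or ('127.0.0.1', 6116)
        try:
            yield _addr
        finally:
            # only the call that bound the address unbinds it
            if was_set:
                _addr = None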
@ -1,128 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
from typing import (
|
|
||||||
Any,
|
|
||||||
TYPE_CHECKING,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
import docker
|
|
||||||
from ._ahab import DockerContainer
|
|
||||||
|
|
||||||
from piker.log import (
|
|
||||||
get_logger,
|
|
||||||
get_console_log
|
|
||||||
)
|
|
||||||
|
|
||||||
import asks
|
|
||||||
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
# container level config
|
|
||||||
_config = {
|
|
||||||
'port': 19200,
|
|
||||||
'log_level': 'debug',
|
|
||||||
|
|
||||||
# hardcoded to our image version
|
|
||||||
'version': '7.17.4',
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def start_elasticsearch(
|
|
||||||
client: docker.DockerClient,
|
|
||||||
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> tuple[DockerContainer, dict[str, Any]]:
|
|
||||||
'''
|
|
||||||
Start and supervise an elasticsearch instance with its config bind-mounted
|
|
||||||
in from the piker config directory on the system.
|
|
||||||
|
|
||||||
The equivalent cli cmd to this code is:
|
|
||||||
|
|
||||||
sudo docker run \
|
|
||||||
-itd \
|
|
||||||
--rm \
|
|
||||||
--network=host \
|
|
||||||
--mount type=bind,source="$(pwd)"/elastic,\
|
|
||||||
target=/usr/share/elasticsearch/data \
|
|
||||||
--env "elastic_username=elastic" \
|
|
||||||
--env "elastic_password=password" \
|
|
||||||
--env "xpack.security.enabled=false" \
|
|
||||||
elastic
|
|
||||||
|
|
||||||
'''
|
|
||||||
get_console_log('info', name=__name__)
|
|
||||||
|
|
||||||
dcntr: DockerContainer = client.containers.run(
|
|
||||||
'piker:elastic',
|
|
||||||
name='piker-elastic',
|
|
||||||
network='host',
|
|
||||||
detach=True,
|
|
||||||
remove=True
|
|
||||||
)
|
|
||||||
|
|
||||||
async def health_query(msg: str | None = None):
|
|
||||||
if (
|
|
||||||
msg
|
|
||||||
and _config['version'] in msg
|
|
||||||
):
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
health = (await asks.get(
|
|
||||||
'http://localhost:19200/_cat/health',
|
|
||||||
params={'format': 'json'}
|
|
||||||
)).json()
|
|
||||||
log.info(
|
|
||||||
'ElasticSearch cntr health:\n'
|
|
||||||
f'{health}'
|
|
||||||
)
|
|
||||||
|
|
||||||
except OSError:
|
|
||||||
log.exception("couldn't reach elastic container")
|
|
||||||
return False
|
|
||||||
|
|
||||||
log.info(health)
|
|
||||||
return health[0]['status'] == 'green'
|
|
||||||
|
|
||||||
async def chk_for_closed_msg(msg: str):
|
|
||||||
return msg == 'closed'
|
|
||||||
|
|
||||||
return (
|
|
||||||
dcntr,
|
|
||||||
{
|
|
||||||
# apparently we're REALLY tolerant of startup latency
|
|
||||||
# for CI XD
|
|
||||||
'startup_timeout': 240.0,
|
|
||||||
|
|
||||||
# XXX: decrease http poll period bc docker
|
|
||||||
# is shite at handling fast poll rates..
|
|
||||||
'startup_query_period': 0.1,
|
|
||||||
|
|
||||||
'log_msg_key': 'message',
|
|
||||||
|
|
||||||
# 'started_afunc': health_query,
|
|
||||||
},
|
|
||||||
# expected startup and stop msgs
|
|
||||||
health_query,
|
|
||||||
chk_for_closed_msg,
|
|
||||||
)
|
|
|
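The supervisor above polls the cluster health endpoint until it
reports ``green``. An equivalent standalone probe, assuming only the
``asks`` HTTP client and a locally reachable instance on the port from
``_config``::

    import asks
    import trio

    async def es_is_green(port: int = 19200) -> bool:
        try:
            health = (await asks.get(
                f'http://localhost:{port}/_cat/health',
                params={'format': 'json'},
            )).json()
        except OSError:
            return False  # container not (yet) reachable
        return health[0]['status'] == 'green'

    trio.run(es_is_green)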
@ -1,414 +0,0 @@
|
||||||
# piker: trading gear for hackers
|
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
'''
|
|
||||||
(time-series) database middle ware layer.
|
|
||||||
|
|
||||||
- APIs for read, write, delete, replicate over multiple
|
|
||||||
db systems.
|
|
||||||
- backend agnostic tick msg ingest machinery.
|
|
||||||
- broadcast systems for fan out of real-time ingested
|
|
||||||
data to live consumers.
|
|
||||||
- test harness utilities for data-processing verification.
|
|
||||||
|
|
||||||
'''
|
|
||||||
from __future__ import annotations
|
|
||||||
from contextlib import asynccontextmanager as acm
|
|
||||||
from datetime import datetime
|
|
||||||
from pprint import pformat
|
|
||||||
from typing import (
|
|
||||||
Optional,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
from anyio_marketstore import (
|
|
||||||
Params,
|
|
||||||
)
|
|
||||||
import pendulum
|
|
||||||
import purerpc
|
|
||||||
|
|
||||||
from ..service.marketstore import (
|
|
||||||
MarketstoreClient,
|
|
||||||
tf_in_1s,
|
|
||||||
mk_tbk,
|
|
||||||
_ohlcv_dt,
|
|
||||||
MarketStoreError,
|
|
||||||
)
|
|
||||||
from ..data.feed import maybe_open_feed
|
|
||||||
from ..log import get_logger
|
|
||||||
from .._profile import Profiler
|
|
||||||
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Storage:
    '''
    High level storage api for both real-time and historical ingest.

    '''
    def __init__(
        self,
        client: MarketstoreClient,

    ) -> None:
        # TODO: eventually this should be an api/interface type that
        # ensures we can support multiple tsdb backends.
        self.client = client

        # series' cache from tsdb reads
        self._arrays: dict[str, np.ndarray] = {}

    async def list_keys(self) -> list[str]:
        return await self.client.list_symbols()

    async def search_keys(self, pattern: str) -> list[str]:
        '''
        Search for a time series key in the storage backend.

        '''
        ...

    async def write_ticks(self, ticks: list) -> None:
        ...

    async def load(
        self,
        fqsn: str,
        timeframe: int,

    ) -> tuple[
        np.ndarray,          # timeframe sampled array-series
        Optional[datetime],  # first dt
        Optional[datetime],  # last dt
    ]:

        first_tsdb_dt, last_tsdb_dt = None, None
        hist = await self.read_ohlcv(
            fqsn,
            # on first load we don't need to pull the max
            # history per request size worth.
            limit=3000,
            timeframe=timeframe,
        )
        log.info(f'Loaded tsdb history {hist}')

        if len(hist):
            times = hist['Epoch']
            first, last = times[0], times[-1]
            first_tsdb_dt, last_tsdb_dt = map(
                pendulum.from_timestamp, [first, last]
            )

        return (
            hist,           # array-data
            first_tsdb_dt,  # start of query-frame
            last_tsdb_dt,   # most recent
        )

    async def read_ohlcv(
        self,
        fqsn: str,
        timeframe: int | str,
        end: Optional[int] = None,
        limit: int = int(800e3),

    ) -> np.ndarray:

        client = self.client
        syms = await client.list_symbols()

        if fqsn not in syms:
            return {}

        # use the provided timeframe or 1s by default
        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])

        params = Params(
            symbols=fqsn,
            timeframe=tfstr,
            attrgroup='OHLCV',
            end=end,
            # limit_from_start=True,

            # TODO: figure the max limit here given the
            # ``purerpc`` msg size limit: 33554432
            limit=limit,
        )

        try:
            result = await client.query(params)
        except purerpc.grpclib.exceptions.UnknownError as err:
            # indicate there is no history for this timeframe
            log.exception(
                f'Unknown mkts QUERY error: {params}\n'
                f'{err.args}'
            )
            return {}

        # TODO: it turns out column access on recarrays is actually slower:
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
        # it might make sense to make these structured arrays?
        data_set = result.by_symbols()[fqsn]
        array = data_set.array

        # XXX: ensure sample rate is as expected
        time = data_set.array['Epoch']
        if len(time) > 1:
            time_step = time[-1] - time[-2]
            ts = tf_in_1s.inverse[data_set.timeframe]

            if time_step != ts:
                log.warning(
                    f'MKTS BUG: wrong timeframe loaded: {time_step}\n'
                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG\n'
                    f'WIPING HISTORY FOR {ts}s'
                )
                await self.delete_ts(fqsn, timeframe)

                # try reading again..
                return await self.read_ohlcv(
                    fqsn,
                    timeframe,
                    end,
                    limit,
                )

        return array

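For reference, `tf_in_1s` (imported above) behaves like a bidirectional map from sample period in seconds to marketstore timeframe strings; a minimal stand-in sketch, where the exact membership beyond the 1s/60s rates used here is an assumption:

from bidict import bidict

# a stand-in for the ``tf_in_1s`` bidict imported above
tf_in_1s = bidict({
    1: '1Sec',
    60: '1Min',
})

tfstr = tf_in_1s.get(5, tf_in_1s[1])   # unknown rates fall back to '1Sec'
secs = tf_in_1s.inverse['1Min']        # reverse lookup -> 60
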
    async def delete_ts(
        self,
        key: str,
        timeframe: Optional[Union[int, str]] = None,
        fmt: str = 'OHLCV',

    ) -> bool:

        client = self.client
        syms = await client.list_symbols()
        if key not in syms:
            raise KeyError(f'`{key}` table key not found in\n{syms}?')

        tbk = mk_tbk((
            key,
            tf_in_1s.get(timeframe, tf_in_1s[60]),
            fmt,
        ))
        return await client.destroy(tbk=tbk)

    async def write_ohlcv(
        self,
        fqsn: str,
        ohlcv: np.ndarray,
        timeframe: int,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),

    ) -> None:
        # build mkts schema compat array for writing
        mkts_dt = np.dtype(_ohlcv_dt)
        mkts_array = np.zeros(
            len(ohlcv),
            dtype=mkts_dt,
        )
        # copy from shm array (yes it's this easy):
        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
        mkts_array[:] = ohlcv[[
            'time',
            'open',
            'high',
            'low',
            'close',
            'volume',
        ]]

        m, r = divmod(len(mkts_array), limit)

        tfkey = tf_in_1s[timeframe]
        for i in range(1, m + 1):
            to_push = mkts_array[(i - 1) * limit:i * limit]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: we will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

        if r:
            to_push = mkts_array[m * limit:]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: we will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

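The `divmod()` chunking above is worth seeing in isolation; a minimal standalone sketch of the same full-frames-plus-remainder split (the loop bounds here are an editorial correction of the original `range(m, 1)`, which never iterated):

import numpy as np

def chunk(arr: np.ndarray, limit: int) -> list[np.ndarray]:
    # m full frames of `limit` rows, plus a final remainder frame
    m, r = divmod(len(arr), limit)
    frames = [arr[(i - 1) * limit:i * limit] for i in range(1, m + 1)]
    if r:
        frames.append(arr[m * limit:])
    return frames

assert sum(len(f) for f in chunk(np.arange(10), 4)) == 10
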
    # XXX: currently the only way to do this is through the CLI:

    # sudo ./marketstore connect --dir ~/.config/piker/data
    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
    # and this seems to block and use up mem..
    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15

    # relevant source code for this is here:
    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
    # def delete_range(self, start_dt, end_dt) -> None:
    #     ...


@acm
async def open_storage_client(
    fqsn: str,
    period: Optional[Union[int, str]] = None,  # in seconds

) -> tuple[Storage, dict[str, np.ndarray]]:
    '''
    Load a series by key and deliver in ``numpy`` struct array format.

    '''
    # TODO: generic import-by-name system for each backend much like
    # we have in ``piker.brokers`` module loading for `brokerd` B)
    from ..service import marketstore
    mod = marketstore

    async with (
        # eventually a storage backend endpoint
        mod.get_client() as client,
    ):
        # slap on our wrapper api
        yield Storage(client)


# NOTE: pretty sure right now this is only being
# called by a CLI entrypoint?
@acm
async def open_tsdb_client(
    fqsn: str,

) -> Storage:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #    - lazy loading?
    #    - try to load it all and expect graphics caching/diffing
    #      to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth grokking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    profiler = Profiler(
        disabled=True,  # not pg_profile_enabled(),
        delayed=False,
    )

    async with (
        open_storage_client(fqsn) as storage,

        maybe_open_feed(
            [fqsn],
            start_stream=False,

        ) as feed,
    ):
        profiler(f'opened feed for {fqsn}')

        # to_append = feed.hist_shm.array
        # to_prepend = None

        if fqsn:
            flume = feed.flumes[fqsn]
            symbol = flume.symbol
            if symbol:
                fqsn = symbol.fqsn

            # diff db history with shm and only write the missing portions
            # ohlcv = flume.hist_shm.array

            # TODO: use pg profiler
            # for secs in (1, 60):
            #     tsdb_array = await storage.read_ohlcv(
            #         fqsn,
            #         timeframe=timeframe,
            #     )
            #     # hist diffing:
            #     # these aren't currently used but can be referenced from
            #     # within the embedded ipython shell below.
            #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
            #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]

            # profiler('Finished db arrays diffs')

        syms = await storage.client.list_symbols()
        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
        # profiler(f'listed symbols {syms}')

        yield storage

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqsn, array)

        # profiler('Finished db writes')
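
A minimal usage sketch for the context manager above; the fqsn value is illustrative only:

import trio

async def main() -> None:
    # open the marketstore-backed storage layer and dump stored keys
    async with open_storage_client('btcusdt.binance') as storage:
        print(await storage.list_keys())

trio.run(main)
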
@@ -32,22 +32,16 @@ def mk_marker_path(
    style: str,

) -> QGraphicsPathItem:
-    '''
-    Add a marker to be displayed on the line wrapped in
-    a ``QGraphicsPathItem`` ready to be placed using scene coordinates
-    (not view).
+    """Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
+    ready to be placed using scene coordinates (not view).

    **Arguments**
    style        String indicating the style of marker to add:
                 ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
                 ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
+    size         Size of the marker in pixels.

-    This code is taken nearly verbatim from the
-    `InfiniteLine.addMarker()` method but does not attempt to be aware
-    of low(er) level graphics controls and expects for the output
-    polygon to be applied to a ``QGraphicsPathItem``.
-
-    '''
+    """
    path = QtGui.QPainterPath()

    if style == 'o':

@@ -93,8 +87,7 @@ def mk_marker_path(


class LevelMarker(QGraphicsPathItem):
-    '''
-    An arrow marker path graphic which redraws itself
+    '''An arrow marker path graphic which redraws itself
    to the specified view coordinate level on each paint cycle.

    '''

@@ -111,8 +104,7 @@ class LevelMarker(QGraphicsPathItem):
        # get polygon and scale
        super().__init__()
-        # self.setScale(size, size)
-        self.setScale(size)
+        self.scale(size, size)

        # internally generates path
        self._style = None

@@ -122,7 +114,6 @@ class LevelMarker(QGraphicsPathItem):
        self.get_level = get_level
        self._on_paint = on_paint

        self.scene_x = lambda: chart.marker_right_points()[1]
        self.level: float = 0
        self.keep_in_view = keep_in_view

@@ -158,9 +149,12 @@ class LevelMarker(QGraphicsPathItem):
    def w(self) -> float:
        return self.path_br().width()

-    def position_in_view(self) -> None:
-        '''
-        Show a pp off-screen indicator for a level label.
+    def position_in_view(
+        self,
+        # level: float,
+
+    ) -> None:
+        '''Show a pp off-screen indicator for a level label.

        This is like in fps games where you have a gps "nav" indicator
        but your teammate is outside the range of view, except in 2D, on

@@ -168,6 +162,7 @@ class LevelMarker(QGraphicsPathItem):
        '''
        level = self.get_level()

        view = self.chart.getViewBox()
        vr = view.state['viewRange']
        ymn, ymx = vr[1]

@@ -191,6 +186,7 @@ class LevelMarker(QGraphicsPathItem):
            )

        elif level < ymn:  # pin to bottom of view

            self.setPos(
                QPointF(
                    x,

@@ -215,8 +211,7 @@ class LevelMarker(QGraphicsPathItem):
        w: QtWidgets.QWidget

    ) -> None:
-        '''
-        Core paint which we override to always update
+        '''Core paint which we override to always update
        our marker position in scene coordinates from a
        view coordinate "level".

@@ -240,12 +235,11 @@ def qgo_draw_markers(
    right_offset: float,

) -> float:
-    '''
-    Paint markers in ``pg.GraphicsItem`` style by first
+    """Paint markers in ``pg.GraphicsItem`` style by first
    removing the view transform for the painter, drawing the markers
    in scene coords, then restoring the view coords.

-    '''
+    """
    # paint markers in native coordinate system
    orig_tr = p.transform()

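The pinning logic in `position_in_view()` reduces to clamping the level into the visible y-range; a one-liner sketch of that core:

def pin_level(level: float, ymn: float, ymx: float) -> float:
    # off-screen levels snap to the nearest visible view edge
    return max(ymn, min(level, ymx))

assert pin_level(120.0, 0.0, 100.0) == 100.0
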
@@ -19,16 +19,15 @@ Main app startup and run.

'''
from functools import partial
-from types import ModuleType

from PyQt5.QtCore import QEvent
import trio

-from ..service import maybe_spawn_brokerd
+from .._daemon import maybe_spawn_brokerd
+from ..brokers import get_brokermod
from . import _event
from ._exec import run_qtractor
from ..data.feed import install_brokerd_search
-from ..data._source import unpack_fqsn
from . import _search
from ._chart import GodWidget
from ..log import get_logger

@@ -37,26 +36,27 @@ log = get_logger(__name__)


async def load_provider_search(
-    brokermod: ModuleType,
+    broker: str,
    loglevel: str,

) -> None:

-    name = brokermod.name
-    log.info(f'loading brokerd for {name}..')
+    log.info(f'loading brokerd for {broker}..')

    async with (

        maybe_spawn_brokerd(
-            name,
+            broker,
            loglevel=loglevel
        ) as portal,

        install_brokerd_search(
            portal,
-            brokermod,
+            get_brokermod(broker),
        ),
    ):

        # keep search engine stream up until cancelled
        await trio.sleep_forever()

@@ -66,8 +66,8 @@ async def _async_main(
    # implicit required argument provided by ``qtractor_run()``
    main_widget: GodWidget,

-    syms: list[str],
-    brokers: dict[str, ModuleType],
+    sym: str,
+    brokernames: str,
    loglevel: str,

) -> None:

@@ -78,8 +78,6 @@ async def _async_main(
    """
    from . import _display
-    from ._pg_overrides import _do_overrides
-    _do_overrides()

    godwidget = main_widget

@@ -99,11 +97,6 @@ async def _async_main(
    sbar = godwidget.window.status_bar
    starting_done = sbar.open_status('starting ze sexy chartz')

-    needed_brokermods: dict[str, ModuleType] = {}
-    for fqsn in syms:
-        brokername, *_ = unpack_fqsn(fqsn)
-        needed_brokermods[brokername] = brokers[brokername]
-
    async with (
        trio.open_nursery() as root_n,
    ):

@@ -114,14 +107,18 @@ async def _async_main(
        # setup search widget and focus main chart view at startup
        # search widget is a singleton alongside the godwidget
        search = _search.SearchWidget(godwidget=godwidget)
-        # search.bar.unfocus()
-        # godwidget.hbox.addWidget(search)
+        search.bar.unfocus()
+        godwidget.hbox.addWidget(search)
        godwidget.search = search

+        symbol, _, provider = sym.rpartition('.')
+
        # this internally starts a ``display_symbol_data()`` task above
-        order_mode_ready = await godwidget.load_symbols(
-            fqsns=syms,
-            loglevel=loglevel,
+        order_mode_ready = await godwidget.load_symbol(
+            provider,
+            symbol,
+            loglevel
        )

        # spin up a search engine for the local cached symbol set

@@ -138,12 +135,8 @@ async def _async_main(
        ):
            # load other providers into search **after**
            # the chart's select cache
-            for brokername, mod in needed_brokermods.items():
-                root_n.start_soon(
-                    load_provider_search,
-                    mod,
-                    loglevel,
-                )
+            for broker in brokernames:
+                root_n.start_soon(load_provider_search, broker, loglevel)

            await order_mode_ready.wait()

@@ -172,22 +165,19 @@ async def _async_main(


def _main(
-    syms: list[str],
-    brokermods: list[ModuleType],
+    sym: str,
+    brokernames: [str],
    piker_loglevel: str,
    tractor_kwargs,
) -> None:
    '''
-    Sync entry point to start a chart: a ``tractor`` + Qt runtime.
+    Sync entry point to start a chart: a ``tractor`` + Qt runtime
+    entry point

    '''
    run_qtractor(
        func=_async_main,
-        args=(
-            syms,
-            {mod.name: mod for mod in brokermods},
-            piker_loglevel,
-        ),
-        main_widget_type=GodWidget,
+        args=(sym, brokernames, piker_loglevel),
+        main_widget=GodWidget,
        tractor_kwargs=tractor_kwargs,
    )
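
The `sym.rpartition('.')` split on the new side is the whole symbol-parsing story in miniature; for example, with an illustrative ticker:

sym = 'xbtusd.kraken'
symbol, _, provider = sym.rpartition('.')
assert (symbol, provider) == ('xbtusd', 'kraken')
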
@@ -18,7 +18,6 @@
Chart axes graphics and behavior.

"""
-from __future__ import annotations
from functools import lru_cache
from typing import Optional, Callable
from math import floor

@@ -28,7 +27,6 @@ import pyqtgraph as pg
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QPointF

-from . import _pg_overrides as pgo
from ..data._source import float_digits
from ._label import Label
from ._style import DpiAwareFont, hcolor, _font

@@ -41,17 +39,12 @@ class Axis(pg.AxisItem):
    '''
    A better axis that sizes tick contents considering font size.

-    Also includes tick value lru caching, originally proposed upstream
-    but never accepted:
-    https://github.com/pyqtgraph/pyqtgraph/pull/2160
-
    '''
    def __init__(
        self,
-        plotitem: pgo.PlotItem,
-        typical_max_str: str = '100 000.000 ',
+        linkedsplits,
+        typical_max_str: str = '100 000.000',
        text_color: str = 'bracket',
-        lru_cache_tick_strings: bool = True,
        **kwargs

    ) -> None:

@@ -63,78 +56,41 @@ class Axis(pg.AxisItem):
        # XXX: pretty sure this makes things slower
        # self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

-        self.pi = plotitem
+        self.linkedsplits = linkedsplits
        self._dpi_font = _font

        self.setTickFont(_font.font)
        font_size = self._dpi_font.font.pixelSize()

-        style_conf = {
-            'textFillLimits': [(0, 0.5)],
-            'tickFont': self._dpi_font.font,
-        }
-        text_offset = None
        if self.orientation in ('bottom',):
            text_offset = floor(0.25 * font_size)

        elif self.orientation in ('left', 'right'):
            text_offset = floor(font_size / 2)

-        if text_offset:
-            style_conf.update({
-                # offset of text *away from* axis line in px
-                # use approx. half the font pixel size (height)
-                'tickTextOffset': text_offset,
-            })
-
-        self.setStyle(**style_conf)
+        self.setStyle(**{
+            'textFillLimits': [(0, 0.5)],
+            'tickFont': self._dpi_font.font,
+
+            # offset of text *away from* axis line in px
+            # use approx. half the font pixel size (height)
+            'tickTextOffset': text_offset,
+        })
        self.setTickFont(_font.font)

        # NOTE: this is for surrounding "border"
        self.setPen(_axis_pen)

        # this is the text color
+        # self.setTextPen(pg.mkPen(hcolor(text_color)))
        self.text_color = text_color

-        # generate a bounding rect based on sizing to a "typical"
-        # maximum length-ed string defined as init default.
        self.typical_br = _font._qfm.boundingRect(typical_max_str)

        # size the pertinent axis dimension to a "typical value"
        self.size_to_values()

-        # NOTE: requires override ``.tickValues()`` method seen below.
-        if lru_cache_tick_strings:
-            self.tickStrings = lru_cache(
-                maxsize=2**20
-            )(self.tickStrings)
-
-        # axis "sticky" labels
-        self._stickies: dict[str, YAxisLabel] = {}
-
-    # NOTE: only overridden to cast tick values entries into tuples
-    # for use with the lru caching.
-    def tickValues(
-        self,
-        minVal: float,
-        maxVal: float,
-        size: int,
-
-    ) -> list[tuple[float, tuple[str]]]:
-        '''
-        Repack tick values into tuples for lru caching.
-
-        '''
-        ticks = []
-        for scalar, values in super().tickValues(minVal, maxVal, size):
-            ticks.append((
-                scalar,
-                tuple(values),  # this
-            ))
-
-        return ticks
-
    @property
    def text_color(self) -> str:
        return self._text_color
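
The tuple() repacking in the removed `tickValues()` override exists because `functools.lru_cache` hashes its arguments, and numpy arrays and lists are unhashable; a minimal sketch of the same pattern:

from functools import lru_cache

@lru_cache(maxsize=2**20)
def tick_strings(values: tuple[float, ...], scale: float) -> list[str]:
    # tuples are hashable, so repeated (values, scale) pairs hit the cache
    return [f'{v * scale:g}' for v in values]

assert tick_strings((1.0, 2.5), 1.0) == ['1', '2.5']
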
@@ -150,38 +106,6 @@ class Axis(pg.AxisItem):
    def txt_offsets(self) -> tuple[int, int]:
        return tuple(self.style['tickTextOffset'])

-    def add_sticky(
-        self,
-        pi: pgo.PlotItem,
-        name: None | str = None,
-        digits: None | int = 2,
-        bg_color='default',
-        fg_color='black',
-
-    ) -> YAxisLabel:
-
-        # if the sticky is for our symbol
-        # use the tick size precision for display
-        name = name or pi.name
-        digits = digits or 2
-
-        # TODO: ``._ysticks`` should really be an attr on each
-        # ``PlotItem`` now instead of the containing widget (because of
-        # overlays) ?
-
-        # add y-axis "last" value label
-        sticky = self._stickies[name] = YAxisLabel(
-            pi=pi,
-            parent=self,
-            digits=digits,  # TODO: pass this from symbol data
-            opacity=0.9,  # slight see-through
-            bg_color=bg_color,
-            fg_color=fg_color,
-        )
-
-        pi.sigRangeChanged.connect(sticky.update_on_resize)
-        return sticky
-

class PriceAxis(Axis):

@@ -243,6 +167,7 @@ class PriceAxis(Axis):
        self._min_tick = size

    def size_to_values(self) -> None:
+        # self.typical_br = _font._qfm.boundingRect(typical_max_str)
        self.setWidth(self.typical_br.width())

        # XXX: drop for now since it just eats up h space

@@ -297,50 +222,28 @@ class DynamicDateAxis(Axis):

    ) -> list[str]:

-        # XX: ARGGGGG AG:LKSKDJF:LKJSDFD
-        chart = self.pi.chart_widget
+        chart = self.linkedsplits.chart
+        flow = chart._flows[chart.name]
+        shm = flow.shm
+        bars = shm.array
+        first = shm._first.value

-        viz = chart._vizs[chart.name]
-        shm = viz.shm
-        array = shm.array
-        times = array['time']
-        i_0, i_l = times[0], times[-1]
+        bars_len = len(bars)
+        times = bars['time']

-        # edge cases
-        if (
-            not indexes
-            or
-            (indexes[0] < i_0
-             and indexes[-1] < i_l)
-            or
-            (indexes[0] > i_0
-             and indexes[-1] > i_l)
-        ):
-            return []
-
-        if viz.index_field == 'index':
-            arr_len = times.shape[0]
-            first = shm._first.value
-            epochs = times[
-                list(
-                    map(
-                        int,
-                        filter(
-                            lambda i: i > 0 and i < arr_len,
-                            (i - first for i in indexes)
-                        )
-                    )
-                )
-            ]
-        else:
-            epochs = list(map(int, indexes))
+        epochs = times[list(
+            map(
+                int,
+                filter(
+                    lambda i: i > 0 and i < bars_len,
+                    (i-first for i in indexes)
+                )
+            )
+        )]

        # TODO: **don't** have this hard coded shift to EST
        # delay = times[-1] - times[-2]
-        dts = np.array(
-            epochs,
-            dtype='datetime64[s]',
-        )
+        dts = np.array(epochs, dtype='datetime64[s]')

        # see units listing:
        # https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
@@ -358,39 +261,24 @@ class DynamicDateAxis(Axis):
        spacing: float,

    ) -> list[str]:

-        return self._indexes_to_timestrs(values)
-
        # NOTE: handy for debugging the lru cache
        # info = self.tickStrings.cache_info()
        # print(info)
+        return self._indexes_to_timestrs(values)


class AxisLabel(pg.GraphicsObject):

-    # relative offsets *OF* the bounding rect relative
-    # to parent graphics object.
-    # eg. <parent>| => <_x_br_offset> => | <text> |
-    _x_br_offset: float = 0
-    _y_br_offset: float = 0
-
-    # relative offsets of text *within* bounding rect
-    # eg. | <_x_margin> => <text> |
-    _x_margin: float = 0
-    _y_margin: float = 0
-
-    # multiplier of the text content's height in order
-    # to force a larger (y-dimension) bounding rect.
-    _y_txt_h_scaling: float = 1
+    _x_margin = 0
+    _y_margin = 0

    def __init__(
        self,
        parent: pg.GraphicsItem,
        digits: int = 2,

-        bg_color: str = 'default',
+        bg_color: str = 'bracket',
        fg_color: str = 'black',
-        opacity: int = .8,  # XXX: seriously don't set this to 0
+        opacity: int = 1,  # XXX: seriously don't set this to 0
        font_size: str = 'default',

        use_arrow: bool = True,

@@ -401,7 +289,6 @@ class AxisLabel(pg.GraphicsObject):
        self.setParentItem(parent)

        self.setFlag(self.ItemIgnoresTransformations)
-        self.setZValue(100)

        # XXX: pretty sure this is faster
        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

@@ -433,14 +320,14 @@ class AxisLabel(pg.GraphicsObject):
        p: QtGui.QPainter,
        opt: QtWidgets.QStyleOptionGraphicsItem,
        w: QtWidgets.QWidget

    ) -> None:
-        '''
-        Draw a filled rectangle based on the size of ``.label_str`` text.
+        """Draw a filled rectangle based on the size of ``.label_str`` text.

        Subtypes can customize further by overloading ``.draw()``.

-        '''
+        """
+        # p.setCompositionMode(QtWidgets.QPainter.CompositionMode_SourceOver)

        if self.label_str:

            # if not self.rect:

@@ -451,11 +338,7 @@ class AxisLabel(pg.GraphicsObject):

            p.setFont(self._dpifont.font)
            p.setPen(self.fg_color)
-            p.drawText(
-                self.rect,
-                self.text_flags,
-                self.label_str,
-            )
+            p.drawText(self.rect, self.text_flags, self.label_str)

    def draw(
        self,

@@ -463,8 +346,6 @@ class AxisLabel(pg.GraphicsObject):
        rect: QtCore.QRectF
    ) -> None:

-        p.setOpacity(self.opacity)
-
        if self._use_arrow:
            if not self.path:
                self._draw_arrow_path()

@@ -472,13 +353,15 @@ class AxisLabel(pg.GraphicsObject):
            p.drawPath(self.path)
            p.fillPath(self.path, pg.mkBrush(self.bg_color))

+        # this adds a nice black outline around the label for some odd
+        # reason; ok by us
+        p.setOpacity(self.opacity)
+
        # this causes the L1 labels to glitch out if used in the subtype
        # and it will leave a small black strip with the arrow path if
        # done before the above
-        p.fillRect(
-            self.rect,
-            self.bg_color,
-        )
+        p.fillRect(self.rect, self.bg_color)

    def boundingRect(self):  # noqa
        '''

@@ -522,18 +405,15 @@ class AxisLabel(pg.GraphicsObject):
        txt_h, txt_w = txt_br.height(), txt_br.width()
        # print(f'wsw: {self._dpifont.boundingRect(" ")}')

-        # allow subtypes to override width and height
+        # allow subtypes to specify a static width and height
        h, w = self.size_hint()
+        # print(f'axis size: {self._parent.size()}')
+        # print(f'axis geo: {self._parent.geometry()}')

        self.rect = QtCore.QRectF(
-            # relative bounds offsets
-            self._x_br_offset,
-            self._y_br_offset,
-
+            0, 0,
            (w or txt_w) + self._x_margin / 2,
-            (h or txt_h) * self._y_txt_h_scaling + (self._y_margin / 2),
+            (h or txt_h) + self._y_margin / 2,
        )
        # print(self.rect)
        # hb = self.path.controlPointRect()
@@ -609,7 +489,7 @@ class XAxisLabel(AxisLabel):


class YAxisLabel(AxisLabel):
-    _y_margin: int = 4
+    _y_margin = 4

    text_flags = (
        QtCore.Qt.AlignLeft

@@ -620,19 +500,19 @@ class YAxisLabel(AxisLabel):

    def __init__(
        self,
-        pi: pgo.PlotItem,
+        chart,
        *args,
        **kwargs
    ) -> None:

        super().__init__(*args, **kwargs)

-        self._pi = pi
-        pi.sigRangeChanged.connect(self.update_on_resize)
+        self._chart = chart
+        chart.sigRangeChanged.connect(self.update_on_resize)

        self._last_datum = (None, None)

-        self.x_offset = 0
        # pull text offset from axis from parent axis
        if getattr(self._parent, 'txt_offsets', False):
            self.x_offset, y_offset = self._parent.txt_offsets()

@@ -651,8 +531,7 @@ class YAxisLabel(AxisLabel):
        value: float,  # data for text

        # on odd dimension and/or adds nice black line
-        x_offset: int = 0,
+        x_offset: Optional[int] = None

    ) -> None:

        # this is read inside ``.paint()``

@@ -698,7 +577,7 @@ class YAxisLabel(AxisLabel):
        self._last_datum = (index, value)

        self.update_label(
-            self._pi.mapFromView(QPointF(index, value)),
+            self._chart.mapFromView(QPointF(index, value)),
            value
        )

piker/ui/_chart.py (1017 lines changed): file diff suppressed because it is too large.
@@ -15,30 +15,17 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
-Graphics downsampling using the infamous M4 algorithm.
-
-This is one of ``piker``'s secret weapons allowing us to boss all other
-charting platforms B)
-
-(AND DON'T YOU DARE TAKE THIS CODE WITHOUT CREDIT OR WE'LL SUE UR F#&@* ASS).
-
-NOTES: this method is a so called "visualization driven data
-aggregation" approach. It gives error-free line chart
-downsampling, see
-further scientific paper resources:
-- http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
-- http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
-
-Details on implementation of this algo are based in,
-https://github.com/pikers/piker/issues/109
+Graphics related downsampling routines for compressing to pixel
+limits on the display device.

'''
import math
from typing import Optional

import numpy as np
+from numpy.lib import recfunctions as rfn
from numba import (
-    njit,
+    jit,
    # float64, optional, int64,
)

@@ -48,6 +35,109 @@ from ..log import get_logger
log = get_logger(__name__)


+def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
+    '''
+    Convert an OHLC struct-array containing 'high'/'low' columns
+    to a "joined" max/min 1-d array.
+
+    '''
+    index = ohlc['index']
+    hls = ohlc[[
+        'low',
+        'high',
+    ]]
+
+    mxmn = np.empty(2*hls.size, dtype=np.float64)
+    x = np.empty(2*hls.size, dtype=np.float64)
+    trace_hl(hls, mxmn, x, index[0])
+    x = x + index[0]
+
+    return mxmn, x
+
+
+@jit(
+    # TODO: the type annots..
+    # float64[:](float64[:],),
+    nopython=True,
+)
+def trace_hl(
+    hl: 'np.ndarray',
+    out: np.ndarray,
+    x: np.ndarray,
+    start: int,
+
+    # the "offset" values in the x-domain which
+    # place the 2 output points around each ``int``
+    # master index.
+    margin: float = 0.43,
+
+) -> None:
+    '''
+    "Trace" the outline of the high-low values of an ohlc sequence
+    as a line such that the maximum deviation (aka dispersion) between
+    bars is preserved.
+
+    This routine is expected to modify input arrays in-place.
+
+    '''
+    last_l = hl['low'][0]
+    last_h = hl['high'][0]
+
+    for i in range(hl.size):
+        row = hl[i]
+        l, h = row['low'], row['high']
+
+        up_diff = h - last_l
+        down_diff = last_h - l
+
+        if up_diff > down_diff:
+            out[2*i + 1] = h
+            out[2*i] = last_l
+        else:
+            out[2*i + 1] = l
+            out[2*i] = last_h
+
+        last_l = l
+        last_h = h
+
+        x[2*i] = int(i) - margin
+        x[2*i + 1] = int(i) + margin
+
+    return out
+
+
+def ohlc_flatten(
+    ohlc: np.ndarray,
+    use_mxmn: bool = True,
+
+) -> tuple[np.ndarray, np.ndarray]:
+    '''
+    Convert an OHLCV struct-array into a flat ready-for-line-plotting
+    1-d array that is 4 times the size with x-domain values distributed
+    evenly (by 0.5 steps) over each index.
+
+    '''
+    index = ohlc['index']
+
+    if use_mxmn:
+        # traces a line optimally over highs to lows
+        # using numba. NOTE: pretty sure this is faster
+        # and looks about the same as the below output.
+        flat, x = hl2mxmn(ohlc)
+
+    else:
+        flat = rfn.structured_to_unstructured(
+            ohlc[['open', 'high', 'low', 'close']]
+        ).flatten()
+
+        x = np.linspace(
+            start=index[0] - 0.5,
+            stop=index[-1] + 0.5,
+            num=len(flat),
+        )
+    return x, flat
+

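A minimal usage sketch for the `ohlc_flatten()` routine added above; the tiny 2-bar struct-array here is fabricated purely for illustration:

import numpy as np

bars = np.array(
    [(0, 1.0, 2.0, 0.5, 1.5), (1, 1.5, 3.0, 1.0, 2.5)],
    dtype=[('index', 'i8'), ('open', 'f8'), ('high', 'f8'),
           ('low', 'f8'), ('close', 'f8')],
)
# the non-numba path: 4 flat y-values per input bar
x, flat = ohlc_flatten(bars, use_mxmn=False)
assert len(flat) == 4 * len(bars)
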
def ds_m4(
    x: np.ndarray,
    y: np.ndarray,

@@ -70,6 +160,16 @@ def ds_m4(
    This is more or less an OHLC style sampling of a line-style series.

    '''
+    # NOTE: this method is a so called "visualization driven data
+    # aggregation" approach. It gives error-free line chart
+    # downsampling, see
+    # further scientific paper resources:
+    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
+
+    # Details on implementation of this algo are based in,
+    # https://github.com/pikers/piker/issues/109
+
    # XXX: from infinite on downsampling viewable graphics:
    # "one thing i remembered about the binning - if you are
    # picking a range within your timeseries the start and end bin

@@ -91,14 +191,6 @@ def ds_m4(
    x_end = x[-1]  # x end value/highest in domain
    xrange = (x_end - x_start)

-    if xrange < 0:
-        log.error(f'-VE M4 X-RANGE: {x_start} -> {x_end}')
-        # XXX: broken x-range calc-case, likely the x-end points
-        # are wrong and have some default value set (such as
-        # x_end -> <some epoch float> while x_start -> 0.5).
-        # breakpoint()
-        return None
-
    # XXX: always round up on the input pixels
    # lnx = len(x)
    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
@@ -131,20 +223,14 @@ def ds_m4(
    assert frames >= (xrange / uppx)

    # call into ``numba``
-    (
-        nb,
-        x_out,
-        y_out,
-        ymn,
-        ymx,
-    ) = _m4(
+    nb, i_win, y_out = _m4(
        x,
        y,

        frames,

        # TODO: see func below..
-        # x_out,
+        # i_win,
        # y_out,

        # first index in x data to start at

@@ -157,14 +243,14 @@ def ds_m4(
    # filter out any overshoot in the input allocation arrays by
    # removing zero-ed tail entries which should start at a certain
    # index.
-    x_out = x_out[x_out != 0]
-    y_out = y_out[:x_out.size]
+    i_win = i_win[i_win != 0]
+    y_out = y_out[:i_win.size]

-    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
-    return nb, x_out, y_out, ymn, ymx
+    return nb, i_win, y_out


-@njit(
+@jit(
+    nopython=True,
    nogil=True,
)
def _m4(

@@ -174,8 +260,8 @@ def _m4(

    frames: int,

-    # TODO: using this approach, having the ``.zeros()`` alloc lines
-    # below in pure python, there were segs faults and alloc crashes..
+    # TODO: using this approach by having the ``.zeros()`` alloc lines
+    # below in pure python was causing seg faults and alloc crashes..
    # we might need to see how it behaves with shm arrays and consider
    # allocating them once at startup?

@@ -188,22 +274,14 @@ def _m4(
    x_start: int,
    step: float,

-) -> tuple[
-    int,
-    np.ndarray,
-    np.ndarray,
-    float,
-    float,
-]:
-    '''
-    Implementation of the m4 algorithm in ``numba``:
-    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
-
-    '''
+) -> int:
+    # nbins = len(i_win)
+    # count = len(xs)
    # these are pre-allocated and mutated by ``numba``
    # code in-place.
    y_out = np.zeros((frames, 4), ys.dtype)
-    x_out = np.zeros(frames, xs.dtype)
+    i_win = np.zeros(frames, xs.dtype)

    bincount = 0
    x_left = x_start

@@ -217,34 +295,24 @@ def _m4(

    # set all bins in the left-most entry to the starting left-most x value
    # (aka a row broadcast).
-    x_out[bincount] = x_left
+    i_win[bincount] = x_left
    # set all y-values to the first value passed in.
    y_out[bincount] = ys[0]

-    # full input y-data mx and mn
-    mx: float = -np.inf
-    mn: float = np.inf
-
-    # compute OHLC style max / min values per window sized x-frame.
    for i in range(len(xs)):

        x = xs[i]
        y = ys[i]

        if x < x_left + step:   # the current window "step" is [bin, bin+1)
-            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
-            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            y_out[bincount, 2] = max(y, y_out[bincount, 2])
            y_out[bincount, 3] = y
-            mx = max(mx, ymx)
-            mn = min(mn, ymn)

        else:
            # Find the next bin
            while x >= x_left + step:
                x_left += step

            bincount += 1
-            x_out[bincount] = x_left
+            i_win[bincount] = x_left
            y_out[bincount] = y

-    return bincount, x_out, y_out, mn, mx
+    return bincount, i_win, y_out
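
For intuition, here is a plain-numpy rendering of the per-bin invariant `_m4` maintains (first, min, max and last y value per pixel-width window); this is a slow reference sketch, not the numba path above:

import numpy as np

def m4_reference(x: np.ndarray, y: np.ndarray, nbins: int) -> np.ndarray:
    # assign each sample to one of `nbins` equal-width x windows
    edges = np.linspace(x[0], x[-1], nbins + 1)
    ids = np.clip(np.searchsorted(edges, x, side='right') - 1, 0, nbins - 1)

    out = np.zeros((nbins, 4), dtype=y.dtype)
    for b in range(nbins):
        ys = y[ids == b]
        if ys.size:
            # the M4 invariant: open, min, max, close per window
            out[b] = (ys[0], ys.min(), ys.max(), ys[-1])
    return out
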
@ -18,13 +18,8 @@
|
||||||
Mouse interaction graphics
|
Mouse interaction graphics
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from typing import (
|
from typing import Optional, Callable
|
||||||
Optional,
|
|
||||||
Callable,
|
|
||||||
TYPE_CHECKING,
|
|
||||||
)
|
|
||||||
|
|
||||||
import inspect
|
import inspect
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
@ -41,12 +36,6 @@ from ._style import (
|
||||||
from ._axes import YAxisLabel, XAxisLabel
|
from ._axes import YAxisLabel, XAxisLabel
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from ._chart import (
|
|
||||||
ChartPlotWidget,
|
|
||||||
LinkedSplits,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
@@ -69,9 +58,9 @@ class LineDot(pg.CurvePoint):
         curve: pg.PlotCurveItem,
         index: int,
 
-        plot: ChartPlotWidget,  # type: ingore # noqa
+        plot: 'ChartPlotWidget',  # type: ingore # noqa
         pos=None,
-        color: str = 'bracket',
+        color: str = 'default_light',
 
     ) -> None:
         # scale from dpi aware font size

@@ -162,7 +151,7 @@ class ContentsLabel(pg.LabelItem):
     def __init__(
         self,
 
-        # chart: ChartPlotWidget,  # noqa
+        # chart: 'ChartPlotWidget',  # noqa
         view: pg.ViewBox,
 
         anchor_at: str = ('top', 'right'),

@@ -198,11 +187,12 @@ class ContentsLabel(pg.LabelItem):
         self,
 
         name: str,
-        ix: int,
+        index: int,
         array: np.ndarray,
 
     ) -> None:
         # this being "html" is the dumbest shit :eyeroll:
+        first = array[0]['index']
 
         self.setText(
             "<b>i</b>:{index}<br/>"

@@ -215,7 +205,7 @@ class ContentsLabel(pg.LabelItem):
             "<b>C</b>:{}<br/>"
             "<b>V</b>:{}<br/>"
             "<b>wap</b>:{}".format(
-                *array[ix][
+                *array[index - first][
                     [
                         'time',
                         'open',

@@ -227,7 +217,7 @@ class ContentsLabel(pg.LabelItem):
                     ]
                 ],
                 name=name,
-                index=ix,
+                index=index,
             )
         )
 

@@ -235,12 +225,15 @@ class ContentsLabel(pg.LabelItem):
         self,
 
         name: str,
-        ix: int,
+        index: int,
         array: np.ndarray,
 
     ) -> None:
-        data = array[ix][name]
-        self.setText(f"{name}: {data:.2f}")
+        first = array[0]['index']
+        if index < array[-1]['index'] and index > first:
+            data = array[index - first][name]
+            self.setText(f"{name}: {data:.2f}")
 
 
 class ContentsLabels:
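The `array[index - first]` change above is worth a note: shm arrays carry an absolute `'index'` column, so a cursor's absolute index has to be shifted by the first row's index to get a valid row offset. A small illustrative sketch of that mapping:

```python
import numpy as np

# toy shm-style record array whose 'index' column starts at 100
array = np.array(
    [(100, 1.0), (101, 2.0), (102, 3.0)],
    dtype=[('index', int), ('close', float)],
)

def row_for(index: int):
    first = array[0]['index']
    # only dereference when the absolute index lands inside the array,
    # mirroring the bounds check in `update_from_value()` above
    if first < index < array[-1]['index']:
        return array[index - first]
    return None

assert row_for(101)['close'] == 2.0
```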
@@ -251,7 +244,7 @@ class ContentsLabels:
     '''
     def __init__(
         self,
-        linkedsplits: LinkedSplits,  # type: ignore # noqa
+        linkedsplits: 'LinkedSplits',  # type: ignore # noqa
 
     ) -> None:
 

@@ -265,20 +258,17 @@ class ContentsLabels:
 
     def update_labels(
         self,
-        x_in: int,
+        index: int,
 
     ) -> None:
         for chart, name, label, update in self._labels:
 
-            viz = chart.get_viz(name)
-            array = viz.shm.array
-            index = array[viz.index_field]
-            start = index[0]
-            stop = index[-1]
+            flow = chart._flows[name]
+            array = flow.shm.array
 
             if not (
-                x_in >= start
-                and x_in <= stop
+                index >= 0
+                and index < array[-1]['index']
             ):
                 # out of range
                 print('WTF out of range?')

@@ -287,10 +277,7 @@ class ContentsLabels:
             # call provided update func with data point
             try:
                 label.show()
-                ix = np.searchsorted(index, x_in)
-                if ix > len(array):
-                    breakpoint()
-                update(ix, array)
+                update(index, array)
 
             except IndexError:
                 log.exception(f"Failed to update label: {name}")

@@ -302,7 +289,7 @@ class ContentsLabels:
     def add_label(
 
         self,
-        chart: ChartPlotWidget,  # type: ignore # noqa
+        chart: 'ChartPlotWidget',  # type: ignore # noqa
         name: str,
         anchor_at: tuple[str, str] = ('top', 'left'),
         update_func: Callable = ContentsLabel.update_from_value,

@@ -329,7 +316,7 @@ class Cursor(pg.GraphicsObject):
     def __init__(
 
         self,
-        linkedsplits: LinkedSplits,  # noqa
+        linkedsplits: 'LinkedSplits',  # noqa
         digits: int = 0
 
     ) -> None:

@@ -338,8 +325,6 @@ class Cursor(pg.GraphicsObject):
 
         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
-        self.xaxis_label: Optional[XAxisLabel] = None
-        self.always_show_xlabel: bool = True
         self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits

@@ -351,7 +336,7 @@ class Cursor(pg.GraphicsObject):
         # XXX: not sure why these are instance variables?
         # It's not like we can change them on the fly..?
         self.pen = pg.mkPen(
-            color=hcolor('bracket'),
+            color=hcolor('default'),
             style=QtCore.Qt.DashLine,
         )
         self.lines_pen = pg.mkPen(

@@ -367,7 +352,7 @@ class Cursor(pg.GraphicsObject):
         self._lw = self.pixelWidth() * self.lines_pen.width()
 
         # xhair label's color name
-        self.label_color: str = 'bracket'
+        self.label_color: str = 'default'
 
         self._y_label_update: bool = True
 

@@ -400,7 +385,7 @@ class Cursor(pg.GraphicsObject):
 
     def add_plot(
         self,
-        plot: ChartPlotWidget,  # noqa
+        plot: 'ChartPlotWidget',  # noqa
         digits: int = 0,
 
     ) -> None:

@@ -420,7 +405,7 @@ class Cursor(pg.GraphicsObject):
         hl.hide()
 
         yl = YAxisLabel(
-            pi=plot.plotItem,
+            chart=plot,
             # parent=plot.getAxis('right'),
             parent=plot.pi_overlay.get_axis(plot.plotItem, 'right'),
             digits=digits or self.digits,

@@ -484,58 +469,39 @@ class Cursor(pg.GraphicsObject):
 
     def add_curve_cursor(
         self,
-        chart: ChartPlotWidget,  # noqa
+        plot: 'ChartPlotWidget',  # noqa
         curve: 'PlotCurveItem',  # noqa
 
     ) -> LineDot:
-        # if this chart contains curves add line dot "cursors" to denote
+        # if this plot contains curves add line dot "cursors" to denote
         # the current sample under the mouse
-        main_viz = chart.get_viz(chart.name)
+        main_flow = plot._flows[plot.name]
 
         # read out last index
-        i = main_viz.shm.array[-1]['index']
+        i = main_flow.shm.array[-1]['index']
         cursor = LineDot(
             curve,
             index=i,
-            plot=chart
+            plot=plot
         )
-        chart.addItem(cursor)
-        self.graphics[chart].setdefault('cursors', []).append(cursor)
+        plot.addItem(cursor)
+        self.graphics[plot].setdefault('cursors', []).append(cursor)
         return cursor
 
-    def mouseAction(
-        self,
-        action: str,
-        plot: ChartPlotWidget,
-
-    ) -> None:  # noqa
-
+    def mouseAction(self, action, plot):  # noqa
         log.debug(f"{(action, plot.name)}")
         if action == 'Enter':
             self.active_plot = plot
-            plot.linked.godwidget._active_cursor = self
 
             # show horiz line and y-label
             self.graphics[plot]['hl'].show()
             self.graphics[plot]['yl'].show()
 
-            if (
-                not self.always_show_xlabel
-                and not self.xaxis_label.isVisible()
-            ):
-                self.xaxis_label.show()
-
-        # Leave: hide horiz line and y-label
-        else:
+        else:  # Leave
+            # hide horiz line and y-label
             self.graphics[plot]['hl'].hide()
             self.graphics[plot]['yl'].hide()
 
-            if (
-                not self.always_show_xlabel
-                and self.xaxis_label.isVisible()
-            ):
-                self.xaxis_label.hide()
-
     def mouseMoved(
         self,
         coords: tuple[QPointF],  # noqa

@@ -624,17 +590,13 @@ class Cursor(pg.GraphicsObject):
                 left_axis_width += left.width()
 
             # map back to abs (label-local) coordinates
-            if (
-                self.always_show_xlabel
-                or self.xaxis_label.isVisible()
-            ):
-                self.xaxis_label.update_label(
-                    abs_pos=(
-                        plot.mapFromView(QPointF(vl_x, iy)) -
-                        QPointF(left_axis_width, 0)
-                    ),
-                    value=ix,
-                )
+            self.xaxis_label.update_label(
+                abs_pos=(
+                    plot.mapFromView(QPointF(vl_x, iy)) -
+                    QPointF(left_axis_width, 0)
+                ),
+                value=ix,
+            )
 
         self._datum_xy = ix, iy
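The `Cursor` hunks above remove an `always_show_xlabel` toggle from the `storage_middleware_layer` side. A condensed, hypothetical sketch of the gating it implemented (names assumed from the diff; this is not piker's actual class):

```python
class XLabelGate:
    '''
    Toggle an x-axis label only on plot enter/leave transitions when
    `always_show` is disabled, mirroring `Cursor.mouseAction()` above.

    '''
    def __init__(self, label, always_show: bool = True):
        self.label = label
        self.always_show = always_show

    def on_action(self, action: str) -> None:
        entered = (action == 'Enter')
        if self.always_show:
            return  # label visibility managed elsewhere
        if entered and not self.label.isVisible():
            self.label.show()
        elif not entered and self.label.isVisible():
            self.label.hide()
```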
@@ -28,7 +28,10 @@ from PyQt5.QtWidgets import QGraphicsItem
 from PyQt5.QtCore import (
     Qt,
     QLineF,
+    QSizeF,
     QRectF,
+    # QRect,
+    QPointF,
 )
 from PyQt5.QtGui import (
     QPainter,

@@ -36,8 +39,11 @@ from PyQt5.QtGui import (
 )
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
+# from ._compression import (
+#     # ohlc_to_m4_line,
+#     ds_m4,
+# )
 from ..log import get_logger
-from .._profile import Profiler
 
 
 log = get_logger(__name__)

@@ -51,117 +57,7 @@ _line_styles: dict[str, int] = {
 }
 
 
-class FlowGraphic(pg.GraphicsObject):
-    '''
-    Base class with minimal interface for `QPainterPath` implemented,
-    real-time updated "data flow" graphics.
-
-    See subtypes below.
-
-    '''
-    # sub-type customization methods
-    declare_paintables: Callable | None = None
-    sub_paint: Callable | None = None
-
-    # XXX-NOTE-XXX: graphics caching B)
-    # see explanation for different caching modes:
-    # https://stackoverflow.com/a/39410081
-    cache_mode: int = QGraphicsItem.DeviceCoordinateCache
-    # XXX: WARNING item caching seems to only be useful
-    # if we don't re-generate the entire QPainterPath every time
-    # don't ever use this - it's a colossal nightmare of artefacts
-    # and is disastrous for performance.
-    # QGraphicsItem.ItemCoordinateCache
-    # TODO: still questions todo with coord-cacheing that we should
-    # probably talk to a core dev about:
-    # - if this makes trasform interactions slower (such as zooming)
-    #   and if so maybe if/when we implement a "history" mode for the
-    #   view we disable this in that mode?
-
-    def __init__(
-        self,
-        *args,
-        name: str | None = None,
-
-        # line styling
-        color: str = 'bracket',
-        last_step_color: str | None = None,
-        fill_color: Optional[str] = None,
-        style: str = 'solid',
-
-        **kwargs
-
-    ) -> None:
-
-        self._name = name
-
-        # primary graphics item used for history
-        self.path: QPainterPath = QPainterPath()
-
-        # additional path that can be optionally used for appends which
-        # tries to avoid triggering an update/redraw of the presumably
-        # larger historical ``.path`` above. the flag to enable
-        # this behaviour is found in `Renderer.render()`.
-        self.fast_path: QPainterPath | None = None
-
-        # TODO: evaluating the path capacity stuff and see
-        # if it really makes much diff pre-allocating it.
-        # self._last_cap: int = 0
-        # cap = path.capacity()
-        # if cap != self._last_cap:
-        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
-        #     self._last_cap = cap
-
-        # all history of curve is drawn in single px thickness
-        self._color: str = color
-        pen = pg.mkPen(hcolor(color), width=1)
-        pen.setStyle(_line_styles[style])
-
-        if 'dash' in style:
-            pen.setDashPattern([8, 3])
-
-        self._pen = pen
-        self._brush = pg.functions.mkBrush(
-            hcolor(fill_color or color)
-        )
-
-        # last segment is drawn in 2px thickness for emphasis
-        if last_step_color:
-            self.last_step_pen = pg.mkPen(
-                hcolor(last_step_color),
-                width=2,
-            )
-        else:
-            self.last_step_pen = pg.mkPen(
-                self._pen,
-                width=2,
-            )
-
-        self._last_line: QLineF = QLineF()
-
-        super().__init__(*args, **kwargs)
-
-        # apply cache mode
-        self.setCacheMode(self.cache_mode)
-
-    def x_uppx(self) -> int:
-
-        px_vecs = self.pixelVectors()[0]
-        if px_vecs:
-            return px_vecs.x()
-        else:
-            return 0
-
-    def x_last(self) -> float | None:
-        '''
-        Return the last most x value of the last line segment or if not
-        drawn yet, ``None``.
-
-        '''
-        return self._last_line.x1() if self._last_line else None
-
-
-class Curve(FlowGraphic):
+class Curve(pg.GraphicsObject):
     '''
     A faster, simpler, append friendly version of
     ``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
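The removed `FlowGraphic` base class above centralizes the `QGraphicsItem` cache-mode choice its comments discuss: device-coordinate caching re-uses the rastered pixmap between paint events (useful only when the `QPainterPath` is not rebuilt every frame), while item-coordinate caching is called out as artifact-prone. A hedged, minimal sketch of just that piece:

```python
from PyQt5.QtCore import QRectF
from PyQt5.QtWidgets import QGraphicsItem

class CachedGraphic(QGraphicsItem):
    '''
    Minimal sketch of the caching trade-off described above; the
    `boundingRect()`/`paint()` stubs are only here to make the
    abstract base instantiable.

    '''
    def __init__(self, parent=None):
        super().__init__(parent)
        # re-render only when the item is invalidated, not on every
        # view transform (pan/zoom) pass
        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

    def boundingRect(self) -> QRectF:
        return QRectF(0, 0, 1, 1)

    def paint(self, p, opt, widget=None) -> None:
        pass
```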
@@ -178,7 +74,7 @@ class Curve(FlowGraphic):
     lower level graphics data can be rendered in different threads and
     then read and drawn in this main thread without having to worry
     about dealing with Qt's concurrency primitives. See
-    ``piker.ui._render.Renderer`` for details and logic related to lower
+    ``piker.ui._flows.Renderer`` for details and logic related to lower
     level path generation and incremental update. The main differences in
     the path generation code include:
 

@@ -190,38 +86,127 @@ class Curve(FlowGraphic):
     updates don't trigger a full path redraw.
 
     '''
-    # TODO: can we remove this?
-    # sub_br: Optional[Callable] = None
+    # sub-type customization methods
+    sub_br: Optional[Callable] = None
+    sub_paint: Optional[Callable] = None
+    declare_paintables: Optional[Callable] = None
 
     def __init__(
         self,
         *args,
 
-        # color: str = 'default_lightest',
-        # fill_color: Optional[str] = None,
-        # style: str = 'solid',
+        step_mode: bool = False,
+        color: str = 'default_lightest',
+        fill_color: Optional[str] = None,
+        style: str = 'solid',
+        name: Optional[str] = None,
+        use_fpath: bool = True,
 
         **kwargs
 
     ) -> None:
 
+        self._name = name
+
         # brutaaalll, see comments within..
         self.yData = None
         self.xData = None
 
+        # self._last_cap: int = 0
+        self.path: Optional[QPainterPath] = None
+
+        # additional path used for appends which tries to avoid
+        # triggering an update/redraw of the presumably larger
+        # historical ``.path`` above.
+        self.use_fpath = use_fpath
+        self.fast_path: Optional[QPainterPath] = None
+
         # TODO: we can probably just dispense with the parent since
         # we're basically only using the pen setting now...
         super().__init__(*args, **kwargs)
 
-        self._last_line: QLineF = QLineF()
+        # all history of curve is drawn in single px thickness
+        pen = pg.mkPen(hcolor(color))
+        pen.setStyle(_line_styles[style])
+
+        if 'dash' in style:
+            pen.setDashPattern([8, 3])
+
+        self._pen = pen
+
+        # last segment is drawn in 2px thickness for emphasis
+        # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
+        self.last_step_pen = pg.mkPen(pen, width=2)
+
+        # self._last_line: Optional[QLineF] = None
+        self._last_line = QLineF()
+        self._last_w: float = 1
+
+        # flat-top style histogram-like discrete curve
+        # self._step_mode: bool = step_mode
 
         # self._fill = True
+        self._brush = pg.functions.mkBrush(hcolor(fill_color or color))
+
+        # NOTE: this setting seems to mostly prevent redraws on mouse
+        # interaction which is a huge boon for avg interaction latency.
+
+        # TODO: one question still remaining is if this makes trasform
+        # interactions slower (such as zooming) and if so maybe if/when
+        # we implement a "history" mode for the view we disable this in
+        # that mode?
+        # don't enable caching by default for the case where the
+        # only thing drawn is the "last" line segment which can
+        # have a weird artifact where it won't be fully drawn to its
+        # endpoint (something we saw on trade rate curves)
+        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
+
+        # XXX: see explanation for different caching modes:
+        # https://stackoverflow.com/a/39410081
+        # seems to only be useful if we don't re-generate the entire
+        # QPainterPath every time
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+
+        # don't ever use this - it's a colossal nightmare of artefacts
+        # and is disastrous for performance.
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
 
         # allow sub-type customization
         declare = self.declare_paintables
         if declare:
             declare()
 
+    # TODO: probably stick this in a new parent
+    # type which will contain our own version of
+    # what ``PlotCurveItem`` had in terms of base
+    # functionality? A `FlowGraphic` maybe?
+    def x_uppx(self) -> int:
+
+        px_vecs = self.pixelVectors()[0]
+        if px_vecs:
+            xs_in_px = px_vecs.x()
+            return round(xs_in_px)
+        else:
+            return 0
+
+    def px_width(self) -> float:
+
+        vb = self.getViewBox()
+        if not vb:
+            return 0
+
+        vr = self.viewRect()
+        l, r = int(vr.left()), int(vr.right())
+
+        start, stop = self._xrange
+        lbar = max(l, start)
+        rbar = min(r, stop)
+
+        return vb.mapViewToDevice(
+            QLineF(lbar, 0, rbar, 0)
+        ).length()
+
     # XXX: lol brutal, the internals of `CurvePoint` (inherited by
     # our `LineDot`) required ``.getData()`` to work..
     def getData(self):

@@ -245,8 +230,8 @@ class Curve(FlowGraphic):
             self.path.clear()
 
         if self.fast_path:
-            self.fast_path.clear()
-            # self.fast_path = None
+            # self.fast_path.clear()
+            self.fast_path = None
 
     @cm
     def reset_cache(self) -> None:

@@ -266,65 +251,77 @@ class Curve(FlowGraphic):
             self.boundingRect = self._path_br
             return self._path_br()
 
-    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
     def _path_br(self):
         '''
         Post init ``.boundingRect()```.
 
         '''
-        # profiler = Profiler(
-        #     msg=f'Curve.boundingRect(): `{self._name}`',
-        #     disabled=not pg_profile_enabled(),
-        #     ms_threshold=ms_slower_then,
-        # )
-        pr = self.path.controlPointRect()
-        hb_tl, hb_br = (
-            pr.topLeft(),
-            pr.bottomRight(),
-        )
-        mn_y = hb_tl.y()
-        mx_y = hb_br.y()
-        most_left = hb_tl.x()
-        most_right = hb_br.x()
-        # profiler('calc path vertices')
-
-        # TODO: if/when we get fast path appends working in the
-        # `Renderer`, then we might need to actually use this..
-        # fp = self.fast_path
-        # if fp:
-        #     fhb = fp.controlPointRect()
-        #     # hb_size = fhb.size() + hb_size
-        #     br = pr.united(fhb)
-
-        # XXX: *was* a way to allow sub-types to extend the
-        # boundingrect calc, but in the one use case for a step curve
-        # doesn't seem like we need it as long as the last line segment
-        # is drawn as it is?
-
-        # sbr = self.sub_br
-        # if sbr:
-        #     # w, h = self.sub_br(w, h)
-        #     sub_br = sbr()
-        #     br = br.united(sub_br)
-
-        # assume plain line graphic and use
-        # default unit step in each direction.
-        ll = self._last_line
-        y1, y2 = ll.y1(), ll.y2()
-        x1, x2 = ll.x1(), ll.x2()
-
-        ymn = min(y1, y2, mn_y)
-        ymx = max(y1, y2, mx_y)
-        most_left = min(x1, x2, most_left)
-        most_right = max(x1, x2, most_right)
-        # profiler('calc last line vertices')
-
-        return QRectF(
-            most_left,
-            ymn,
-            most_right - most_left + 1,
-            ymx,
-        )
+        # hb = self.path.boundingRect()
+        hb = self.path.controlPointRect()
+        hb_size = hb.size()
+
+        fp = self.fast_path
+        if fp:
+            fhb = fp.controlPointRect()
+            hb_size = fhb.size() + hb_size
+
+        # print(f'hb_size: {hb_size}')
+
+        # if self._last_step_rect:
+        #     hb_size += self._last_step_rect.size()
+
+        # if self._line:
+        #     br = self._last_step_rect.bottomRight()
+
+        # tl = QPointF(
+        #     # self._vr[0],
+        #     # hb.topLeft().y(),
+        #     # 0,
+        #     # hb_size.height() + 1
+        # )
+
+        # br = self._last_step_rect.bottomRight()
+
+        w = hb_size.width()
+        h = hb_size.height()
+
+        sbr = self.sub_br
+        if sbr:
+            w, h = self.sub_br(w, h)
+        else:
+            # assume plain line graphic and use
+            # default unit step in each direction.
+
+            # only on a plane line do we include
+            # and extra index step's worth of width
+            # since in the step case the end of the curve
+            # actually terminates earlier so we don't need
+            # this for the last step.
+            w += self._last_w
+            # ll = self._last_line
+            h += 1  # ll.y2() - ll.y1()
+
+        # br = QPointF(
+        #     self._vr[-1],
+        #     # tl.x() + w,
+        #     tl.y() + h,
+        # )
+
+        br = QRectF(
+
+            # top left
+            # hb.topLeft()
+            # tl,
+            QPointF(hb.topLeft()),
+
+            # br,
+            # total size
+            # QSizeF(hb_size)
+            # hb_size,
+            QSizeF(w, h)
+        )
+        # print(f'bounding rect: {br}')
+        return br
 
     def paint(
         self,
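Both versions of `_path_br()` above build the item's bounding box from `QPainterPath.controlPointRect()` (cheap to compute and guaranteed to contain the true bounds for polyline paths) and then pad for the separately drawn last segment. A standalone sketch of that strategy, with padding parameters assumed:

```python
from PyQt5.QtCore import QPointF, QRectF, QSizeF
from PyQt5.QtGui import QPainterPath

def path_bounds(
    path: QPainterPath,
    pad_w: float = 1.0,
    pad_h: float = 1.0,
) -> QRectF:
    # control-point rect: cheaper than `boundingRect()` and always a
    # superset of it, so safe for invalidation purposes
    cpr = path.controlPointRect()
    size = cpr.size()
    # pad so the freshly appended "last datum" segment is never
    # clipped before the next full redraw
    return QRectF(
        QPointF(cpr.topLeft()),
        QSizeF(size.width() + pad_w, size.height() + pad_h),
    )
```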
@@ -334,7 +331,7 @@ class Curve(FlowGraphic):
 
     ) -> None:
 
-        profiler = Profiler(
+        profiler = pg.debug.Profiler(
             msg=f'Curve.paint(): `{self._name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,

@@ -342,14 +339,18 @@ class Curve(FlowGraphic):
 
         sub_paint = self.sub_paint
         if sub_paint:
-            sub_paint(p)
+            sub_paint(p, profiler)
 
         p.setPen(self.last_step_pen)
         p.drawLine(self._last_line)
-        profiler('last datum `.drawLine()`')
+        profiler('.drawLine()')
 
         p.setPen(self._pen)
 
         path = self.path
+        # cap = path.capacity()
+        # if cap != self._last_cap:
+        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
+        #     self._last_cap = cap
 
         if path:
             p.drawPath(path)

@@ -372,30 +373,22 @@ class Curve(FlowGraphic):
         self,
         path: QPainterPath,
         src_data: np.ndarray,
+        render_data: np.ndarray,
         reset: bool,
         array_key: str,
-        index_field: str,
 
     ) -> None:
         # default line draw last call
         # with self.reset_cache():
-        x = src_data[index_field]
-        y = src_data[array_key]
-
-        x_last = x[-1]
-        x_2last = x[-2]
+        x = render_data['index']
+        y = render_data[array_key]
 
         # draw the "current" step graphic segment so it
         # lines up with the "middle" of the current
         # (OHLC) sample.
         self._last_line = QLineF(
-            # NOTE: currently we draw in x-domain
-            # from last datum to current such that
-            # the end of line touches the "beginning"
-            # of the current datum step span.
-            x_2last, y[-2],
-            x_last, y[-1],
+            x[-2], y[-2],
+            x[-1], y[-1],
         )
 
         return x, y

@@ -407,20 +400,17 @@ class Curve(FlowGraphic):
 # (via it's max / min) even when highly zoomed out.
 class FlattenedOHLC(Curve):
 
-    # avoids strange dragging/smearing artifacts when panning..
-    cache_mode: int = QGraphicsItem.NoCache
-
     def draw_last_datum(
         self,
         path: QPainterPath,
         src_data: np.ndarray,
+        render_data: np.ndarray,
         reset: bool,
         array_key: str,
-        index_field: str,
 
     ) -> None:
         lasts = src_data[-2:]
-        x = lasts[index_field]
+        x = lasts['index']
         y = lasts['close']
 
         # draw the "current" step graphic segment so it

@@ -444,9 +434,9 @@ class StepCurve(Curve):
         self,
         path: QPainterPath,
         src_data: np.ndarray,
+        render_data: np.ndarray,
         reset: bool,
         array_key: str,
-        index_field: str,
 
         w: float = 0.5,
 

@@ -455,31 +445,40 @@ class StepCurve(Curve):
         # TODO: remove this and instead place all step curve
         # updating into pre-path data render callbacks.
         # full input data
-        x = src_data[index_field]
+        x = src_data['index']
         y = src_data[array_key]
 
         x_last = x[-1]
-        x_2last = x[-2]
         y_last = y[-1]
-        step_size = x_last - x_2last
 
         # lol, commenting this makes step curves
         # all "black" for me :eyeroll:..
         self._last_line = QLineF(
-            x_2last, 0,
-            x_last, 0,
+            x_last - w, 0,
+            x_last + w, 0,
         )
         self._last_step_rect = QRectF(
-            x_last, 0,
-            step_size, y_last,
+            x_last - w, 0,
+            x_last + w, y_last,
         )
         return x, y
 
     def sub_paint(
         self,
         p: QPainter,
+        profiler: pg.debug.Profiler,
 
     ) -> None:
         # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
         # p.drawRect(self._last_step_rect)
         p.fillRect(self._last_step_rect, self._brush)
+        profiler('.fillRect()')
+
+    def sub_br(
+        self,
+        path_w: float,
+        path_h: float,
+
+    ) -> (float, float):
+        # passthrough
+        return path_w, path_h
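A note on the `StepCurve.draw_last_datum()` geometry above: the `310_plus` side centers the last step's fill rect on the final sample with a fixed half-width `w`, where the newer branch derives the width from the actual inter-sample `step_size`. A hedged sketch of the fixed-width variant (note `QRectF(x, y, w, h)` takes a size, so the width written out explicitly is `2 * w`):

```python
from PyQt5.QtCore import QRectF

def last_step_rect(x_last: float, y_last: float, w: float = 0.5) -> QRectF:
    # fill rect spanning [x_last - w, x_last + w] horizontally and
    # rising from the x-axis to the last y value
    return QRectF(x_last - w, 0, 2 * w, y_last)
```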
piker/ui/_dataviz.py: 1238 changed lines (diff suppressed because it is too large)
piker/ui/_display.py: 1732 changed lines (diff suppressed because it is too large)
@@ -18,27 +18,11 @@
 Higher level annotation editors.
 
 """
-from __future__ import annotations
-from collections import defaultdict
-from typing import (
-    Optional,
-    TYPE_CHECKING
-)
+from dataclasses import dataclass, field
+from typing import Optional
 
 import pyqtgraph as pg
-from pyqtgraph import (
-    ViewBox,
-    Point,
-    QtCore,
-    QtWidgets,
-)
-from PyQt5.QtGui import (
-    QColor,
-)
-from PyQt5.QtWidgets import (
-    QLabel,
-)
-
+from pyqtgraph import ViewBox, Point, QtCore, QtGui
 from pyqtgraph import functions as fn
 from PyQt5.QtCore import QPointF
 import numpy as np

@@ -46,34 +30,28 @@ import numpy as np
 from ._style import hcolor, _font
 from ._lines import LevelLine
 from ..log import get_logger
-from ..data.types import Struct
-
-if TYPE_CHECKING:
-    from ._chart import GodWidget
 
 
 log = get_logger(__name__)
 
 
-class ArrowEditor(Struct):
+@dataclass
+class ArrowEditor:
 
-    godw: GodWidget = None  # type: ignore # noqa
-    _arrows: dict[str, list[pg.ArrowItem]] = {}
+    chart: 'ChartPlotWidget'  # noqa
+    _arrows: field(default_factory=dict)
 
     def add(
         self,
-        plot: pg.PlotItem,
         uid: str,
         x: float,
         y: float,
         color='default',
         pointing: Optional[str] = None,
 
     ) -> pg.ArrowItem:
-        '''
-        Add an arrow graphic to view at given (x, y).
-
-        '''
+        """Add an arrow graphic to view at given (x, y).
+
+        """
         angle = {
             'up': 90,
             'down': -90,

@@ -96,25 +74,25 @@ class ArrowEditor(Struct):
             brush=pg.mkBrush(hcolor(color)),
         )
         arrow.setPos(x, y)
-        self._arrows.setdefault(uid, []).append(arrow)
+        self._arrows[uid] = arrow
 
         # render to view
-        plot.addItem(arrow)
+        self.chart.plotItem.addItem(arrow)
 
         return arrow
 
     def remove(self, arrow) -> bool:
-        for linked in self.godw.iter_linked():
-            linked.chart.plotItem.removeItem(arrow)
+        self.chart.plotItem.removeItem(arrow)
 
 
-class LineEditor(Struct):
-    '''
-    The great editor of linez.
+@dataclass
+class LineEditor:
+    '''The great editor of linez.
 
     '''
-    godw: GodWidget = None  # type: ignore # noqa
-    _order_lines: defaultdict[str, LevelLine] = defaultdict(list)
+    chart: 'ChartPlotWidget' = None  # type: ignore # noqa
+    _order_lines: dict[str, LevelLine] = field(default_factory=dict)
     _active_staged_line: LevelLine = None
 
     def stage_line(

@@ -122,11 +100,11 @@ class LineEditor(Struct):
         line: LevelLine,
 
     ) -> LevelLine:
-        '''
-        Stage a line at the current chart's cursor position
+        """Stage a line at the current chart's cursor position
         and return it.
 
-        '''
+        """
 
         # add a "staged" cursor-tracking line to view
         # and cash it in a a var
         if self._active_staged_line:

@@ -137,25 +115,17 @@ class LineEditor(Struct):
         return line
 
     def unstage_line(self) -> LevelLine:
-        '''
-        Inverse of ``.stage_line()``.
+        """Inverse of ``.stage_line()``.
 
-        '''
-        cursor = self.godw.get_cursor()
-        if not cursor:
-            return None
+        """
+        # chart = self.chart._cursor.active_plot
+        # # chart.setCursor(QtCore.Qt.ArrowCursor)
+        cursor = self.chart.linked.cursor
 
         # delete "staged" cursor tracking line from view
         line = self._active_staged_line
         if line:
-            try:
-                cursor._trackers.remove(line)
-            except KeyError:
-                # when the current cursor doesn't have said line
-                # registered (probably means that user held order mode
-                # key while panning to another view) then we just
-                # ignore the remove error.
-                pass
+            cursor._trackers.remove(line)
             line.delete()
 
         self._active_staged_line = None

@@ -163,58 +133,55 @@ class LineEditor(Struct):
         # show the crosshair y line and label
         cursor.show_xhair()
 
-    def submit_lines(
+    def submit_line(
         self,
-        lines: list[LevelLine],
+        line: LevelLine,
         uuid: str,
 
     ) -> LevelLine:
 
-        # staged_line = self._active_staged_line
-        # if not staged_line:
-        #     raise RuntimeError("No line is currently staged!?")
+        staged_line = self._active_staged_line
+        if not staged_line:
+            raise RuntimeError("No line is currently staged!?")
 
         # for now, until submission reponse arrives
-        for line in lines:
-            line.hide_labels()
+        line.hide_labels()
 
         # register for later lookup/deletion
-        self._order_lines[uuid] += lines
+        self._order_lines[uuid] = line
 
-        return lines
+        return line
 
-    def commit_line(self, uuid: str) -> list[LevelLine]:
-        '''
-        Commit a "staged line" to view.
+    def commit_line(self, uuid: str) -> LevelLine:
+        """Commit a "staged line" to view.
 
         Submits the line graphic under the cursor as a (new) permanent
         graphic in view.
 
-        '''
-        lines = self._order_lines[uuid]
-        if lines:
-            for line in lines:
-                line.show_labels()
-                line.hide_markers()
-                log.debug(f'Level active for level: {line.value()}')
-                # TODO: other flashy things to indicate the order is active
-
-        return lines
+        """
+        try:
+            line = self._order_lines[uuid]
+        except KeyError:
+            log.warning(f'No line for {uuid} could be found?')
+            return
+        else:
+            line.show_labels()
+
+            # TODO: other flashy things to indicate the order is active
+
+            log.debug(f'Level active for level: {line.value()}')
+
+            return line
 
     def lines_under_cursor(self) -> list[LevelLine]:
-        '''
-        Get the line(s) under the cursor position.
+        """Get the line(s) under the cursor position.
 
-        '''
+        """
         # Delete any hoverable under the cursor
-        return self.godw.get_cursor()._hovered
+        return self.chart.linked.cursor._hovered
 
-    def all_lines(self) -> list[LevelLine]:
-        all_lines = []
-        for lines in list(self._order_lines.values()):
-            all_lines.extend(lines)
-
-        return all_lines
+    def all_lines(self) -> tuple[LevelLine]:
+        return tuple(self._order_lines.values())
 
     def remove_line(
         self,

@@ -229,30 +196,29 @@ class LineEditor(Struct):
 
         '''
         # try to look up line from our registry
-        lines = self._order_lines.pop(uuid, None)
-        if lines:
-            cursor = self.godw.get_cursor()
-            if cursor:
-                for line in lines:
-                    # if hovered remove from cursor set
-                    hovered = cursor._hovered
-                    if line in hovered:
-                        hovered.remove(line)
-
-                    log.debug(f'deleting {line} with oid: {uuid}')
-                    line.delete()
+        line = self._order_lines.pop(uuid, line)
+        if line:
+
+            # if hovered remove from cursor set
+            cursor = self.chart.linked.cursor
+            hovered = cursor._hovered
+            if line in hovered:
+                hovered.remove(line)
 
                 # make sure the xhair doesn't get left off
                 # just because we never got a un-hover event
                 cursor.show_xhair()
 
+            log.debug(f'deleting {line} with oid: {uuid}')
+            line.delete()
+
         else:
             log.warning(f'Could not find line for {line}')
 
-        return lines
+        return line
 
 
-class SelectRect(QtWidgets.QGraphicsRectItem):
+class SelectRect(QtGui.QGraphicsRectItem):
 
     def __init__(
         self,

@@ -261,12 +227,12 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
     ) -> None:
         super().__init__(0, 0, 1, 1)
 
-        # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
+        # self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
         self.vb = viewbox
         self._chart: 'ChartPlotWidget' = None  # noqa
 
         # override selection box color
-        color = QColor(hcolor(color))
+        color = QtGui.QColor(hcolor(color))
         self.setPen(fn.mkPen(color, width=1))
         color.setAlpha(66)
         self.setBrush(fn.mkBrush(color))

@@ -274,7 +240,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         self.hide()
         self._label = None
 
-        label = self._label = QLabel()
+        label = self._label = QtGui.QLabel()
         label.setTextFormat(0)  # markdown
         label.setFont(_font.font)
         label.setMargin(0)

@@ -311,8 +277,8 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         # TODO: get bg color working
         palette.setColor(
             self._label.backgroundRole(),
-            # QColor(chart.backgroundBrush()),
-            QColor(hcolor('papas_special')),
+            # QtGui.QColor(chart.backgroundBrush()),
+            QtGui.QColor(hcolor('papas_special')),
         )
 
     def update_on_resize(self, vr, r):

@@ -360,7 +326,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
 
         self.setPos(r.topLeft())
         self.resetTransform()
-        self.setRect(r)
+        self.scale(r.width(), r.height())
         self.show()
 
         y1, y2 = start_pos.y(), end_pos.y()

@@ -377,7 +343,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         nbars = ixmx - ixmn + 1
 
         chart = self._chart
-        data = chart.get_viz(chart.name).shm.array[ixmn:ixmx]
+        data = chart._flows[chart.name].shm.array[ixmn:ixmx]
 
         if len(data):
             std = data['close'].std()
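The `LineEditor` registry change above is structural: the newer branch keys a *list* of `LevelLine`s per order uuid via `defaultdict(list)` (presumably so one order can own several lines, e.g. bracket-style levels), where the older `@dataclass` version kept a strict one-line-per-uuid `dict`. A minimal sketch of the multi-line variant:

```python
from collections import defaultdict

# one order uuid -> many level lines
_order_lines: defaultdict[str, list] = defaultdict(list)

def submit(uuid: str, lines: list) -> list:
    # `+=` on a defaultdict(list) entry appends without a key check
    _order_lines[uuid] += lines
    return lines

def remove(uuid: str) -> list:
    # pop the whole group for this order at once
    return _order_lines.pop(uuid, [])
```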
@@ -18,11 +18,11 @@
 Qt event proxying and processing using ``trio`` mem chans.
 
 """
-from contextlib import asynccontextmanager as acm
+from contextlib import asynccontextmanager, AsyncExitStack
 from typing import Callable
 
+from pydantic import BaseModel
 import trio
-from tractor.trionics import gather_contexts
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
 from PyQt5.QtWidgets import QWidget

@@ -30,8 +30,6 @@ from PyQt5.QtWidgets import (
     QGraphicsSceneMouseEvent as gs_mouse,
 )
 
-from ..data.types import Struct
-
 
 MOUSE_EVENTS = {
     gs_mouse.GraphicsSceneMousePress,

@@ -45,10 +43,13 @@ MOUSE_EVENTS = {
 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
 
-class KeyboardMsg(Struct):
+class KeyboardMsg(BaseModel):
     '''Unpacked Qt keyboard event data.
 
     '''
+    class Config:
+        arbitrary_types_allowed = True
+
     event: QEvent
     etype: int
     key: int

@@ -56,13 +57,16 @@ class KeyboardMsg(Struct):
     txt: str
 
     def to_tuple(self) -> tuple:
-        return tuple(self.to_dict().values())
+        return tuple(self.dict().values())
 
 
-class MouseMsg(Struct):
+class MouseMsg(BaseModel):
     '''Unpacked Qt keyboard event data.
 
     '''
+    class Config:
+        arbitrary_types_allowed = True
+
     event: QEvent
     etype: int
     button: int

@@ -156,7 +160,7 @@ class EventRelay(QtCore.QObject):
         return False
 
 
-@acm
+@asynccontextmanager
 async def open_event_stream(
 
     source_widget: QWidget,

@@ -182,7 +186,7 @@ async def open_event_stream(
     source_widget.removeEventFilter(kc)
 
 
-@acm
+@asynccontextmanager
 async def open_signal_handler(
 
     signal: pyqtBoundSignal,

@@ -207,7 +211,7 @@ async def open_signal_handler(
     yield
 
 
-@acm
+@asynccontextmanager
 async def open_handlers(
 
     source_widgets: list[QWidget],

@@ -216,14 +220,16 @@ async def open_handlers(
     **kwargs,
 
 ) -> None:
 
     async with (
         trio.open_nursery() as n,
-        gather_contexts([
-            open_event_stream(widget, event_types, **kwargs)
-            for widget in source_widgets
-        ]) as streams,
+        AsyncExitStack() as stack,
     ):
-        for widget, event_recv_stream in zip(source_widgets, streams):
+        for widget in source_widgets:
+
+            event_recv_stream = await stack.enter_async_context(
+                open_event_stream(widget, event_types, **kwargs)
+            )
             n.start_soon(async_handler, widget, event_recv_stream)
 
         yield
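The `open_handlers()` change above swaps `contextlib.AsyncExitStack` (which enters the per-widget event streams one at a time) for `tractor.trionics.gather_contexts` (which enters them as a batch). A small runnable sketch of the sequential variant using only the stdlib and trio; `open_stream` is a stand-in for `open_event_stream()`:

```python
from contextlib import AsyncExitStack, asynccontextmanager

import trio

@asynccontextmanager
async def open_stream(name: str):
    # stand-in for `open_event_stream()`
    yield f'stream-{name}'

async def main():
    # sequential entry, as in the 310_plus version above; the newer
    # branch's `gather_contexts()` enters all managers concurrently.
    async with AsyncExitStack() as stack:
        streams = [
            await stack.enter_async_context(open_stream(n))
            for n in ('a', 'b')
        ]
        print(streams)

trio.run(main)
```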
@@ -20,24 +20,16 @@ Trio - Qt integration
 Run ``trio`` in guest mode on top of the Qt event loop.
 All global Qt runtime settings are mostly defined here.
 """
-from __future__ import annotations
-from typing import (
-    Callable,
-    Any,
-    Type,
-    TYPE_CHECKING,
-)
+from typing import Tuple, Callable, Dict, Any
 import platform
 import traceback
 
 # Qt specific
 import PyQt5  # noqa
-from PyQt5.QtWidgets import (
-    QWidget,
-    QMainWindow,
-    QApplication,
-)
+import pyqtgraph as pg
+from pyqtgraph import QtGui
 from PyQt5 import QtCore
+# from PyQt5.QtGui import QLabel, QStatusBar
 from PyQt5.QtCore import (
     pyqtRemoveInputHook,
     Qt,

@@ -45,19 +37,15 @@ from PyQt5.QtCore import (
 )
 import qdarkstyle
 from qdarkstyle import DarkPalette
-# import qdarkgraystyle  # TODO: play with it
+# import qdarkgraystyle
 import trio
 from outcome import Error
 
-from ..service import (
-    maybe_open_pikerd,
-    get_tractor_runtime_kwargs,
-)
+from .._daemon import maybe_open_pikerd, _tractor_kwargs
 from ..log import get_logger
 from ._pg_overrides import _do_overrides
 from . import _style
 
 
 log = get_logger(__name__)
 
 # pyqtgraph global config

@@ -84,18 +72,17 @@ if platform.system() == "Windows":
 
 def run_qtractor(
     func: Callable,
-    args: tuple,
-    main_widget_type: Type[QWidget],
-    tractor_kwargs: dict[str, Any] = {},
-    window_type: QMainWindow = None,
+    args: Tuple,
+    main_widget: QtGui.QWidget,
+    tractor_kwargs: Dict[str, Any] = {},
+    window_type: QtGui.QMainWindow = None,
 
 ) -> None:
     # avoids annoying message when entering debugger from qt loop
     pyqtRemoveInputHook()
 
-    app = QApplication.instance()
+    app = QtGui.QApplication.instance()
     if app is None:
-        app = QApplication([])
+        app = PyQt5.QtWidgets.QApplication([])
 
     # TODO: we might not need this if it's desired
     # to cancel the tractor machinery on Qt loop

@@ -169,11 +156,11 @@ def run_qtractor(
     # hook into app focus change events
     app.focusChanged.connect(window.on_focus_change)
 
-    instance = main_widget_type()
+    instance = main_widget()
     instance.window = window
 
     # override tractor's defaults
-    tractor_kwargs.update(get_tractor_runtime_kwargs())
+    tractor_kwargs.update(_tractor_kwargs)
 
     # define tractor entrypoint
     async def main():

@@ -191,7 +178,7 @@ def run_qtractor(
     # restrict_keyboard_interrupt_to_checkpoints=True,
     )
 
-    window.godwidget: GodWidget = instance
+    window.main_widget = main_widget
     window.setCentralWidget(instance)
     if is_windows:
         window.configure_to_desktop()
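This module's docstring says it runs trio in guest mode on top of the Qt event loop. A hedged sketch of the standard wiring for that (per the trio guest-mode docs; names here are illustrative, not piker's actual helpers): trio hands "run this soon" callbacks to Qt via thread-safe posted events.

```python
import trio
from PyQt5 import QtCore
from PyQt5.QtWidgets import QApplication

REENTER = QtCore.QEvent.Type(QtCore.QEvent.registerEventType())

class ReenterEvent(QtCore.QEvent):
    def __init__(self, fn):
        super().__init__(REENTER)
        self.fn = fn

class Reenter(QtCore.QObject):
    # runs trio's scheduled callbacks on the Qt thread
    def event(self, event) -> bool:
        event.fn()
        return False

def run_trio_under_qt(async_fn) -> None:
    app = QApplication.instance() or QApplication([])
    reenter = Reenter()

    def run_sync_soon_threadsafe(fn):
        # postEvent() is thread-safe, unlike QTimer.singleShot()
        app.postEvent(reenter, ReenterEvent(fn))

    trio.lowlevel.start_guest_run(
        async_fn,
        run_sync_soon_threadsafe=run_sync_soon_threadsafe,
        done_callback=lambda outcome: app.quit(),
    )
    app.exec_()
```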
(file diff suppressed because it is too large)
@@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
         # color: #19232D;
         # width: 10px;
 
-        self.setRange(0, int(slots))
+        self.setRange(0, slots)
         self.setValue(value)
 
 

@@ -644,7 +644,7 @@ def mk_fill_status_bar(
 
     # TODO: calc this height from the ``ChartnPane``
     chart_h = round(parent_pane.height() * 5/8)
-    bar_h = chart_h * 0.375*0.9
+    bar_h = chart_h * 0.375
 
     # TODO: once things are sized to screen
     bar_label_font_size = label_font_size or _font.px_size - 2
piker/ui/_fsp.py: 282 changed lines
@ -27,13 +27,12 @@ from itertools import cycle
|
||||||
from typing import Optional, AsyncGenerator, Any
|
from typing import Optional, AsyncGenerator, Any
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import msgspec
|
from pydantic import create_model
|
||||||
import tractor
|
import tractor
|
||||||
import pyqtgraph as pg
|
import pyqtgraph as pg
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
|
|
||||||
from piker.data.types import Struct
|
|
||||||
from ._axes import PriceAxis
|
from ._axes import PriceAxis
|
||||||
 from .._cacheables import maybe_open_context
 from ..calc import humanize
@@ -42,8 +41,6 @@ from ..data._sharedmem import (
     _Token,
     try_read,
 )
-from ..data.feed import Flume
-from ..data._source import Symbol
 from ._chart import (
     ChartPlotWidget,
     LinkedSplits,
@@ -53,18 +50,14 @@ from ._forms import (
     mk_form,
     open_form_input_handling,
 )
-from ..fsp._api import (
-    maybe_mk_fsp_shm,
-    Fsp,
-)
+from ..fsp._api import maybe_mk_fsp_shm, Fsp
 from ..fsp import cascade
 from ..fsp._volume import (
-    # tina_vwap,
+    tina_vwap,
     dolla_vlm,
     flow_rates,
 )
 from ..log import get_logger
-from .._profile import Profiler

 log = get_logger(__name__)

@@ -78,14 +71,15 @@ def has_vlm(ohlcv: ShmArray) -> bool:


 def update_fsp_chart(
-    viz,
+    chart: ChartPlotWidget,
+    flow,
     graphics_name: str,
     array_key: Optional[str],
     **kwargs,

 ) -> None:

-    shm = viz.shm
+    shm = flow.shm
     if not shm:
         return

@@ -100,15 +94,18 @@ def update_fsp_chart(
     # update graphics
     # NOTE: this does a length check internally which allows it
     # staying above the last row check below..
-    viz.update_graphics()
+    chart.update_graphics_from_flow(
+        graphics_name,
+        array_key=array_key or graphics_name,
+        **kwargs,
+    )

     # XXX: re: ``array_key``: fsp func names must be unique meaning we
     # can't have duplicates of the underlying data even if multiple
     # sub-charts reference it under different 'named charts'.

     # read from last calculated value and update any label
-    last_val_sticky = viz.plot.getAxis(
-        'right')._stickies.get(graphics_name)
+    last_val_sticky = chart._ysticks.get(graphics_name)
     if last_val_sticky:
         last = last_row[array_key]
         last_val_sticky.update_from_data(-1, last)

@@ -156,13 +153,12 @@ async def open_fsp_sidepane(
     )

     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
-    FspConfig = msgspec.defstruct(
-        "Point",
-        [('name', name)] + list(params.items()),
-        bases=(Struct,),
+    FspConfig = create_model(
+        'FspConfig',
+        name=name,
+        **params,
     )
-    model = FspConfig(name=name, **params)
-    sidepane.model = model
+    sidepane.model = FspConfig()

     # just a logger for now until we get fsp configs up and running.
     async def settings_change(
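
Aside on the dynamic-config hunk above: the storage branch synthesizes the per-fsp settings type at runtime with `msgspec.defstruct`, where the 310_plus side uses pydantic's `create_model`. A minimal self-contained sketch of both styles; the `name`/`period` fields and defaults below are illustrative stand-ins, not piker's actual fsp params:

    import msgspec
    from pydantic import create_model

    # msgspec: build a Struct subclass at runtime from
    # (field, type, default) specs
    FspConfig = msgspec.defstruct(
        'FspConfig',
        [
            ('name', str, 'dolla_vlm'),
            ('period', int, 6),
        ],
    )
    cfg = FspConfig()
    assert cfg.period == 6

    # pydantic equivalent as used on the 310_plus side
    PFspConfig = create_model(
        'FspConfig',
        name=(str, 'dolla_vlm'),
        period=(int, 6),
    )
    assert PFspConfig().period == 6

Both yield an ordinary class carrying the generated fields; the msgspec variant trades pydantic's validation hooks for generally cheaper instantiation.
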
@@ -192,7 +188,7 @@ async def open_fsp_actor_cluster(

     from tractor._clustering import open_actor_cluster

-    # profiler = Profiler(
+    # profiler = pg.debug.Profiler(
    #     delayed=False,
    #     disabled=False
    # )
@@ -209,12 +205,12 @@ async def open_fsp_actor_cluster(
 async def run_fsp_ui(

     linkedsplits: LinkedSplits,
-    flume: Flume,
+    shm: ShmArray,
     started: trio.Event,
     target: Fsp,
     conf: dict[str, dict],
     loglevel: str,
-    # profiler: Profiler,
+    # profiler: pg.debug.Profiler,
     # _quote_throttle_rate: int = 58,

 ) -> None:

@@ -246,11 +242,9 @@ async def run_fsp_ui(
         else:
             chart = linkedsplits.subplots[overlay_with]

-        shm = flume.rt_shm
         chart.draw_curve(
-            name,
-            shm,
-            flume,
+            name=name,
+            shm=shm,
             overlay=True,
             color='default_light',
             array_key=name,
@@ -260,9 +254,8 @@ async def run_fsp_ui(
     else:
         # create a new sub-chart widget for this fsp
         chart = linkedsplits.add_plot(
-            name,
-            shm,
-            flume,
+            name=name,
+            shm=shm,

             array_key=name,
             sidepane=sidepane,
@@ -282,10 +275,9 @@ async def run_fsp_ui(
     # profiler(f'fsp:{name} chart created')

     # first UI update, usually from shm pushed history
-    viz = chart.get_viz(array_key)
     update_fsp_chart(
         chart,
-        viz,
+        chart._flows[array_key],
         name,
         array_key=array_key,
     )

@@ -312,7 +304,7 @@ async def run_fsp_ui(
     # level_line(chart, 70, orient_v='bottom')
     # level_line(chart, 80, orient_v='top')

-    chart.view._set_yrange(viz=viz)
+    chart.view._set_yrange()
     # done()  # status updates

     # profiler(f'fsp:{func_name} starting update loop')

@@ -353,9 +345,6 @@ async def run_fsp_ui(
     # last = time.time()


-# TODO: maybe this should be our ``Viz`` type since it maps
-# one flume to the next? The machinery for task/actor mgmt should
-# be part of the instantiation API?
 class FspAdmin:
     '''
     Client API for orchestrating FSP actors and displaying

@@ -367,7 +356,7 @@ class FspAdmin:
         tn: trio.Nursery,
         cluster: dict[str, tractor.Portal],
         linked: LinkedSplits,
-        flume: Flume,
+        src_shm: ShmArray,

     ) -> None:
         self.tn = tn

@@ -379,11 +368,7 @@ class FspAdmin:
             tuple[tractor.MsgStream, ShmArray]
         ] = {}
         self._flow_registry: dict[_Token, str] = {}
-        # TODO: make this a `.src_flume` and add
-        # a `dst_flume`?
-        # (=> but then wouldn't this be the most basic `Viz`?)
-        self.flume = flume
+        self.src_shm = src_shm

     def rr_next_portal(self) -> tractor.Portal:
         name, portal = next(self._rr_next_actor)

@@ -396,7 +381,7 @@ class FspAdmin:
         complete: trio.Event,
         started: trio.Event,
         fqsn: str,
-        dst_fsp_flume: Flume,
+        dst_shm: ShmArray,
         conf: dict,
         target: Fsp,
         loglevel: str,

@@ -417,10 +402,9 @@ class FspAdmin:
                 # data feed key
                 fqsn=fqsn,

-                # TODO: pass `Flume.to_msg()`s here?
                 # mems
-                src_shm_token=self.flume.rt_shm.token,
-                dst_shm_token=dst_fsp_flume.rt_shm.token,
+                src_shm_token=self.src_shm.token,
+                dst_shm_token=dst_shm.token,

                 # target
                 ns_path=ns_path,

@@ -437,14 +421,12 @@ class FspAdmin:
                 ctx.open_stream() as stream,
             ):

-                dst_fsp_flume.stream: tractor.MsgStream = stream
-
                 # register output data
                 self._registry[
                     (fqsn, ns_path)
                 ] = (
                     stream,
-                    dst_fsp_flume.rt_shm,
+                    dst_shm,
                     complete
                 )

@@ -458,9 +440,7 @@ class FspAdmin:
                     # if the chart isn't hidden try to update
                     # the data on screen.
                     if not self.linked.isHidden():
-                        log.debug(
-                            f'Re-syncing graphics for fsp: {ns_path}'
-                        )
+                        log.debug(f'Re-syncing graphics for fsp: {ns_path}')
                         self.linked.graphics_cycle(
                             trigger_all=True,
                             prepend_update_index=info['first'],

@@ -479,9 +459,9 @@ class FspAdmin:
         worker_name: Optional[str] = None,
         loglevel: str = 'info',

-    ) -> (Flume, trio.Event):
+    ) -> (ShmArray, trio.Event):

-        fqsn = self.flume.symbol.fqsn
+        fqsn = self.linked.symbol.front_fqsn()

         # allocate an output shm array
         key, dst_shm, opened = maybe_mk_fsp_shm(

@@ -489,36 +469,16 @@ class FspAdmin:
             target=target,
             readonly=True,
         )
-        portal = self.cluster.get(worker_name) or self.rr_next_portal()
-        provider_tag = portal.channel.uid
-
-        symbol = Symbol(
-            key=key,
-            broker_info={
-                provider_tag: {'asset_type': 'fsp'},
-            },
-        )
-        dst_fsp_flume = Flume(
-            symbol=symbol,
-            _rt_shm_token=dst_shm.token,
-            first_quote={},
-
-            # set to 0 presuming for now that we can't load
-            # FSP history (though we should eventually).
-            izero_hist=0,
-            izero_rt=0,
-        )
-        self._flow_registry[(
-            self.flume.rt_shm._token,
-            target.name
-        )] = dst_shm._token
+        self._flow_registry[
+            (self.src_shm._token, target.name)
+        ] = dst_shm._token

         # if not opened:
         #     raise RuntimeError(
         #         f'Already started FSP `{fqsn}:{func_name}`'
         #     )

+        portal = self.cluster.get(worker_name) or self.rr_next_portal()
         complete = trio.Event()
         started = trio.Event()
         self.tn.start_soon(
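
Aside: the round-robin dispatch seen in context here (`rr_next_portal` pulling `next(self._rr_next_actor)`) is just an `itertools.cycle` over the actor cluster mapping; a tiny sketch with placeholder portal values:

    # sketch of FspAdmin-style round-robin worker selection;
    # the `cluster` contents stand in for tractor portals
    from itertools import cycle

    cluster = {'fsp_0': 'portal0', 'fsp_1': 'portal1'}
    rr_next_actor = cycle(cluster.items())

    def rr_next_portal():
        name, portal = next(rr_next_actor)
        return portal

    assert [rr_next_portal() for _ in range(3)] == [
        'portal0', 'portal1', 'portal0',
    ]
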
@@ -527,13 +487,13 @@ class FspAdmin:
             complete,
             started,
             fqsn,
-            dst_fsp_flume,
+            dst_shm,
             conf,
             target,
             loglevel,
         )

-        return dst_fsp_flume, started
+        return dst_shm, started

     async def open_fsp_chart(
         self,

@@ -545,7 +505,7 @@ class FspAdmin:

     ) -> (trio.Event, ChartPlotWidget):

-        flume, started = await self.start_engine_task(
+        shm, started = await self.start_engine_task(
             target,
             conf,
             loglevel,

@@ -557,7 +517,7 @@ class FspAdmin:
             run_fsp_ui,

             self.linked,
-            flume,
+            shm,
             started,
             target,

@@ -571,7 +531,7 @@ class FspAdmin:
 @acm
 async def open_fsp_admin(
     linked: LinkedSplits,
-    flume: Flume,
+    src_shm: ShmArray,
     **kwargs,

 ) -> AsyncGenerator[dict, dict[str, tractor.Portal]]:

@@ -592,7 +552,7 @@ async def open_fsp_admin(
             tn,
             cluster_map,
             linked,
-            flume,
+            src_shm,
         )
         try:
             yield admin

@@ -606,7 +566,7 @@ async def open_fsp_admin(
 async def open_vlm_displays(

     linked: LinkedSplits,
-    flume: Flume,
+    ohlcv: ShmArray,
     dvlm: bool = True,

     task_status: TaskStatus[ChartPlotWidget] = trio.TASK_STATUS_IGNORED,

@@ -628,8 +588,6 @@ async def open_vlm_displays(
     sig = inspect.signature(flow_rates.func)
     params = sig.parameters

-    ohlcv: ShmArray = flume.rt_shm
-
     async with (
         open_fsp_sidepane(
             linked, {

@@ -649,7 +607,7 @@ async def open_vlm_displays(
                 }
             },
         ) as sidepane,
-        open_fsp_admin(linked, flume) as admin,
+        open_fsp_admin(linked, ohlcv) as admin,
     ):
         # TODO: support updates
         # period_field = sidepane.fields['period']

@@ -657,21 +615,12 @@ async def open_vlm_displays(
         #     str(period_param.default)
         # )

-        # use slightly less light (then bracket) gray
-        # for volume from "main exchange" and a more "bluey"
-        # gray for "dark" vlm.
-        vlm_color = 'i3'
-        dark_vlm_color = 'charcoal'
-
         # built-in vlm which we plot ASAP since it's
         # usually data provided directly with OHLC history.
         shm = ohlcv
-        # ohlc_chart = linked.chart
-
-        vlm_chart = linked.add_plot(
+        chart = linked.add_plot(
             name='volume',
             shm=shm,
-            flume=flume,

             array_key='volume',
             sidepane=sidepane,

@@ -684,47 +633,63 @@ async def open_vlm_displays(
             # the curve item internals are pretty convoluted.
             style='step',
         )
-        vlm_viz = vlm_chart._vizs['volume']
+
+        # force 0 to always be in view
+        def multi_maxmin(
+            names: list[str],
+
+        ) -> tuple[float, float]:
+
+            mx = 0
+            for name in names:
+
+                mxmn = chart.maxmin(name=name)
+                if mxmn:
+                    ymax = mxmn[1]
+                    if ymax > mx:
+                        mx = ymax
+
+            return 0, mx
+
+        chart.view.maxmin = partial(multi_maxmin, names=['volume'])

         # TODO: fix the x-axis label issue where if you put
         # the axis on the left it's totally not lined up...
         # show volume units value on LHS (for dinkus)
-        # vlm_chart.hideAxis('right')
-        # vlm_chart.showAxis('left')
+        # chart.hideAxis('right')
+        # chart.showAxis('left')

         # send back new chart to caller
-        task_status.started(vlm_chart)
+        task_status.started(chart)

         # should **not** be the same sub-chart widget
-        assert vlm_chart.name != linked.chart.name
+        assert chart.name != linked.chart.name

         # sticky only on sub-charts atm
-        last_val_sticky = vlm_chart.plotItem.getAxis(
-            'right')._stickies.get(vlm_chart.name)
+        last_val_sticky = chart._ysticks[chart.name]

         # read from last calculated value
         value = shm.array['volume'][-1]

         last_val_sticky.update_from_data(-1, value)

-        _, _, vlm_curve = vlm_chart.update_graphics_from_flow(
+        vlm_curve = chart.update_graphics_from_flow(
             'volume',
+            # shm.array,
         )

         # size view to data once at outset
-        vlm_chart.view._set_yrange(
-            viz=vlm_viz
-        )
+        chart.view._set_yrange()

         # add axis title
-        axis = vlm_chart.getAxis('right')
+        axis = chart.getAxis('right')
         axis.set_title(' vlm')

         if dvlm:

             tasks_ready = []
             # spawn and overlay $ vlm on the same subchart
-            dvlm_flume, started = await admin.start_engine_task(
+            dvlm_shm, started = await admin.start_engine_task(
                 dolla_vlm,

                 {  # fsp engine conf
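
Aside: the `multi_maxmin` helper added in the hunk above pins the y-range floor at zero while tracking the running max across a set of named curves. The same logic isolated below, with `read_max` standing in for piker's `chart.maxmin()`:

    # grouped y-range logic: always include 0 so volume bars render
    # from the x-axis up; `read_max` is a stand-in lookup callback
    from functools import partial
    from typing import Callable

    def multi_maxmin(
        names: list[str],
        read_max: Callable[[str], float | None],
    ) -> tuple[float, float]:
        mx = 0.0
        for name in names:
            ymax = read_max(name)
            if ymax is not None and ymax > mx:
                mx = ymax
        return 0.0, mx

    data = {'dolla_vlm': 1_250_000.0, 'dark_vlm': 480_000.0}
    grouped = partial(multi_maxmin, ['dolla_vlm', 'dark_vlm'], data.get)
    assert grouped() == (0.0, 1_250_000.0)

Binding such a `partial` as the view's `maxmin` callback is what lets one overlay auto-range over several curves at once.
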
@@ -743,7 +708,7 @@ async def open_vlm_displays(
             # FIXME: we should error on starting the same fsp right
             # since it might collide with existing shm.. or wait we
             # had this before??
-            # dolla_vlm
+            # dolla_vlm,

             tasks_ready.append(started)
             # profiler(f'created shm for fsp actor: {display_name}')

@@ -757,29 +722,22 @@ async def open_vlm_displays(
             # XXX: the main chart already contains a vlm "units" axis
             # so here we add an overlay wth a y-range in
             # $ liquidity-value units (normally a fiat like USD).
-            dvlm_pi = vlm_chart.overlay_plotitem(
+            dvlm_pi = chart.overlay_plotitem(
                 'dolla_vlm',
                 index=0,  # place axis on inside (nearest to chart)

                 axis_title=' $vlm',
-                axis_side='left',
+                axis_side='right',

                 axis_kwargs={
                     'typical_max_str': ' 100.0 M ',
                     'formatter': partial(
                         humanize,
                         digits=2,
                     ),
-                    'text_color': vlm_color,
                 },
             )

-            # TODO: should this maybe be implicit based on input args to
-            # `.overlay_plotitem()` above?
-            dvlm_pi.hideAxis('bottom')
-
             # all to be overlayed curve names
-            dvlm_fields = [
+            fields = [
                 'dolla_vlm',
                 'dark_vlm',
             ]

@@ -792,18 +750,32 @@ async def open_vlm_displays(
                 'dark_trade_rate',
             ]

+            group_mxmn = partial(
+                multi_maxmin,
+                # keep both regular and dark vlm in view
+                names=fields,
+                # names=fields + dvlm_rate_fields,
+            )
+
+            # add custom auto range handler
+            dvlm_pi.vb._maxmin = group_mxmn
+
+            # use slightly less light (then bracket) gray
+            # for volume from "main exchange" and a more "bluey"
+            # gray for "dark" vlm.
+            vlm_color = 'i3'
+            dark_vlm_color = 'charcoal'
+
             # add dvlm (step) curves to common view
             def chart_curves(
                 names: list[str],
                 pi: pg.PlotItem,
                 shm: ShmArray,
-                flume: Flume,
                 step_mode: bool = False,
                 style: str = 'solid',

             ) -> None:
                 for name in names:

                     if 'dark' in name:
                         color = dark_vlm_color
                     elif 'rate' in name:

@@ -811,13 +783,9 @@ async def open_vlm_displays(
                     else:
                         color = 'bracket'

-                    assert isinstance(shm, ShmArray)
-                    assert isinstance(flume, Flume)
-
-                    viz = vlm_chart.draw_curve(
-                        name,
-                        shm,
-                        flume,
+                    curve, _ = chart.draw_curve(
+                        name=name,
+                        shm=shm,
                         array_key=name,
                         overlay=pi,
                         color=color,

@@ -825,24 +793,29 @@ async def open_vlm_displays(
                         style=style,
                         pi=pi,
                     )
-                    assert viz.plot is pi
+
+                    # TODO: we need a better API to do this..
+                    # specially store ref to shm for lookup in display loop
+                    # since only a placeholder of `None` is entered in
+                    # ``.draw_curve()``.
+                    flow = chart._flows[name]
+                    assert flow.plot is pi

             chart_curves(
-                dvlm_fields,
+                fields,
                 dvlm_pi,
-                dvlm_flume.rt_shm,
-                dvlm_flume,
+                dvlm_shm,
                 step_mode=True,
             )

             # spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
             # up since this one depends on it.

-            fr_flume, started = await admin.start_engine_task(
+            fr_shm, started = await admin.start_engine_task(
                 flow_rates,
                 {  # fsp engine conf
                     'func_name': 'flow_rates',
-                    'zero_on_step': True,
+                    'zero_on_step': False,
                 },
                 # loglevel,
             )

@@ -851,7 +824,7 @@ async def open_vlm_displays(
             # chart_curves(
             #     dvlm_rate_fields,
             #     dvlm_pi,
-            #     fr_flume.rt_shm,
+            #     fr_shm,
             # )

             # TODO: is there a way to "sync" the dual axes such that only

@@ -860,24 +833,24 @@ async def open_vlm_displays(
             # displayed and the curves are effectively the same minus
             # liquidity events (well at least on low OHLC periods - 1s).
             vlm_curve.hide()
-            vlm_chart.removeItem(vlm_curve)
-            vlm_viz = vlm_chart._vizs['volume']
-            vlm_viz.render = False
+            chart.removeItem(vlm_curve)
+            vflow = chart._flows['volume']
+            vflow.render = False

             # avoid range sorting on volume once disabled
-            vlm_chart.view.disable_auto_yrange()
+            chart.view.disable_auto_yrange()

             # Trade rate overlay
             # XXX: requires an additional overlay for
             # a trades-per-period (time) y-range.
-            tr_pi = vlm_chart.overlay_plotitem(
+            tr_pi = chart.overlay_plotitem(
                 'trade_rates',

                 # TODO: dynamically update period (and thus this axis?)
                 # title from user input.
                 axis_title='clears',
-                axis_side='left',

+                axis_side='left',
                 axis_kwargs={
                     'typical_max_str': ' 10.0 M ',
                     'formatter': partial(

@@ -888,13 +861,17 @@ async def open_vlm_displays(
                 },

             )
-            tr_pi.hideAxis('bottom')
+            # add custom auto range handler
+            tr_pi.vb.maxmin = partial(
+                multi_maxmin,
+                # keep both regular and dark vlm in view
+                names=trade_rate_fields,
+            )

             chart_curves(
                 trade_rate_fields,
                 tr_pi,
-                fr_flume.rt_shm,
-                fr_flume,
+                fr_shm,
                 # step_mode=True,

                 # dashed line to represent "individual trades" being

@@ -928,7 +905,7 @@ async def open_vlm_displays(

 async def start_fsp_displays(

     linked: LinkedSplits,
-    flume: Flume,
+    ohlcv: ShmArray,
     group_status_key: str,
     loglevel: str,

@@ -963,7 +940,7 @@ async def start_fsp_displays(
         #     },
         # },
     }
-    profiler = Profiler(
+    profiler = pg.debug.Profiler(
         delayed=False,
         disabled=False
     )

@@ -971,10 +948,7 @@ async def start_fsp_displays(
     async with (

         # NOTE: this admin internally opens an actor cluster
-        open_fsp_admin(
-            linked,
-            flume,
-        ) as admin,
+        open_fsp_admin(linked, ohlcv) as admin,
     ):
         statuses = []
         for target, conf in fsp_conf.items():

@@ -20,13 +20,8 @@ Chart view box primitives
 """
 from __future__ import annotations
 from contextlib import asynccontextmanager
-from functools import partial
 import time
-from typing import (
-    Optional,
-    Callable,
-    TYPE_CHECKING,
-)
+from typing import Optional, Callable

 import pyqtgraph as pg
 # from pyqtgraph.GraphicsScene import mouseEvents

@@ -38,16 +33,11 @@ import numpy as np
 import trio

 from ..log import get_logger
-from .._profile import Profiler
 from .._profile import pg_profile_enabled, ms_slower_then
 # from ._style import _min_points_to_show
 from ._editors import SelectRect
 from . import _event

-if TYPE_CHECKING:
-    from ._chart import ChartPlotWidget
-    from ._dataviz import Viz
-

 log = get_logger(__name__)

@@ -85,6 +75,7 @@ async def handle_viewmode_kb_inputs(
     pressed: set[str] = set()

     last = time.time()
+    trigger_mode: str
     action: str

     on_next_release: Optional[Callable] = None

@@ -150,16 +141,13 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_Space,
         }
     ):
-        godw = view._chart.linked.godwidget
-        godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
-        godw.search.focus()
+        view._chart.linked.godwidget.search.focus()

     # esc and ctrl-c
     if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
         # ctrl-c as cancel
         # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
         view.select_box.clear()
-        view.linked.focus()

     # cancel order or clear graphics
     if key == Qt.Key_C or key == Qt.Key_Delete:

@@ -190,17 +178,17 @@ async def handle_viewmode_kb_inputs(
     if key in pressed:
         pressed.remove(key)

-    # QUERY/QUOTE MODE
-    # ----------------
+    # QUERY/QUOTE MODE #
     if {Qt.Key_Q}.intersection(pressed):

-        view.linked.cursor.in_query_mode = True
+        view.linkedsplits.cursor.in_query_mode = True

     else:
-        view.linked.cursor.in_query_mode = False
+        view.linkedsplits.cursor.in_query_mode = False

     # SELECTION MODE
     # --------------

     if shift:
         if view.state['mouseMode'] == ViewBox.PanMode:
             view.setMouseMode(ViewBox.RectMode)

@@ -221,27 +209,18 @@ async def handle_viewmode_kb_inputs(

     # ORDER MODE
     # ----------

     # live vs. dark trigger + an action {buy, sell, alert}
     order_keys_pressed = ORDER_MODE.intersection(pressed)

     if order_keys_pressed:

-        # TODO: it seems like maybe the composition should be
-        # reversed here? Like, maybe we should have the nav have
-        # access to the pos state and then make encapsulated logic
-        # that shows the right stuff on screen instead or order mode
-        # and position-related abstractions doing this?
-
-        # show the pp size label only if there is
-        # a non-zero pos existing
-        tracker = order_mode.current_pp
-        if tracker.live_pp.size:
-            tracker.nav.show()
+        # show the pp size label
+        order_mode.current_pp.show()

         # TODO: show pp config mini-params in status bar widget
         # mode.pp_config.show()

-        trigger_type: str = 'dark'
         if (
             # 's' for "submit" to activate "live" order
             Qt.Key_S in pressed or

@@ -249,6 +228,9 @@ async def handle_viewmode_kb_inputs(
         ):
             trigger_type: str = 'live'

+        else:
+            trigger_type: str = 'dark'
+
         # order mode trigger "actions"
         if Qt.Key_D in pressed:  # for "damp eet"
             action = 'sell'

@@ -277,8 +259,8 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_S in pressed or
             order_keys_pressed or
             Qt.Key_O in pressed
-        )
-        and key in NUMBER_LINE
+        ) and
+        key in NUMBER_LINE
     ):
         # hot key to set order slots size.
         # change edit field to current number line value,

@@ -296,7 +278,7 @@ async def handle_viewmode_kb_inputs(
     else:  # none active

         # hide pp label
-        order_mode.current_pp.nav.hide_info()
+        order_mode.current_pp.hide_info()

         # if none are pressed, remove "staged" level
         # line under cursor position

@@ -337,6 +319,7 @@ async def handle_viewmode_mouse(
     ):
         # when in order mode, submit execution
         # msg.event.accept()
+        # breakpoint()
         view.order_mode.submit_order()


@@ -353,6 +336,16 @@ class ChartView(ViewBox):
     '''
     mode_name: str = 'view'

+    # "relay events" for making overlaid views work.
+    # NOTE: these MUST be defined here (and can't be monkey patched
+    # on later) due to signal construction requiring refs to be
+    # in place during the run of meta-class machinery.
+    mouseDragEventRelay = QtCore.Signal(object, object, object)
+    wheelEventRelay = QtCore.Signal(object, object, object)
+
+    event_relay_source: 'Optional[ViewBox]' = None
+    relays: dict[str, QtCore.Signal] = {}
+
     def __init__(
         self,

@@ -374,6 +367,7 @@ class ChartView(ViewBox):
         )
         # for "known y-range style"
         self._static_yrange = static_yrange
+        self._maxmin = None

         # disable vertical scrolling
         self.setMouseEnabled(

@@ -381,8 +375,8 @@ class ChartView(ViewBox):
             y=True,
         )

-        self.linked = None
-        self._chart: ChartPlotWidget | None = None  # noqa
+        self.linkedsplits = None
+        self._chart: 'ChartPlotWidget' = None  # noqa

         # add our selection box annotator
         self.select_box = SelectRect(self)

@@ -393,7 +387,6 @@ class ChartView(ViewBox):

         self.setFocusPolicy(QtCore.Qt.StrongFocus)
         self._ic = None
-        self._yranger: Callable | None = None

     def start_ic(
         self,

@@ -404,11 +397,8 @@ class ChartView(ViewBox):

         '''
         if self._ic is None:
-            try:
-                self.chart.pause_all_feeds()
-                self._ic = trio.Event()
-            except RuntimeError:
-                pass
+            self.chart.pause_all_feeds()
+            self._ic = trio.Event()

     def signal_ic(
         self,

@@ -421,12 +411,9 @@ class ChartView(ViewBox):

         '''
         if self._ic:
-            try:
-                self._ic.set()
-                self._ic = None
-                self.chart.resume_all_feeds()
-            except RuntimeError:
-                pass
+            self._ic.set()
+            self._ic = None
+            self.chart.resume_all_feeds()

     @asynccontextmanager
     async def open_async_input_handler(
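
Aside: in the two hunks above the storage branch wraps the feed pause/resume calls in `try/except RuntimeError`, the usual guard for calls racing Qt widget teardown (the wrapped C++ object may already be deleted when the async task fires). A sketch of that guard with stand-in names, not piker's exact API:

    # teardown guard around feed pause/resume; `chart` is a stand-in
    # object exposing pause_all_feeds()/resume_all_feeds()
    import trio

    class InteractionState:
        def __init__(self, chart):
            self.chart = chart
            self._ic: trio.Event | None = None

        def start_ic(self) -> None:
            if self._ic is None:
                try:
                    self.chart.pause_all_feeds()
                    self._ic = trio.Event()
                except RuntimeError:
                    # chart widget may already be torn down; ignore
                    pass

        def signal_ic(self) -> None:
            if self._ic:
                try:
                    self._ic.set()
                    self._ic = None
                    self.chart.resume_all_feeds()
                except RuntimeError:
                    pass
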
@@ -454,18 +441,29 @@ class ChartView(ViewBox):
         yield self

     @property
-    def chart(self) -> ChartPlotWidget:  # type: ignore # noqa
+    def chart(self) -> 'ChartPlotWidget':  # type: ignore # noqa
         return self._chart

     @chart.setter
-    def chart(self, chart: ChartPlotWidget) -> None:  # type: ignore # noqa
+    def chart(self, chart: 'ChartPlotWidget') -> None:  # type: ignore # noqa
         self._chart = chart
         self.select_box.chart = chart
+        if self._maxmin is None:
+            self._maxmin = chart.maxmin
+
+    @property
+    def maxmin(self) -> Callable:
+        return self._maxmin
+
+    @maxmin.setter
+    def maxmin(self, callback: Callable) -> None:
+        self._maxmin = callback

     def wheelEvent(
         self,
         ev,
         axis=None,
+        relayed_from: ChartView = None,
     ):
         '''
         Override "center-point" location for scrolling.

@@ -476,34 +474,27 @@ class ChartView(ViewBox):
         TODO: PR a method into ``pyqtgraph`` to make this configurable

         '''
-        linked = self.linked
-        if (
-            not linked
-        ):
-            return
-
         if axis in (0, 1):
             mask = [False, False]
             mask[axis] = self.state['mouseEnabled'][axis]
         else:
             mask = self.state['mouseEnabled'][:]

-        chart = self.linked.chart
+        chart = self.linkedsplits.chart

         # don't zoom more then the min points setting
-        viz = chart.get_viz(chart.name)
-        vl, lbar, rbar, vr = viz.bars_range()
+        l, lbar, rbar, r = chart.bars_range()
+        # vl = r - l

-        # TODO: max/min zoom limits incorporating time step size.
-        # rl = vr - vl
-        # if ev.delta() > 0 and rl <= _min_points_to_show:
-        #     log.warning("Max zoom bruh...")
+        # if ev.delta() > 0 and vl <= _min_points_to_show:
+        #     log.debug("Max zoom bruh...")
         #     return

         # if (
         #     ev.delta() < 0
-        #     and rl >= len(chart._vizs[chart.name].shm.array) + 666
+        #     and vl >= len(chart._flows[chart.name].shm.array) + 666
         # ):
-        #     log.warning("Min zoom bruh...")
+        #     log.debug("Min zoom bruh...")
         #     return

         # actual scaling factor

@@ -534,17 +525,49 @@ class ChartView(ViewBox):
             self.scaleBy(s, center)

         else:
-            # use right-most point of current curve graphic
-            xl = viz.graphics.x_last()
+            # center = pg.Point(
+            #     fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
+            # )
+
+            # XXX: scroll "around" the right most element in the view
+            # which stays "pinned" in place.
+
+            # furthest_right_coord = self.boundingRect().topRight()
+
+            # yaxis = pg.Point(
+            #     fn.invertQTransform(
+            #         self.childGroup.transform()
+            #     ).map(furthest_right_coord)
+            # )
+
+            # This seems like the most "intuitive option, a hybrid of
+            # tws and tv styles
+            last_bar = pg.Point(int(rbar)) + 1
+
+            ryaxis = chart.getAxis('right')
+            r_axis_x = ryaxis.pos().x()
+
+            end_of_l1 = pg.Point(
+                round(
+                    chart.cv.mapToView(
+                        pg.Point(r_axis_x - chart._max_l1_line_len)
+                        # QPointF(chart._max_l1_line_len, 0)
+                    ).x()
+                )
+            )  # .x()
+
+            # self.state['viewRange'][0][1] = end_of_l1
+            # focal = pg.Point((last_bar.x() + end_of_l1)/2)
+
             focal = min(
-                xl,
-                vr,
+                last_bar,
+                end_of_l1,
+                key=lambda p: p.x()
             )
+            # focal = pg.Point(last_bar.x() + end_of_l1)

             self._resetTarget()

-            # NOTE: scroll "around" the right most datum-element in view
-            # gives the feeling of staying "pinned" in place.
             self.scaleBy(s, focal)

         # XXX: the order of the next 2 lines i'm pretty sure
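
Aside: the focal-point pick in the hunk above zooms "around" whichever x-reference sits further left, the last bar or the start of the L1 label area, via `min(..., key=lambda p: p.x())`. The same pick distilled with a stub point type so it runs without pyqtgraph/Qt (values are illustrative):

    # stand-in for pg.Point, which orders only via an explicit key
    from collections import namedtuple

    class Point(namedtuple('Point', 'x_ y_')):
        def x(self) -> float:
            return self.x_

    last_bar = Point(101.0, 0.0)   # right-most datum index + 1
    end_of_l1 = Point(97.0, 0.0)   # left edge of L1 labels (stub value)

    focal = min(last_bar, end_of_l1, key=lambda p: p.x())
    assert focal.x() == 97.0
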
@@ -570,8 +593,10 @@ class ChartView(ViewBox):
         self,
         ev,
         axis: Optional[int] = None,
+        relayed_from: ChartView = None,

     ) -> None:

         pos = ev.pos()
         lastPos = ev.lastPos()
         dif = pos - lastPos

@@ -641,10 +666,10 @@ class ChartView(ViewBox):

         # PANNING MODE
         else:
-            try:
-                self.start_ic()
-            except RuntimeError:
-                pass
+            # XXX: WHY
+            ev.accept()
+
+            self.start_ic()
             # if self._ic is None:
             #     self.chart.pause_all_feeds()
             #     self._ic = trio.Event()

@@ -672,9 +697,6 @@ class ChartView(ViewBox):
             #     self._ic = None
             #     self.chart.resume_all_feeds()

-            # XXX: WHY
-            ev.accept()
-
         # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
         elif button & QtCore.Qt.RightButton:

@@ -720,12 +742,7 @@ class ChartView(ViewBox):
         *,

         yrange: Optional[tuple[float, float]] = None,
-        viz: Viz | None = None,
-
-        # NOTE: this value pairs (more or less) with L1 label text
-        # height offset from from the bid/ask lines.
-        range_margin: float = 0.09,
-
+        range_margin: float = 0.06,
         bars_range: Optional[tuple[int, int, int, int]] = None,

         # flag to prevent triggering sibling charts from the same linked

@@ -744,7 +761,7 @@ class ChartView(ViewBox):
         '''
         name = self.name
         # print(f'YRANGE ON {name}')
-        profiler = Profiler(
+        profiler = pg.debug.Profiler(
             msg=f'`ChartView._set_yrange()`: `{name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,

@@ -778,28 +795,18 @@ class ChartView(ViewBox):
         # XXX: only compute the mxmn range
         # if none is provided as input!
         if not yrange:
-            if not viz:
-                breakpoint()
-
-            out = viz.maxmin()
-            if out is None:
-                log.warning(f'No yrange provided for {name}!?')
-                return
-            (
-                ixrng,
-                _,
-                yrange
-            ) = out
-
-            profiler(f'`{self.name}:Viz.maxmin()` -> {ixrng}=>{yrange}')
+            # flow = chart._flows[name]
+            yrange = self._maxmin()

         if yrange is None:
             log.warning(f'No yrange provided for {name}!?')
+            print(f"WTF NO YRANGE {name}")
             return

         ylow, yhigh = yrange

+        profiler(f'callback ._maxmin(): {yrange}')
+
         # view margins: stay within a % of the "true range"
         diff = yhigh - ylow
         ylow = ylow - (diff * range_margin)

@@ -819,55 +826,54 @@ class ChartView(ViewBox):

     def enable_auto_yrange(
         self,
-        viz: Viz,
         src_vb: Optional[ChartView] = None,

     ) -> None:
         '''
-        Assign callbacks for rescaling and resampling y-axis data
-        automatically based on data contents and ``ViewBox`` state.
+        Assign callback for rescaling y-axis automatically
+        based on data contents and ``ViewBox`` state.

         '''
         if src_vb is None:
             src_vb = self

-        if self._yranger is None:
-            self._yranger = partial(
-                self._set_yrange,
-                viz=viz,
-            )
-
-        # widget-UIs/splitter(s) resizing
-        src_vb.sigResized.connect(self._yranger)
-
-        # mouse wheel doesn't emit XRangeChanged
-        src_vb.sigRangeChangedManually.connect(self._yranger)
+        # splitter(s) resizing
+        src_vb.sigResized.connect(self._set_yrange)

-        # re-sampling trigger:
         # TODO: a smarter way to avoid calling this needlessly?
         # 2 things i can think of:
         # - register downsample-able graphics specially and only
         #   iterate those.
-        # - only register this when certain downsample-able graphics are
+        # - only register this when certain downsampleable graphics are
         #   "added to scene".
         src_vb.sigRangeChangedManually.connect(
             self.maybe_downsample_graphics
         )

+        # mouse wheel doesn't emit XRangeChanged
+        src_vb.sigRangeChangedManually.connect(self._set_yrange)
+
+        # src_vb.sigXRangeChanged.connect(self._set_yrange)
+        # src_vb.sigXRangeChanged.connect(
+        #     self.maybe_downsample_graphics
+        # )
+
     def disable_auto_yrange(self) -> None:

-        # XXX: not entirely sure why we can't de-reg this..
         self.sigResized.disconnect(
-            self._yranger,
+            self._set_yrange,
         )

-        self.sigRangeChangedManually.disconnect(
-            self._yranger,
-        )
-
         self.sigRangeChangedManually.disconnect(
             self.maybe_downsample_graphics
         )
+        self.sigRangeChangedManually.disconnect(
+            self._set_yrange,
+        )
+
+        # self.sigXRangeChanged.disconnect(self._set_yrange)
+        # self.sigXRangeChanged.disconnect(
+        #     self.maybe_downsample_graphics
+        # )
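
Aside on the `enable_auto_yrange`/`disable_auto_yrange` hunk above: the storage side caches one `partial(self._set_yrange, viz=viz)` in `self._yranger` because Qt can only disconnect the exact callable object that was connected; a freshly-built partial compares unequal and would fail to disconnect. The sketch below shows the pitfall with a tiny pure-Python signal so it runs without Qt:

    from functools import partial

    class Signal:
        def __init__(self):
            self._slots = []

        def connect(self, slot):
            self._slots.append(slot)

        def disconnect(self, slot):
            # `remove` matches by equality, which for partials falls
            # back to identity -- same behavior Qt relies on
            self._slots.remove(slot)

    sig = Signal()

    def _set_yrange(viz=None):
        pass

    yranger = partial(_set_yrange, viz='btcusdt')
    sig.connect(yranger)

    # a newly-built partial is a different object: it would not be
    # found by disconnect
    assert partial(_set_yrange, viz='btcusdt') != yranger

    sig.disconnect(yranger)  # works: same cached object
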
@@ -876,7 +882,7 @@ class ChartView(ViewBox):
         graphics items which are our children.

         '''
-        graphics = [f.graphics for f in self._chart._vizs.values()]
+        graphics = [f.graphics for f in self._chart._flows.values()]
         if not graphics:
             return 0

@@ -889,9 +895,10 @@ class ChartView(ViewBox):

     def maybe_downsample_graphics(
         self,
-        autoscale_overlays: bool = False,
+        autoscale_overlays: bool = True,
     ):
-        profiler = Profiler(
+
+        profiler = pg.debug.Profiler(
             msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
             disabled=not pg_profile_enabled(),

@@ -905,14 +912,10 @@ class ChartView(ViewBox):

         # TODO: a faster single-loop-iterator way of doing this XD
         chart = self._chart
-        plots = {chart.name: chart}
-        linked = self.linked
-        if linked:
-            plots |= linked.subplots
+        linked = self.linkedsplits
+        plots = linked.subplots | {chart.name: chart}

         for chart_name, chart in plots.items():
-            for name, flow in chart._vizs.items():
+            for name, flow in chart._flows.items():

                 if (
                     not flow.render

@@ -920,24 +923,25 @@ class ChartView(ViewBox):
                     # XXX: super important to be aware of this.
                     # or not flow.graphics.isVisible()
                 ):
-                    # print(f'skipping {flow.name}')
                     continue

                 # pass in no array which will read and render from the last
                 # passed array (normally provided by the display loop.)
-                chart.update_graphics_from_flow(name)
+                chart.update_graphics_from_flow(
+                    name,
+                    use_vr=True,
+                )

                 # for each overlay on this chart auto-scale the
                 # y-range to max-min values.
-                # if autoscale_overlays:
-                #     overlay = chart.pi_overlay
-                #     if overlay:
-                #         for pi in overlay.overlays:
-                #             pi.vb._set_yrange(
-                #                 # TODO: get the range once up front...
-                #                 # bars_range=br,
-                #                 viz=pi.viz,
-                #             )
-                # profiler('autoscaled linked plots')
+                if autoscale_overlays:
+                    overlay = chart.pi_overlay
+                    if overlay:
+                        for pi in overlay.overlays:
+                            pi.vb._set_yrange(
+                                # TODO: get the range once up front...
+                                # bars_range=br,
+                            )
+                    profiler('autoscaled linked plots')

                 profiler(f'<{chart_name}>.update_graphics_from_flow({name})')

@ -26,24 +26,22 @@ from PyQt5.QtCore import QPointF
|
||||||
|
|
||||||
from ._axes import YAxisLabel
|
from ._axes import YAxisLabel
|
||||||
from ._style import hcolor
|
from ._style import hcolor
|
||||||
from ._pg_overrides import PlotItem
|
|
||||||
|
|
||||||
|
|
||||||
class LevelLabel(YAxisLabel):
|
class LevelLabel(YAxisLabel):
|
||||||
'''
|
"""Y-axis (vertically) oriented, horizontal label that sticks to
|
||||||
Y-axis (vertically) oriented, horizontal label that sticks to
|
|
||||||
where it's placed despite chart resizing and supports displaying
|
where it's placed despite chart resizing and supports displaying
|
||||||
multiple fields.
|
multiple fields.
|
||||||
|
|
||||||
|
|
||||||
TODO: replace the rectangle-text part with our new ``Label`` type.
|
TODO: replace the rectangle-text part with our new ``Label`` type.
|
||||||
|
|
||||||
'''
|
"""
|
||||||
_x_br_offset: float = -16
|
_x_margin = 0
|
||||||
_y_txt_h_scaling: float = 2
|
_y_margin = 0
|
||||||
|
|
||||||
# adjustment "further away from" anchor point
|
# adjustment "further away from" anchor point
|
||||||
_x_offset = 0
|
_x_offset = 9
|
||||||
_y_offset = 0
|
_y_offset = 0
|
||||||
|
|
||||||
# fields to be displayed in the label string
|
# fields to be displayed in the label string
|
||||||
|
@ -59,12 +57,12 @@ class LevelLabel(YAxisLabel):
|
||||||
chart,
|
chart,
|
||||||
parent,
|
parent,
|
||||||
|
|
||||||
color: str = 'default_light',
|
color: str = 'bracket',
|
||||||
|
|
||||||
orient_v: str = 'bottom',
|
orient_v: str = 'bottom',
|
||||||
orient_h: str = 'right',
|
orient_h: str = 'left',
|
||||||
|
|
||||||
opacity: float = 1,
|
opacity: float = 0,
|
||||||
|
|
||||||
# makes order line labels offset from their parent axis
|
# makes order line labels offset from their parent axis
|
||||||
# such that they don't collide with the L1/L2 lines/prices
|
# such that they don't collide with the L1/L2 lines/prices
|
||||||
|
@ -100,15 +98,13 @@ class LevelLabel(YAxisLabel):
|
||||||
|
|
||||||
self._h_shift = {
|
self._h_shift = {
|
||||||
'left': -1.,
|
'left': -1.,
|
||||||
'right': 0.,
|
'right': 0.
|
||||||
}[orient_h]
|
}[orient_h]
|
||||||
|
|
||||||
self.fields = self._fields.copy()
|
self.fields = self._fields.copy()
|
||||||
# ensure default format fields are in correct
|
# ensure default format fields are in correct
|
||||||
self.set_fmt_str(self._fmt_str, self.fields)
|
self.set_fmt_str(self._fmt_str, self.fields)
|
||||||
|
|
||||||
self.setZValue(10)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def color(self):
|
def color(self):
|
||||||
return self._hcolor
|
return self._hcolor
|
||||||
|
@ -116,10 +112,7 @@ class LevelLabel(YAxisLabel):
|
||||||
@color.setter
|
@color.setter
|
||||||
def color(self, color: str) -> None:
|
def color(self, color: str) -> None:
|
||||||
self._hcolor = color
|
self._hcolor = color
|
||||||
self._pen = self.pen = pg.mkPen(
|
self._pen = self.pen = pg.mkPen(hcolor(color))
|
||||||
hcolor(color),
|
|
||||||
width=3,
|
|
||||||
)
|
|
||||||
|
|
||||||
def update_on_resize(self, vr, r):
|
def update_on_resize(self, vr, r):
|
||||||
"""Tiis is a ``.sigRangeChanged()`` handler.
|
"""Tiis is a ``.sigRangeChanged()`` handler.
|
||||||
|
@ -131,16 +124,15 @@ class LevelLabel(YAxisLabel):
|
||||||
self,
|
self,
|
||||||
fields: dict = None,
|
fields: dict = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
"""Update the label's text contents **and** position from
|
||||||
Update the label's text contents **and** position from
|
|
||||||
a view box coordinate datum.
|
a view box coordinate datum.
|
||||||
|
|
||||||
'''
|
"""
|
||||||
self.fields.update(fields)
|
self.fields.update(fields)
|
||||||
level = self.fields['level']
|
level = self.fields['level']
|
||||||
|
|
||||||
# map "level" to local coords
|
# map "level" to local coords
|
||||||
abs_xy = self._pi.mapFromView(QPointF(0, level))
|
abs_xy = self._chart.mapFromView(QPointF(0, level))
|
||||||
|
|
||||||
self.update_label(
|
self.update_label(
|
||||||
abs_xy,
|
abs_xy,
|
||||||
|
@ -157,7 +149,7 @@ class LevelLabel(YAxisLabel):
|
||||||
h, w = self.set_label_str(fields)
|
h, w = self.set_label_str(fields)
|
||||||
|
|
||||||
if self._adjust_to_l1:
|
if self._adjust_to_l1:
|
||||||
self._x_offset = self._pi.chart_widget._max_l1_line_len
|
self._x_offset = self._chart._max_l1_line_len
|
||||||
|
|
||||||
self.setPos(QPointF(
|
self.setPos(QPointF(
|
||||||
self._h_shift * (w + self._x_offset),
|
self._h_shift * (w + self._x_offset),
|
||||||
|
@ -182,8 +174,7 @@ class LevelLabel(YAxisLabel):
|
||||||
fields: dict,
|
fields: dict,
|
||||||
):
|
):
|
||||||
# use space as e3 delim
|
# use space as e3 delim
|
||||||
self.label_str = self._fmt_str.format(
|
self.label_str = self._fmt_str.format(**fields).replace(',', ' ')
|
||||||
**fields).replace(',', ' ')
|
|
||||||
|
|
||||||
br = self.boundingRect()
|
br = self.boundingRect()
|
||||||
h, w = br.height(), br.width()
|
h, w = br.height(), br.width()
|
||||||
|
@ -196,14 +187,14 @@ class LevelLabel(YAxisLabel):
|
||||||
self,
|
self,
|
||||||
p: QtGui.QPainter,
|
p: QtGui.QPainter,
|
||||||
rect: QtCore.QRectF
|
rect: QtCore.QRectF
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
p.setPen(self._pen)
|
p.setPen(self._pen)
|
||||||
|
|
||||||
rect = self.rect
|
rect = self.rect
|
||||||
|
|
||||||
if self._orient_v == 'bottom':
|
if self._orient_v == 'bottom':
|
||||||
lp, rp = rect.topLeft(), rect.topRight()
|
lp, rp = rect.topLeft(), rect.topRight()
|
||||||
|
# p.drawLine(rect.topLeft(), rect.topRight())
|
||||||
|
|
||||||
elif self._orient_v == 'top':
|
elif self._orient_v == 'top':
|
||||||
lp, rp = rect.bottomLeft(), rect.bottomRight()
|
lp, rp = rect.bottomLeft(), rect.bottomRight()
|
||||||
|
@ -217,11 +208,6 @@ class LevelLabel(YAxisLabel):
|
||||||
])
|
])
|
||||||
)
|
)
|
||||||
|
|
||||||
p.fillRect(
|
|
||||||
self.rect,
|
|
||||||
self.bg_color,
|
|
||||||
)
|
|
||||||
|
|
||||||
def highlight(self, pen) -> None:
|
def highlight(self, pen) -> None:
|
||||||
self._pen = pen
|
self._pen = pen
|
||||||
self.update()
|
self.update()
|
||||||
|
@@ -250,46 +236,43 @@ class L1Label(LevelLabel):
         # Set a global "max L1 label length" so we can
         # look it up on order lines and adjust their
         # labels not to overlap with it.
-        chart = self._pi.chart_widget
+        chart = self._chart
         chart._max_l1_line_len: float = max(
             chart._max_l1_line_len,
-            w,
+            w
         )

         return h, w


 class L1Labels:
-    '''
-    Level 1 bid ask labels for dynamic update on price-axis.
+    """Level 1 bid ask labels for dynamic update on price-axis.

-    '''
+    """
     def __init__(
         self,
-        plotitem: PlotItem,
+        chart: 'ChartPlotWidget',  # noqa
         digits: int = 2,
         size_digits: int = 3,
         font_size: str = 'small',
     ) -> None:

-        chart = self.chart = plotitem.chart_widget
+        self.chart = chart

-        raxis = plotitem.getAxis('right')
+        raxis = chart.getAxis('right')
         kwargs = {
-            'chart': plotitem,
+            'chart': chart,
             'parent': raxis,

-            'opacity': .9,
+            'opacity': 1,
             'font_size': font_size,
-            'fg_color': 'default_light',
-            'bg_color': chart.view_color,  # normally 'papas_special'
+            'fg_color': chart.pen_color,
+            'bg_color': chart.view_color,
         }

-        # TODO: add humanized source-asset
-        # info format.
         fmt_str = (
-            ' {size:.{size_digits}f} u'
-            # '{level:,.{level_digits}f} '
+            ' {size:.{size_digits}f} x '
+            '{level:,.{level_digits}f} '
         )
         fields = {
             'level': 0,
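The "space as e3 delim" trick above is just a post-format substitution: `str.format()` inserts comma thousands-grouping via the `,` spec, then commas are swapped for spaces. A self-contained illustration with made-up values:

fmt_str = (
    ' {size:.{size_digits}f} x '
    '{level:,.{level_digits}f} '
)
label = fmt_str.format(
    size=1.25,
    size_digits=3,
    level=23456.5,
    level_digits=2,
).replace(',', ' ')
print(repr(label))  # ' 1.250 x 23 456.50 '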
@@ -302,17 +285,12 @@ class L1Labels:
             orient_v='bottom',
             **kwargs,
         )
-        bid.set_fmt_str(
-            fmt_str='\n' + fmt_str,
-            fields=fields,
-        )
+        bid.set_fmt_str(fmt_str=fmt_str, fields=fields)
         bid.show()

         ask = self.ask_label = L1Label(
             orient_v='top',
             **kwargs,
         )
-        ask.set_fmt_str(
-            fmt_str=fmt_str,
-            fields=fields)
+        ask.set_fmt_str(fmt_str=fmt_str, fields=fields)
         ask.show()

@@ -233,36 +233,6 @@ class Label:
     def delete(self) -> None:
         self.vb.scene().removeItem(self.txt)

-# NOTE: pulled out from ``ChartPlotWidget`` from way way old code.
-# def _label_h(self, yhigh: float, ylow: float) -> float:
-#     # compute contents label "height" in view terms
-#     # to avoid having data "contents" overlap with them
-#     if self._labels:
-#         label = self._labels[self.name][0]
-
-#         rect = label.itemRect()
-#         tl, br = rect.topLeft(), rect.bottomRight()
-#         vb = self.plotItem.vb
-
-#         try:
-#             # on startup labels might not yet be rendered
-#             top, bottom = (vb.mapToView(tl).y(), vb.mapToView(br).y())
-
-#             # XXX: magic hack, how do we compute exactly?
-#             label_h = (top - bottom) * 0.42
-
-#         except np.linalg.LinAlgError:
-#             label_h = 0
-#     else:
-#         label_h = 0
-
-#     # print(f'label height {self.name}: {label_h}')
-
-#     if label_h > yhigh - ylow:
-#         label_h = 0
-
-#     print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")


 class FormatLabel(QLabel):
     '''

@@ -18,14 +18,9 @@
 Lines for orders, alerts, L2.

 """
-from __future__ import annotations
 from functools import partial
 from math import floor
-from typing import (
-    Optional,
-    Callable,
-    TYPE_CHECKING,
-)
+from typing import Optional, Callable

 import pyqtgraph as pg
 from pyqtgraph import Point, functions as fn
@@ -42,9 +37,6 @@ from ..calc import humanize
 from ._label import Label
 from ._style import hcolor, _font

-if TYPE_CHECKING:
-    from ._cursor import Cursor
-

 # TODO: probably worth investigating if we can
 # make .boundingRect() faster:
@@ -92,7 +84,7 @@ class LevelLine(pg.InfiniteLine):

         self._marker = None
         self.only_show_markers_on_hover = only_show_markers_on_hover
-        self.track_marker_pos: bool = False
+        self.show_markers: bool = True  # presuming the line is hovered at init

         # should line go all the way to far end or leave a "margin"
         # space for other graphics (eg. L1 book)
@@ -130,9 +122,6 @@ class LevelLine(pg.InfiniteLine):
         self._y_incr_mult = 1 / chart.linked.symbol.tick_size
         self._right_end_sc: float = 0

-        # use px caching
-        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-
     def txt_offsets(self) -> tuple[int, int]:
         return 0, 0

@@ -227,23 +216,20 @@ class LevelLine(pg.InfiniteLine):
         y: float

     ) -> None:
-        '''
-        Chart coordinates cursor tracking callback.
+        '''Chart coordinates cursor tracking callback.

         this is called by our ``Cursor`` type once this line is set to
         track the cursor: for every movement this callback is invoked to
         reposition the line with the current view coordinates.

         '''
         self.movable = True
         self.set_level(y)  # implictly calls reposition handler

     def mouseDragEvent(self, ev):
-        '''
-        Override the ``InfiniteLine`` handler since we need more
+        """Override the ``InfiniteLine`` handler since we need more
         detailed control and start end signalling.

-        '''
+        """
         cursor = self._chart.linked.cursor

         # hide y-crosshair
@@ -295,20 +281,10 @@ class LevelLine(pg.InfiniteLine):
         # show y-crosshair again
         cursor.show_xhair()

-    def get_cursor(self) -> Optional[Cursor]:
-
-        chart = self._chart
-        cur = chart.linked.cursor
-        if self in cur._hovered:
-            return cur
-
-        return None
-
     def delete(self) -> None:
-        '''
-        Remove this line from containing chart/view/scene.
+        """Remove this line from containing chart/view/scene.

-        '''
+        """
         scene = self.scene()
         if scene:
             for label in self._labels:
@@ -322,8 +298,9 @@ class LevelLine(pg.InfiniteLine):

         # remove from chart/cursor states
         chart = self._chart
-        cur = self.get_cursor()
-        if cur:
+        cur = chart.linked.cursor
+
+        if self in cur._hovered:
             cur._hovered.remove(self)

         chart.plotItem.removeItem(self)
@@ -331,8 +308,8 @@ class LevelLine(pg.InfiniteLine):
     def mouseDoubleClickEvent(
         self,
         ev: QtGui.QMouseEvent,

     ) -> None:

         # TODO: enter labels edit mode
         print(f'double click {ev}')

@@ -357,22 +334,30 @@ class LevelLine(pg.InfiniteLine):

         line_end, marker_right, r_axis_x = self._chart.marker_right_points()

-        # (legacy) NOTE: at one point this seemed slower when moving around
-        # order lines.. not sure if that's still true or why but we've
-        # dropped the original hacky `.pain()` transform stuff for inf
-        # line markers now - check the git history if it needs to be
-        # reverted.
-        if self._marker:
-            if self.track_marker_pos:
-                # make the line end at the marker's x pos
-                line_end = marker_right = self._marker.pos().x()
+        if self.show_markers and self.markers:
+
+            p.setPen(self.pen)
+            qgo_draw_markers(
+                self.markers,
+                self.pen.color(),
+                p,
+                vb_left,
+                vb_right,
+                marker_right,
+            )
+            # marker_size = self.markers[0][2]
+            self._maxMarkerSize = max([m[2] / 2. for m in self.markers])

+        # this seems slower when moving around
+        # order lines.. not sure wtf is up with that.
+        # for now we're just using it on the position line.
+        elif self._marker:

             # TODO: make this label update part of a scene-aware-marker
             # composed annotation
             self._marker.setPos(
                 QPointF(marker_right, self.scene_y())
             )

             if hasattr(self._marker, 'label'):
                 self._marker.label.update()

@@ -394,14 +379,16 @@ class LevelLine(pg.InfiniteLine):

     def hide(self) -> None:
         super().hide()
-        mkr = self._marker
-        if mkr:
-            mkr.hide()
+        if self._marker:
+            self._marker.hide()
+            # needed for ``order_line()`` lines currently
+            self._marker.label.hide()

     def show(self) -> None:
         super().show()
         if self._marker:
             self._marker.show()
+            # self._marker.label.show()

     def scene_y(self) -> float:
         return self.getViewBox().mapFromView(
@@ -434,10 +421,6 @@ class LevelLine(pg.InfiniteLine):

         return path

-    @property
-    def marker(self) -> LevelMarker:
-        return self._marker
-
     def hoverEvent(self, ev):
         '''
         Mouse hover callback.
@@ -446,16 +429,17 @@ class LevelLine(pg.InfiniteLine):
         cur = self._chart.linked.cursor

         # hovered
-        if (
-            not ev.isExit()
-            and ev.acceptDrags(QtCore.Qt.LeftButton)
-        ):
+        if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
             # if already hovered we don't need to run again
             if self.mouseHovering is True:
                 return

             if self.only_show_markers_on_hover:
-                self.show_markers()
+                self.show_markers = True
+
+                if self._marker:
+                    self._marker.show()

             # highlight if so configured
             if self.highlight_on_hover:
@@ -498,7 +482,11 @@ class LevelLine(pg.InfiniteLine):
                 cur._hovered.remove(self)

             if self.only_show_markers_on_hover:
-                self.hide_markers()
+                self.show_markers = False
+
+                if self._marker:
+                    self._marker.hide()
+                    self._marker.label.hide()

             if self not in cur._trackers:
                 cur.show_xhair(y_label_level=self.value())
@@ -510,15 +498,6 @@ class LevelLine(pg.InfiniteLine):

         self.update()

-    def hide_markers(self) -> None:
-        if self._marker:
-            self._marker.hide()
-            self._marker.label.hide()
-
-    def show_markers(self) -> None:
-        if self._marker:
-            self._marker.show()
-

 def level_line(

@@ -539,10 +518,9 @@ def level_line(
     **kwargs,

 ) -> LevelLine:
-    '''
-    Convenience routine to add a styled horizontal line to a plot.
+    """Convenience routine to add a styled horizontal line to a plot.

-    '''
+    """
     hl_color = color + '_light' if highlight_on_hover else color

     line = LevelLine(
@@ -724,7 +702,7 @@ def order_line(
     marker = LevelMarker(
         chart=chart,
         style=marker_style,
-        get_level=line.value,  # callback
+        get_level=line.value,
         size=marker_size,
         keep_in_view=False,
     )
@@ -733,8 +711,7 @@ def order_line(
     marker = line.add_marker(marker)

     # XXX: DON'T COMMENT THIS!
-    # this fixes it the artifact issue!
-    # .. of course, bounding rect stuff
+    # this fixes it the artifact issue! .. of course, bounding rect stuff
     line._maxMarkerSize = marker_size

     assert line._marker is marker
@@ -755,8 +732,7 @@ def order_line(

     if action != 'alert':

-        # add a partial position label if we also added a level
-        # marker
+        # add a partial position label if we also added a level marker
         pp_size_label = Label(
             view=view,
             color=line.color,
@@ -790,9 +766,9 @@ def order_line(
         # XXX: without this the pp proportion label next the marker
         # seems to lag? this is the same issue we had with position
         # lines which we handle with ``.update_graphcis()``.
+        # marker._on_paint=lambda marker: pp_size_label.update()
         marker._on_paint = lambda marker: pp_size_label.update()

-    # XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
     marker.label = label

     # sanity check

@@ -1,108 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Notifications utils.
-
-"""
-import os
-import platform
-import subprocess
-from typing import Optional
-
-import trio
-
-from ..log import get_logger
-from ..clearing._messages import (
-    Status,
-)
-
-log = get_logger(__name__)
-
-
-_dbus_uid: Optional[str] = ''
-
-
-async def notify_from_ems_status_msg(
-    msg: Status,
-    duration: int = 3000,
-    is_subproc: bool = False,
-
-) -> None:
-    '''
-    Send a linux desktop notification.
-
-    Handle subprocesses by discovering the dbus user id
-    on first call.
-
-    '''
-    if platform.system() != "Linux":
-        return
-
-    # TODO: this in another task?
-    # not sure if this will ever be a bottleneck,
-    # we probably could do graphics stuff first tho?
-
-    if is_subproc:
-        global _dbus_uid
-        su = os.environ.get('SUDO_USER')
-        if (
-            not _dbus_uid
-            and su
-        ):
-
-            # TODO: use `trio` but we need to use nursery.start()
-            # to use pipes?
-            # result = await trio.run_process(
-            result = subprocess.run(
-                [
-                    'id',
-                    '-u',
-                    su,
-                ],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-                # check=True
-            )
-            _dbus_uid = result.stdout.decode("utf-8").replace('\n', '')
-
-            os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
-                f'unix:path=/run/user/{_dbus_uid}/bus'
-            )
-
-    try:
-        result = await trio.run_process(
-            [
-                'notify-send',
-                '-u', 'normal',
-                '-t', f'{duration}',
-                'piker',
-
-                # TODO: add in standard fill/exec info that maybe we
-                # pack in a broker independent way?
-                f"'{msg.pformat()}'",
-            ],
-            capture_stdout=True,
-            capture_stderr=True,
-            check=False,
-        )
-        if result.returncode != 0:
-            log.warn(f'Notification daemon crashed stderr: {result.stderr}')
-
-        log.runtime(result)
-
-    except FileNotFoundError:
-        log.warn('Tried to send a notification but \'notify-send\' not present')

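The deleted helper above resolves the pre-`sudo` user's uid so a notification can reach that user's session bus. A condensed, synchronous sketch of the same approach (illustrative only; the original ran `notify-send` via `trio.run_process`):

import os
import subprocess

def dbus_addr_for_sudo_user() -> str | None:
    # resolve the invoking (pre-`sudo`) user's uid and build the
    # conventional per-user session bus path; None outside `sudo`
    su = os.environ.get('SUDO_USER')
    if not su:
        return None

    uid = subprocess.run(
        ['id', '-u', su],
        stdout=subprocess.PIPE,
    ).stdout.decode().strip()
    return f'unix:path=/run/user/{uid}/bus'


addr = dbus_addr_for_sudo_user()
if addr:
    os.environ['DBUS_SESSION_BUS_ADDRESS'] = addr
subprocess.run(
    # message text here is a stand-in for the EMS status payload
    ['notify-send', '-u', 'normal', '-t', '3000', 'piker', 'fill received'],
    check=False,
)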
@@ -18,23 +18,23 @@ Super fast OHLC sampling graphics types.

 """
 from __future__ import annotations
+from typing import (
+    Optional,
+    TYPE_CHECKING,
+)

 import numpy as np
-from PyQt5 import (
-    QtGui,
-    QtWidgets,
-)
-from PyQt5.QtCore import (
-    QLineF,
-    QRectF,
-)
-from PyQt5.QtWidgets import QGraphicsItem
+import pyqtgraph as pg
+from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5.QtCore import QLineF, QPointF
 from PyQt5.QtGui import QPainterPath

-from ._curve import FlowGraphic
 from .._profile import pg_profile_enabled, ms_slower_then
+from ._style import hcolor
 from ..log import get_logger
-from .._profile import Profiler
+
+if TYPE_CHECKING:
+    from ._chart import LinkedSplits


 log = get_logger(__name__)
@@ -43,8 +43,7 @@ log = get_logger(__name__)
 def bar_from_ohlc_row(
     row: np.ndarray,
     # 0.5 is no overlap between arms, 1.0 is full overlap
-    bar_w: float,
-    bar_gap: float = 0.16
+    w: float = 0.43

 ) -> tuple[QLineF]:
     '''
@@ -52,7 +51,8 @@ def bar_from_ohlc_row(
     OHLC "bar" for use in the "last datum" of a series.

     '''
-    open, high, low, close, index = row
+    open, high, low, close, index = row[
+        ['open', 'high', 'low', 'close', 'index']]

     # TODO: maybe consider using `QGraphicsLineItem` ??
     # gives us a ``.boundingRect()`` on the objects which may make
@@ -60,11 +60,9 @@ def bar_from_ohlc_row(
     # history path faster since it's done in C++:
     # https://doc.qt.io/qt-5/qgraphicslineitem.html

-    mid: float = (bar_w / 2) + index
-
     # high -> low vertical (body) line
     if low != high:
-        hl = QLineF(mid, low, mid, high)
+        hl = QLineF(index, low, index, high)
     else:
         # XXX: if we don't do it renders a weird rectangle?
         # see below for filtering this later...
@@ -75,55 +73,48 @@ def bar_from_ohlc_row(
     # the index's range according to the view mapping coordinates.

     # open line
-    o = QLineF(index + bar_gap, open, mid, open)
+    o = QLineF(index - w, open, index, open)

     # close line
-    c = QLineF(
-        mid, close,
-        index + bar_w - bar_gap, close,
-    )
+    c = QLineF(index, close, index + w, close)

     return [hl, o, c]


-class BarItems(FlowGraphic):
+class BarItems(pg.GraphicsObject):
     '''
     "Price range" bars graphics rendered from a OHLC sampled sequence.

     '''
-    # XXX: causes this weird jitter bug when click-drag panning
-    # where the path curve will awkwardly flicker back and forth?
-    cache_mode: int = QGraphicsItem.NoCache
-
     def __init__(
         self,
-        *args,
-        **kwargs,
+        linked: LinkedSplits,
+        plotitem: 'pg.PlotItem',  # noqa
+        pen_color: str = 'bracket',
+        last_bar_color: str = 'bracket',
+
+        name: Optional[str] = None,

     ) -> None:
+        super().__init__()
+        self.linked = linked
+        # XXX: for the mega-lulz increasing width here increases draw
+        # latency... so probably don't do it until we figure that out.
+        self._color = pen_color
+        self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
+        self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
+        self._name = name

-        super().__init__(*args, **kwargs)
-        self._last_bar_lines: tuple[QLineF, ...] | None = None
+        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+        self.path = QPainterPath()
+        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

-    def x_last(self) -> None | float:
-        '''
-        Return the last most x value of the close line segment
-        or if not drawn yet, ``None``.
-
-        '''
-        if self._last_bar_lines:
-            close_arm_line = self._last_bar_lines[-1]
-            return close_arm_line.x2() if close_arm_line else None
-        else:
-            return None
+    def x_uppx(self) -> int:
+        # we expect the downsample curve report this.
+        return 0

-    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
     def boundingRect(self):
-        # profiler = Profiler(
-        #     msg=f'BarItems.boundingRect(): `{self._name}`',
-        #     disabled=not pg_profile_enabled(),
-        #     ms_threshold=ms_slower_then,
-        # )
+        # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect

         # TODO: Can we do rect caching to make this faster
         # like `pg.PlotCurveItem` does? In theory it's just
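For the geometry above: each "last datum" bar is three `QLineF` segments sharing a mid x. A worked example using the `bar_w`/`bar_gap` variant from the left branch, with made-up values (`QLineF` is a plain value type, so no Qt app is needed):

from PyQt5.QtCore import QLineF

index, open_, high, low, close = 100.0, 10.0, 14.0, 9.0, 13.0
bar_w, bar_gap = 1.0, 0.16

mid = (bar_w / 2) + index                       # x of the vertical body
hl = QLineF(mid, low, mid, high)                # high -> low body line
o = QLineF(index + bar_gap, open_, mid, open_)  # left "open" arm
c = QLineF(
    mid, close,
    index + bar_w - bar_gap, close,             # right "close" arm
)
for line in (hl, o, c):
    print(line.x1(), line.y1(), '->', line.x2(), line.y2())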
@@ -143,37 +134,32 @@ class BarItems(FlowGraphic):
             hb.topLeft(),
             hb.bottomRight(),
         )
-        mn_y = hb_tl.y()
-        mx_y = hb_br.y()
-        most_left = hb_tl.x()
-        most_right = hb_br.x()
-        # profiler('calc path vertices')

         # need to include last bar height or BR will be off
-        # OHLC line segments: [hl, o, c]
-        last_lines: tuple[QLineF] | None = self._last_bar_lines
+        mx_y = hb_br.y()
+        mn_y = hb_tl.y()
+
+        last_lines = self._last_bar_lines
         if last_lines:
-            (
-                hl,
-                o,
-                c,
-            ) = last_lines
-            most_right = c.x2() + 1
-            ymx = ymn = c.y2()
-
-            if hl:
-                y1, y2 = hl.y1(), hl.y2()
-                ymn = min(y1, y2)
-                ymx = max(y1, y2)
-                mx_y = max(ymx, mx_y)
-                mn_y = min(ymn, mn_y)
-                # profiler('calc last bar vertices')
-
-        return QRectF(
-            most_left,
-            mn_y,
-            most_right - most_left + 1,
-            mx_y - mn_y,
-        )
+            body_line = self._last_bar_lines[0]
+            if body_line:
+                mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
+                mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
+
+        return QtCore.QRectF(
+
+            # top left
+            QPointF(
+                hb_tl.x(),
+                mn_y,
+            ),
+
+            # bottom right
+            QPointF(
+                hb_br.x() + 1,
+                mx_y,
+            )
+        )

     def paint(
@@ -184,7 +170,7 @@ class BarItems(FlowGraphic):

     ) -> None:

-        profiler = Profiler(
+        profiler = pg.debug.Profiler(
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,
         )
@@ -197,12 +183,12 @@ class BarItems(FlowGraphic):
         # as is necesarry for what's in "view". Not sure if this will
         # lead to any perf gains other then when zoomed in to less bars
         # in view.
-        p.setPen(self.last_step_pen)
+        p.setPen(self.last_bar_pen)
         if self._last_bar_lines:
             p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
             profiler('draw last bar')

-        p.setPen(self._pen)
+        p.setPen(self.bars_pen)
         p.drawPath(self.path)
         profiler(f'draw history path: {self.path.capacity()}')

@@ -210,40 +196,29 @@ class BarItems(FlowGraphic):
         self,
         path: QPainterPath,
         src_data: np.ndarray,
+        render_data: np.ndarray,
         reset: bool,
         array_key: str,
-        index_field: str,

-    ) -> None:

-        # relevant fields
         fields: list[str] = [
+            'index',
             'open',
             'high',
             'low',
             'close',
-            index_field,
-        ]
+        ],

+    ) -> None:

+        # relevant fields
         ohlc = src_data[fields]
-        # last_row = ohlc[-1:]
+        last_row = ohlc[-1:]

         # individual values
-        last_row = o, h, l, last, i = ohlc[-1]
+        last_row = i, o, h, l, last = ohlc[-1]

-        # times = src_data['time']
-        # if times[-1] - times[-2]:
-        #     breakpoint()
-
-        index = src_data[index_field]
-        step_size = index[-1] - index[-2]
-
         # generate new lines objects for updatable "current bar"
-        bg: float = 0.16 * step_size
-        self._last_bar_lines = bar_from_ohlc_row(
-            last_row,
-            bar_w=step_size,
-            bar_gap=bg,
-        )
+        self._last_bar_lines = bar_from_ohlc_row(last_row)

         # assert i == graphics.start_index - 1
         # assert i == last_index
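The `src_data[fields]` expression above is numpy structured-array multi-field selection; the last row then unpacks positionally. A small demo with a made-up 3-row array:

import numpy as np

src_data = np.array(
    [
        (0, 10.0, 11.0, 9.5, 10.5),
        (1, 10.5, 12.0, 10.2, 11.8),
        (2, 11.8, 12.5, 11.0, 12.1),
    ],
    dtype=[
        ('index', 'i8'),
        ('open', 'f8'),
        ('high', 'f8'),
        ('low', 'f8'),
        ('close', 'f8'),
    ],
)

fields = ['index', 'open', 'high', 'low', 'close']
ohlc = src_data[fields]  # multi-field view selection
i, o, h, l, last = ohlc[-1]  # unpack the latest bar positionally
print(i, o, h, l, last)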
@@ -258,16 +233,10 @@ class BarItems(FlowGraphic):
         if l != h:  # noqa

             if body is None:
-                body = self._last_bar_lines[0] = QLineF(
-                    i + bg, l,
-                    i + step_size - bg, h,
-                )
+                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
             else:
                 # update body
-                body.setLine(
-                    body.x1(), l,
-                    body.x2(), h,
-                )
+                body.setLine(i, l, i, h)

         # XXX: pretty sure this is causing an issue where the
         # bar has a large upward move right before the next
@@ -278,4 +247,4 @@ class BarItems(FlowGraphic):
         # date / from some previous sample. It's weird though
         # because i've seen it do this to bars i - 3 back?

-        return ohlc[index_field], ohlc['close']
+        return ohlc['index'], ohlc['close']

@@ -22,9 +22,12 @@ from __future__ import annotations
 from typing import (
     Optional, Generic,
     TypeVar, Callable,
+    Literal,
 )
+import enum
+import sys

-# from pydantic import BaseModel, validator
+from pydantic import BaseModel, validator
 from pydantic.generics import GenericModel
 from PyQt5.QtWidgets import (
     QWidget,
@@ -35,7 +38,6 @@ from ._forms import (
     # FontScaledDelegate,
     Edit,
 )
-from ..data.types import Struct


 DataType = TypeVar('DataType')
@@ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]):
     options: dict[str, DataType]
     # value: DataType = None

-    # @validator('value')  # , always=True)
+    @validator('value')  # , always=True)
     def set_value_first(
         cls,

@@ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]):
     widget_factory = Edit


-class AllocatorPane(Struct):
+class AllocatorPane(BaseModel):

     account = Selection[str](
         options=dict.fromkeys(

@@ -18,27 +18,23 @@
 Charting overlay helpers.

 '''
-from collections import defaultdict
-from functools import partial
-from typing import (
-    Callable,
-    Optional,
-)
+from typing import Callable, Optional
+
+from pyqtgraph.Qt.QtCore import (
+    # QObject,
+    # Signal,
+    Qt,
+    # QEvent,
+)

 from pyqtgraph.graphicsItems.AxisItem import AxisItem
 from pyqtgraph.graphicsItems.ViewBox import ViewBox
-# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
+from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
 from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
-from pyqtgraph.Qt.QtCore import (
-    QObject,
-    Signal,
-    QEvent,
-    Qt,
-)
-from pyqtgraph.Qt.QtWidgets import (
-    # QGraphicsGridLayout,
-    QGraphicsLinearLayout,
-)
+from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
+from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
+
+from ._interaction import ChartView

 __all__ = ["PlotItemOverlay"]

@@ -84,20 +80,25 @@ class ComposedGridLayout:
     ``<axis_name>i`` in the layout.

     The ``item: PlotItem`` passed to the constructor's grid layout is
-    used verbatim as the "main plot" who's view box is given precedence
-    for input handling. The main plot's axes are removed from its
+    used verbatim as the "main plot" who's view box is give precedence
+    for input handling. The main plot's axes are removed from it's
     layout and placed in the surrounding exterior layouts to allow for
     re-ordering if desired.

     '''
     def __init__(
         self,
-        pi: PlotItem,
+        item: PlotItem,
+        grid: QGraphicsGridLayout,
+        reverse: bool = False,  # insert items to the "center"

     ) -> None:
+        self.items: list[PlotItem] = []
+        # self.grid = grid
+        self.reverse = reverse

-        self.pitems: list[PlotItem] = []
-        self._pi2axes: dict[  # TODO: use a ``bidict`` here?
+        # TODO: use a ``bidict`` here?
+        self._pi2axes: dict[
             int,
             dict[str, AxisItem],
         ] = {}
@@ -119,13 +120,12 @@ class ComposedGridLayout:

             if name in ('top', 'bottom'):
                 orient = Qt.Vertical

             elif name in ('left', 'right'):
                 orient = Qt.Horizontal

             layout.setOrientation(orient)

-        self.insert_plotitem(0, pi)
+        self.insert(0, item)

         # insert surrounding linear layouts into the parent pi's layout
         # such that additional axes can be appended arbitrarily without
@@ -135,14 +135,13 @@ class ComposedGridLayout:
             # TODO: do we need this?
             # axis should have been removed during insert above
             index = _axes_layout_indices[name]
-            axis = pi.layout.itemAt(*index)
+            axis = item.layout.itemAt(*index)
             if axis and axis.isVisible():
                 assert linlayout.itemAt(0) is axis

-            # XXX: see comment in ``.insert_plotitem()``...
-            # pi.layout.removeItem(axis)
-            pi.layout.addItem(linlayout, *index)
-            layout = pi.layout.itemAt(*index)
+            # item.layout.removeItem(axis)
+            item.layout.addItem(linlayout, *index)
+            layout = item.layout.itemAt(*index)
             assert layout is linlayout

     def _register_item(
@@ -158,32 +157,27 @@ class ComposedGridLayout:
             self._pi2axes.setdefault(name, {})[index] = axis

         # enter plot into list for index tracking
-        self.pitems.insert(index, plotitem)
+        self.items.insert(index, plotitem)

-    def insert_plotitem(
+    def insert(
         self,
         index: int,
         plotitem: PlotItem,

-    ) -> tuple[int, list[AxisItem]]:
+    ) -> (int, int):
         '''
         Place item at index by inserting all axes into the grid
         at list-order appropriate position.

         '''
         if index < 0:
-            raise ValueError(
-                '`.insert_plotitem()` only supports an index >= 0'
-            )
-
-        inserted_axes: list[AxisItem] = []
+            raise ValueError('`insert()` only supports an index >= 0')

         # add plot's axes in sequence to the embedded linear layouts
         # for each "side" thus avoiding graphics collisions.
         for name, axis_info in plotitem.axes.copy().items():
             linlayout, axes = self.sides[name]
             axis = axis_info['item']
-            inserted_axes.append(axis)

             if axis in axes:
                 # TODO: re-order using ``.pop()`` ?
@@ -196,20 +190,19 @@ class ComposedGridLayout:
             if (
                 not axis.isVisible()

-                # XXX: we never skip moving the axes for the *root*
+                # XXX: we never skip moving the axes for the *first*
                 # plotitem inserted (even if not shown) since we need to
                 # move all the hidden axes into linear sub-layouts for
                 # that "central" plot in the overlay. Also if we don't
                 # do it there's weird geomoetry calc offsets that make
                 # view coords slightly off somehow .. smh
-                and not len(self.pitems) == 0
+                and not len(self.items) == 0
             ):
                 continue

-            # XXX: Remove old axis?
-            # No, turns out we don't need this?
-            # DON'T UNLINK IT since we need the original ``ViewBox`` to
-            # still drive it with events/handlers B)
+            # XXX: Remove old axis? No, turns out we don't need this?
+            # DON'T unlink it since we the original ``ViewBox``
+            # to still drive it B)
             # popped = plotitem.removeAxis(name, unlink=False)
             # assert axis is popped

@@ -225,9 +218,9 @@ class ComposedGridLayout:

         self._register_item(index, plotitem)

-        return (index, inserted_axes)
+        return index

-    def append_plotitem(
+    def append(
         self,
         item: PlotItem,

@@ -239,7 +232,7 @@ class ComposedGridLayout:
         '''
         # for left and bottom axes we have to first remove
         # items and re-insert to maintain a list-order.
-        return self.insert_plotitem(len(self.pitems), item)
+        return self.insert(len(self.items), item)

     def get_axis(
         self,
@@ -252,20 +245,20 @@ class ComposedGridLayout:
         if axis for that name is not shown.

         '''
-        index = self.pitems.index(plot)
+        index = self.items.index(plot)
         named = self._pi2axes[name]
         return named.get(index)

-    # def pop(
-    #     self,
-    #     item: PlotItem,
+    def pop(
+        self,
+        item: PlotItem,

-    # ) -> PlotItem:
-    #     '''
-    #     Remove item and restack all axes in list-order.
+    ) -> PlotItem:
+        '''
+        Remove item and restack all axes in list-order.

-    #     '''
-    #     raise NotImplementedError
+        '''
+        raise NotImplementedError


 # Unimplemented features TODO:
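A toy model of the `_pi2axes` registry driving `get_axis()` above: axes are keyed first by side name, then by the plot's insertion index (plain strings stand in for real `AxisItem`s here):

pi2axes: dict[str, dict[int, str]] = {}

def register(name: str, index: int, axis: str) -> None:
    # mirrors `_register_item()`: one sub-dict per side
    pi2axes.setdefault(name, {})[index] = axis

register('right', 0, 'root-right-axis')
register('right', 1, 'overlay-right-axis')

# `get_axis()`-style lookup: None when that side isn't shown
print(pi2axes['right'].get(1))         # overlay-right-axis
print(pi2axes.get('left', {}).get(0))  # None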
@@ -286,6 +279,194 @@ class ComposedGridLayout:
 # axis?


+# TODO: we might want to enabled some kind of manual flag to disable
+# this method wrapping during type creation? As example a user could
+# definitively decide **not** to enable broadcasting support by
+# setting something like ``ViewBox.disable_relays = True``?
+def mk_relay_method(
+
+    signame: str,
+    slot: Callable[
+        [ViewBox,
+         'QEvent',
+         Optional[AxisItem]],
+        None,
+    ],
+
+) -> Callable[
+    [
+        ViewBox,
+        # lol, there isn't really a generic type thanks
+        # to the rewrite of Qt's event system XD
+        'QEvent',
+
+        'Optional[AxisItem]',
+        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
+    ],
+    None,
+]:
+
+    def maybe_broadcast(
+        vb: 'ViewBox',
+        ev: 'QEvent',
+        axis: 'Optional[int]' = None,
+        relayed_from: 'ViewBox' = None,
+
+    ) -> None:
+        '''
+        (soon to be) Decorator which makes an event handler
+        "broadcastable" to overlayed ``GraphicsWidget``s.
+
+        Adds relay signals based on the decorated handler's name
+        and conducts a signal broadcast of the relay signal if there
+        are consumers registered.
+
+        '''
+        # When no relay source has been set just bypass all
+        # the broadcast machinery.
+        if vb.event_relay_source is None:
+            ev.accept()
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        if relayed_from:
+            assert axis is None
+
+            # this is a relayed event and should be ignored (so it does not
+            # halt/short circuit the graphicscene loop). Further the
+            # surrounding handler for this signal must be allowed to execute
+            # and get processed by **this consumer**.
+            # print(f'{vb.name} rx relayed from {relayed_from.name}')
+            ev.ignore()
+
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        if axis is not None:
+            # print(f'{vb.name} handling axis event:\n{str(ev)}')
+            ev.accept()
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        elif (
+            relayed_from is None
+            and vb.event_relay_source is vb  # we are the broadcaster
+            and axis is None
+        ):
+            # Broadcast case: this is a source event which will be
+            # relayed to attached consumers and accepted after all
+            # consumers complete their own handling followed by this
+            # routine's processing. Sequence is,
+            # - pre-relay to all consumers *first* - ``.emit()`` blocks
+            #   until all downstream relay handlers have run.
+            # - run the source handler for **this** event and accept
+            #   the event
+
+            # Access the "bound signal" that is created
+            # on the widget type as part of instantiation.
+            signal = getattr(vb, signame)
+            # print(f'{vb.name} emitting {signame}')
+
+            # TODO/NOTE: we could also just bypass a "relay" signal
+            # entirely and instead call the handlers manually in
+            # a loop? This probably is a lot simpler and also doesn't
+            # have any downside, and allows not touching target widget
+            # internals.
+            signal.emit(
+                ev,
+                axis,
+                # passing this demarks a broadcasted/relayed event
+                vb,
+            )
+            # accept event so no more relays are fired.
+            ev.accept()
+
+            # call underlying wrapped method with an extra
+            # ``relayed_from`` value to denote that this is a relayed
+            # event handling case.
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+    return maybe_broadcast
+
+
+# XXX: :( can't define signals **after** class compile time
+# so this is not really useful.
+# def mk_relay_signal(
+#     func,
+#     name: str = None,
+
+# ) -> Signal:
+#     (
+#         args,
+#         varargs,
+#         varkw,
+#         defaults,
+#         kwonlyargs,
+#         kwonlydefaults,
+#         annotations
+#     ) = inspect.getfullargspec(func)
+
+#     # XXX: generate a relay signal with 1 extra
+#     # argument for a ``relayed_from`` kwarg. Since
+#     # ``'self'`` is already ignored by signals we just need
+#     # to count the arguments since we're adding only 1 (and
+#     # ``args`` will capture that).
+#     numargs = len(args + list(defaults))
+#     signal = Signal(*tuple(numargs * [object]))
+#     signame = name or func.__name__ + 'Relay'
+#     return signame, signal
+
+
+def enable_relays(
+    widget: GraphicsWidget,
+    handler_names: list[str],
+
+) -> list[Signal]:
+    '''
+    Method override helper which enables relay of a particular
+    ``Signal`` from some chosen broadcaster widget to a set of
+    consumer widgets which should operate their event handlers normally
+    but instead of signals "relayed" from the broadcaster.
+
+    Mostly useful for overlaying widgets that handle user input
+    that you want to overlay graphically. The target ``widget`` type must
+    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
+    name provided in ``handler_names: list[str]``.
+
+    '''
+    signals = []
+    for name in handler_names:
+        handler = getattr(widget, name)
+        signame = name + 'Relay'
+        # ensure the target widget defines a relay signal
+        relay = getattr(widget, signame)
+        widget.relays[signame] = name
+        signals.append(relay)
+        method = mk_relay_method(signame, handler)
+        setattr(widget, name, method)
+
+    return signals
+
+
+enable_relays(
+    ChartView,
+    ['wheelEvent', 'mouseDragEvent']
+)
+
+
 class PlotItemOverlay:
     '''
     A composite for managing overlaid ``PlotItem`` instances such that
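As the `enable_relays()` docstring notes, the relay `Signal`s must already exist on the target type since Qt binds signals at class-creation time (the commented-out `mk_relay_signal` graveyard above exists for that reason). A hedged sketch of the minimal class shape it expects; the `MyChartView` name and the three-`object` signal arity are assumptions, not from this diff:

from pyqtgraph.Qt.QtCore import Signal
from pyqtgraph.graphicsItems.ViewBox import ViewBox

class MyChartView(ViewBox):
    # one relay signal per broadcast-able handler, declared up front
    # with slots for (event, axis, relayed_from)
    wheelEventRelay = Signal(object, object, object)
    mouseDragEventRelay = Signal(object, object, object)

    # registry filled in by enable_relays(): signal name -> handler name
    relays: dict[str, str] = {}

    def wheelEvent(self, ev, axis=None):
        ev.accept()

    def mouseDragEvent(self, ev, axis=None):
        ev.accept()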
@ -301,191 +482,86 @@ class PlotItemOverlay:
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
self.root_plotitem: PlotItem = root_plotitem
|
self.root_plotitem: PlotItem = root_plotitem
|
||||||
self.relay_handlers: defaultdict[
|
|
||||||
str,
|
|
||||||
list[Callable],
|
|
||||||
] = defaultdict(list)
|
|
||||||
|
|
||||||
# NOTE: required for scene layering/relaying; this guarantees
|
vb = root_plotitem.vb
|
||||||
# the "root" plot receives priority for interaction
|
vb.event_relay_source = vb # TODO: maybe change name?
|
||||||
# events/signals.
|
vb.setZValue(1000) # XXX: critical for scene layering/relaying
|
||||||
root_plotitem.vb.setZValue(10)
|
|
||||||
|
|
||||||
self.layout = ComposedGridLayout(root_plotitem)
|
self.overlays: list[PlotItem] = []
|
||||||
|
self.layout = ComposedGridLayout(
|
||||||
|
root_plotitem,
|
||||||
|
root_plotitem.layout,
|
||||||
|
)
|
||||||
self._relays: dict[str, Signal] = {}
|
self._relays: dict[str, Signal] = {}
|
||||||
|
|
||||||
@property
|
|
||||||
def overlays(self) -> list[PlotItem]:
|
|
||||||
return self.layout.pitems
|
|
||||||
|
|
||||||
def add_plotitem(
|
def add_plotitem(
|
||||||
self,
|
self,
|
||||||
plotitem: PlotItem,
|
plotitem: PlotItem,
|
||||||
index: Optional[int] = None,
|
index: Optional[int] = None,
|
||||||
|
|
||||||
# event/signal names which will be broadcasted to all added
|
# TODO: we could also put the ``ViewBox.XAxis``
|
||||||
# (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
|
# style enum here?
|
||||||
relay_events: list[str] = [],
|
|
||||||
|
|
||||||
# (0,), # link x
|
# (0,), # link x
|
||||||
# (1,), # link y
|
# (1,), # link y
|
||||||
# (0, 1), # link both
|
# (0, 1), # link both
|
||||||
link_axes: tuple[int] = (),
|
link_axes: tuple[int] = (),
|
||||||
|
|
||||||
) -> tuple[int, list[AxisItem]]:
|
) -> None:
|
||||||
|
|
||||||
|
index = index or len(self.overlays)
|
||||||
root = self.root_plotitem
|
root = self.root_plotitem
|
||||||
|
# layout: QGraphicsGridLayout = root.layout
|
||||||
|
self.overlays.insert(index, plotitem)
|
||||||
vb: ViewBox = plotitem.vb
|
vb: ViewBox = plotitem.vb
|
||||||
|
|
||||||
|
# mark this consumer overlay as ready to expect relayed events
|
||||||
|
# from the root plotitem.
|
||||||
|
vb.event_relay_source = root.vb
|
||||||
|
|
||||||
# TODO: some sane way to allow menu event broadcast XD
|
# TODO: some sane way to allow menu event broadcast XD
|
||||||
# vb.setMenuEnabled(False)
|
# vb.setMenuEnabled(False)
|
||||||
|
|
||||||
# wire up any relay signal(s) from the source plot to added
|
# TODO: inside the `maybe_broadcast()` (soon to be) decorator
|
||||||
# "overlays". We use a plain loop instead of mucking with
|
# we need have checks that consumers have been attached to
|
||||||
# re-connecting signal/slots which tends to be more invasive and
|
# these relay signals.
|
||||||
# harder to implement and provides no measurable performance
|
if link_axes != (0, 1):
|
||||||
# gain.
|
|
||||||
if relay_events:
|
|
||||||
for ev_name in relay_events:
|
|
||||||
relayee_handler: Callable[
|
|
||||||
[
|
|
||||||
ViewBox,
|
|
||||||
# lol, there isn't really a generic type thanks
|
|
||||||
# to the rewrite of Qt's event system XD
|
|
||||||
QEvent,
|
|
||||||
|
|
||||||
AxisItem | None,
|
# wire up relay signals
|
||||||
],
|
for relay_signal_name, handler_name in vb.relays.items():
|
||||||
None,
|
# print(handler_name)
|
||||||
] = getattr(vb, ev_name)
|
# XXX: Signal class attrs are bound after instantiation
|
||||||
|
# of the defining type, so we need to access that bound
|
||||||
sub_handlers: list[Callable] = self.relay_handlers[ev_name]
|
# version here.
|
||||||
|
signal = getattr(root.vb, relay_signal_name)
|
||||||
# on the first registry of a relayed event we pop the
|
handler = getattr(vb, handler_name)
|
||||||
# root's handler and override it to a custom broadcaster
|
signal.connect(handler)
|
||||||
# routine.
|
|
||||||
if not sub_handlers:
|
|
||||||
|
|
||||||
src_handler = getattr(
|
|
||||||
root.vb,
|
|
||||||
ev_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
def broadcast(
|
|
||||||
ev: 'QEvent',
|
|
||||||
|
|
||||||
# TODO: drop this viewbox specific input and
|
|
||||||
# allow a predicate to be passed in by user.
|
|
||||||
axis: 'Optional[int]' = None,
|
|
||||||
|
|
||||||
*,
|
|
||||||
|
|
||||||
# these are bound in by the ``partial`` below
|
|
||||||
# and ensure a unique broadcaster per event.
|
|
||||||
ev_name: str = None,
|
|
||||||
src_handler: Callable = None,
|
|
||||||
relayed_from: 'ViewBox' = None,
|
|
||||||
|
|
||||||
# remaining inputs the source handler expects
|
|
||||||
**kwargs,
|
|
||||||
|
|
||||||
) -> None:
|
|
||||||
'''
|
|
||||||
Broadcast signal or event: this is a source
|
|
||||||
event which will be relayed to attached
|
|
||||||
"relayee" plot item consumers.
|
|
||||||
|
|
||||||
The event is accepted halting any further
|
|
||||||
handlers from being triggered.
|
|
||||||
|
|
||||||
Sequence is,
|
|
||||||
- pre-relay to all consumers *first* - exactly
|
|
||||||
like how a ``Signal.emit()`` blocks until all
|
|
||||||
downstream relay handlers have run.
|
|
||||||
- run the event's source handler event
|
|
||||||
|
|
||||||
'''
|
|
||||||
ev.accept()
|
|
||||||
|
|
||||||
# broadcast first to relayees *first*. trigger
|
|
||||||
# relay of event to all consumers **before**
|
|
||||||
# processing/consumption in the source handler.
|
|
||||||
relayed_handlers = self.relay_handlers[ev_name]
|
|
||||||
|
|
||||||
assert getattr(vb, ev_name).__name__ == ev_name
|
|
||||||
|
|
||||||
# TODO: generalize as an input predicate
|
|
||||||
if axis is None:
|
|
||||||
for handler in relayed_handlers:
|
|
||||||
handler(
|
|
||||||
ev,
|
|
||||||
axis=axis,
|
|
||||||
**kwargs,
|
|
||||||
)
|
|
||||||
|
|
||||||
# run "source" widget's handler last
|
|
||||||
src_handler(
|
|
||||||
ev,
|
|
||||||
axis=axis,
|
|
||||||
)
|
|
||||||
|
|
||||||
# dynamic handler override on the publisher plot
|
|
||||||
setattr(
|
|
||||||
root.vb,
|
|
||||||
ev_name,
|
|
||||||
partial(
|
|
||||||
broadcast,
|
|
||||||
ev_name=ev_name,
|
|
||||||
src_handler=src_handler
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
assert getattr(root.vb, ev_name)
|
|
||||||
assert relayee_handler not in sub_handlers
|
|
||||||
|
|
||||||
# append relayed-to widget's handler to relay table
|
|
||||||
sub_handlers.append(relayee_handler)
|
|
||||||
|
|
||||||
# link dim-axes to root if requested by user.
|
# link dim-axes to root if requested by user.
|
||||||
|
# TODO: solve more-then-wanted scaled panning on click drag
|
||||||
|
# which seems to be due to broadcast. So we probably need to
|
||||||
|
# disable broadcast when axes are linked in a particular
|
||||||
|
# dimension?
|
||||||
for dim in link_axes:
|
for dim in link_axes:
|
||||||
# link x and y axes to new view box such that the top level
|
# link x and y axes to new view box such that the top level
|
||||||
# viewbox propagates to the root (and whatever other
|
# viewbox propagates to the root (and whatever other
|
||||||
# plotitem overlays that have been added).
|
# plotitem overlays that have been added).
|
||||||
vb.linkView(dim, root.vb)
|
vb.linkView(dim, root.vb)
|
||||||
|
|
||||||
-        # => NOTE: in order to prevent "more-than-linear" scaled
-        # panning moves on (e.g. click-drag) certain range change
-        # signals (i.e. ``.sigXRangeChanged``), the user needs to be
-        # careful that any broadcasted ``relay_events`` are short
-        # circuited in sub-handlers (aka relayees') implementations. As
-        # an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
-        # overlayed implementations need to be sure they either don't
-        # also link the x-axes (by not providing ``link_axes=(0,)``
-        # above) or that the relayee ``.mouseDragEvent()`` handlers are
-        # ready to "``return`` early" in the case that
-        # ``.sigXRangeChanged`` is emitted as part of linked axes.
-        # For more details on such signalling mechanics peek in
-        # ``ViewBox.linkView()``.
+        # make overlaid viewbox impossible to focus since the top
+        # level should handle all input and relay to overlays.
+        # NOTE: this was solved with the `setZValue()` above!

-        # make overlaid viewbox impossible to focus since the top level
-        # should handle all input and relay to overlays. Note that the
-        # "root" plot item getting interaction priority is configured
-        # with the ``.setZValue()`` during init.
+        # TODO: we will probably want to add a "focus" api such that
+        # a new "top level" ``PlotItem`` can be selected dynamically
+        # (and presumably the axes dynamically sorted to match).
        vb.setFlag(
            vb.GraphicsItemFlag.ItemIsFocusable,
            False
        )
        vb.setFocusPolicy(Qt.NoFocus)

-        # => TODO: add a "focus" api for switching the "top level"
-        # ``PlotItem`` dynamically.
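
To make the "``return`` early" advice from the NOTE above concrete, here is a sketch of what a relayee's drag handler might look like (a hypothetical, untested method; ``relayed_from`` is the kwarg bound in by the broadcaster and ``.linkedView()`` is the stock ``pyqtgraph`` accessor):

    def mouseDragEvent(self, ev, axis=None, relayed_from=None) -> None:
        if (
            relayed_from is not None
            and self.linkedView(0) is not None
        ):
            # the x-axis is already linked so the range change will
            # propagate via ``.sigXRangeChanged``; don't also apply
            # the relayed drag or the pan will scale multiplicatively.
            ev.accept()
            return

        ...  # normal drag processing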

        # append-compose into the layout all axes from this plot
-        if index is None:
-            insert_index, axes = self.layout.append_plotitem(plotitem)
-        else:
-            insert_index, axes = self.layout.insert_plotitem(index, plotitem)
+        self.layout.insert(index, plotitem)

        plotitem.setGeometry(root.vb.sceneBoundingRect())

@@ -503,12 +579,24 @@ class PlotItemOverlay:
        root.vb.setFocus()
        assert root.vb.focusWidget()

-        vb.setZValue(100)
-
-        return (
-            index,
-            axes,
-        )
+    # XXX: do we need this? Why would you build then destroy?
+    def remove_plotitem(self, plotItem: PlotItem) -> None:
+        '''
+        Remove this ``PlotItem`` from the overlayed set, making it not
+        shown and unable to accept input.
+
+        '''
+        ...
+
+    # TODO: i think this would be super hot B)
+    def focus_item(self, plotitem: PlotItem) -> PlotItem:
+        '''
+        Apply focus to a contained PlotItem, thus making it the "top
+        level" item in the overlay, able to accept peripheral input
+        from the user and responsible for zoom and panning control via
+        its ``ViewBox``.
+
+        '''
+        ...
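
Neither stub is implemented yet, but the z-value and focus-policy mechanics used in ``add_plotitem()`` above suggest one possible shape for ``focus_item()``. The sketch below is purely illustrative and untested, and assumes a ``self.overlays`` collection of managed plot items:

    def focus_item(self, plotitem: PlotItem) -> PlotItem:
        # demote every other overlay so it can't take input
        for pi in self.overlays:
            pi.vb.setZValue(0)
            pi.vb.setFlag(pi.vb.GraphicsItemFlag.ItemIsFocusable, False)

        # promote the requested plot's view to "top level"
        vb = plotitem.vb
        vb.setZValue(100)
        vb.setFlag(vb.GraphicsItemFlag.ItemIsFocusable, True)
        vb.setFocus()
        return plotitem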

    def get_axis(
        self,

@@ -542,9 +630,8 @@ class PlotItemOverlay:
        return axes

-    # XXX: untested as of now.
-    # TODO: need this as part of selecting a different root/source
-    # plot to rewire interaction event broadcast dynamically.
+    # TODO: i guess we need this if you want to detach existing plots
+    # dynamically? XXX: untested as of now.
    def _disconnect_all(
        self,
        plotitem: PlotItem,

@@ -559,22 +646,3 @@ class PlotItemOverlay:
            disconnected.append(sig)

        return disconnected

    # XXX: do we need this? Why would you build then destroy?
    # def remove_plotitem(self, plotItem: PlotItem) -> None:
    #     '''
    #     Remove this ``PlotItem`` from the overlayed set making not shown
    #     and unable to accept input.
    #
    #     '''
    #     ...

    # TODO: i think this would be super hot B)
    # def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
    #     '''
    #     Apply focus to a contained PlotItem thus making it the "top level"
    #     item in the overlay able to accept peripheral's input from the user
    #     and responsible for zoom and panning control via its ``ViewBox``.
    #
    #     '''
    #     ...
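
The disconnect-and-collect idiom in ``_disconnect_all()`` generalizes to any re-wiring of a new root/source plot. A standalone sketch against the stock ``ViewBox`` signals (the signal names are real ``pyqtgraph`` attributes; the helper itself is hypothetical):

    def disconnect_view_signals(vb) -> list:
        disconnected = []
        for sig in (
            vb.sigXRangeChanged,
            vb.sigYRangeChanged,
            vb.sigRangeChangedManually,
        ):
            try:
                sig.disconnect()  # drop *all* slots on this signal
            except TypeError:
                pass  # nothing was connected
            disconnected.append(sig)

        # the caller can later re-``.connect()`` each collected signal
        return disconnected
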
@@ -0,0 +1,236 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Super fast ``QPainterPath`` generation related operator routines.

"""
from __future__ import annotations
from typing import (
    # Optional,
    TYPE_CHECKING,
)

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import njit, float64, int64  # , optional
# import pyqtgraph as pg
from PyQt5 import QtGui
# from PyQt5.QtCore import QLineF, QPointF

from ..data._sharedmem import (
    ShmArray,
)
# from .._profile import pg_profile_enabled, ms_slower_then
from ._compression import (
    ds_m4,
)

if TYPE_CHECKING:
    from ._flows import Renderer


def xy_downsample(
    x,
    y,
    uppx,

    x_spacer: float = 0.5,

) -> tuple[np.ndarray, np.ndarray]:

    # downsample whenever more than 1 pixel per datum can be shown.
    # always refresh data bounds until we get diffing
    # working properly, see above..
    bins, x, y = ds_m4(
        x,
        y,
        uppx,
    )

    # flatten output to 1d arrays suitable for path-graphics generation.
    x = np.broadcast_to(x[:, None], y.shape)
    x = (x + np.array(
        [-x_spacer, 0, 0, x_spacer]
    )).flatten()
    y = y.flatten()

    return x, y
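
A quick usage sketch with synthetic data (this assumes the M4 downsampler in ``ds_m4()`` returns per-bin rows of 4 y-values, which is what the ``[-x_spacer, 0, 0, x_spacer]`` offsets pair up with):

    import numpy as np

    x = np.arange(10_000, dtype=float)
    y = np.sin(x / 50)

    # with more than one datum per pixel (uppx > 1) the series is
    # binned and flattened back out to matched 1d x/y arrays.
    x_ds, y_ds = xy_downsample(x, y, uppx=16)
    assert x_ds.shape == y_ds.shape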


@njit(
    # TODO: for now need to construct this manually for readonly arrays, see
    # https://github.com/numba/numba/issues/4511
    # ntypes.tuple((float64[:], float64[:], float64[:]))(
    #     numba_ohlc_dtype[::1],  # contiguous
    #     int64,
    #     optional(float64),
    # ),
    nogil=True
)
def path_arrays_from_ohlc(
    data: np.ndarray,
    start: int64,
    bar_gap: float64 = 0.43,

) -> np.ndarray:
    '''
    Generate an array of line objects from input ohlc data.

    '''
    size = int(data.shape[0] * 6)

    x = np.zeros(
        # data,
        shape=size,
        dtype=float64,
    )
    y, c = x.copy(), x.copy()

    # TODO: report bug for assert @
    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    for i, q in enumerate(data[start:], start):

        # TODO: ask numba why this doesn't work..
        # open, high, low, close, index = q[
        #     ['open', 'high', 'low', 'close', 'index']]

        open = q['open']
        high = q['high']
        low = q['low']
        close = q['close']
        index = float64(q['index'])

        istart = i * 6
        istop = istart + 6

        # x,y detail the 6 points which connect all vertexes of an ohlc bar
        x[istart:istop] = (
            index - bar_gap,
            index,
            index,
            index,
            index,
            index + bar_gap,
        )
        y[istart:istop] = (
            open,
            open,
            low,
            high,
            close,
            close,
        )

        # specifies that the first edge is never connected to the
        # prior bar's last edge, thus providing a small "gap"/"space"
        # between bars determined by ``bar_gap``.
        c[istart:istop] = (1, 1, 1, 1, 1, 0)

    return x, y, c
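
The ``(x, y, c)`` triple maps directly onto ``pyqtgraph``'s path builder, where the trailing ``0`` per bar in the connect array is what breaks the line and leaves the visual gap between bars. A small sketch (the field order in the dtype is illustrative; only the field names matter):

    import numpy as np
    import pyqtgraph as pg

    ohlc = np.array(
        [
            (0, 10.0, 11.0, 9.5, 10.5),
            (1, 10.5, 12.0, 10.2, 11.8),
        ],
        dtype=[
            ('index', 'i8'),
            ('open', 'f8'), ('high', 'f8'),
            ('low', 'f8'), ('close', 'f8'),
        ],
    )
    x, y, c = path_arrays_from_ohlc(ohlc, 0)
    path = pg.functions.arrayToQPath(x, y, connect=c.astype(bool))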


def gen_ohlc_qpath(
    r: Renderer,
    data: np.ndarray,
    array_key: str,  # we ignore this
    vr: tuple[int, int],

    start: int = 0,  # XXX: do we need this?
    # 0.5 is no overlap between arms, 1.0 is full overlap
    w: float = 0.43,

) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    '''
    More or less a direct proxy to ``path_arrays_from_ohlc()`` but
    with closed-in kwargs for line spacing.

    '''
    x, y, c = path_arrays_from_ohlc(
        data,
        start,
        bar_gap=w,
    )
    return x, y, c


def ohlc_to_line(
    ohlc_shm: ShmArray,
    data_field: str,
    fields: list[str] = ['open', 'high', 'low', 'close']

) -> tuple[
    np.ndarray,
    np.ndarray,
]:
    '''
    Convert an input struct-array holding OHLC samples into a pair of
    flattened x, y arrays with the same size (datums-wise) as the source
    data.

    '''
    y_out = ohlc_shm.ustruct(fields)
    first = ohlc_shm._first.value
    last = ohlc_shm._last.value

    # write pushed data to flattened copy
    y_out[first:last] = rfn.structured_to_unstructured(
        ohlc_shm.array[fields]
    )

    # generate a flat-interpolated x-domain
    x_out = (
        np.broadcast_to(
            ohlc_shm._array['index'][:, None],
            (
                ohlc_shm._array.size,
                # 4,  # only ohlc
                y_out.shape[1],
            ),
        ) + np.array([-0.5, 0, 0, 0.5])
    )
    assert y_out.any()

    return (
        x_out,
        y_out,
    )
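
The flattening step in isolation, for reference (``structured_to_unstructured`` is the stock numpy recfunctions helper):

    import numpy as np
    from numpy.lib import recfunctions as rfn

    a = np.array(
        [(10.0, 11.0, 9.5, 10.5)],
        dtype=[
            ('open', 'f8'), ('high', 'f8'),
            ('low', 'f8'), ('close', 'f8'),
        ],
    )
    flat = rfn.structured_to_unstructured(a)
    assert flat.shape == (1, 4)  # one (n, 4) row per OHLC sample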


def to_step_format(
    shm: ShmArray,
    data_field: str,
    index_field: str = 'index',

) -> tuple[int, np.ndarray, np.ndarray]:
    '''
    Convert an input 1d shm array to a "step array" format
    for use by path graphics generation.

    '''
    i = shm._array['index'].copy()
    out = shm._array[data_field].copy()

    x_out = np.broadcast_to(
        i[:, None],
        (i.size, 2),
    ) + np.array([-0.5, 0.5])

    y_out = np.empty((len(out), 2), dtype=out.dtype)
    y_out[:] = out[:, np.newaxis]

    # start y at origin level
    y_out[0, 0] = 0
    return x_out, y_out
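
The broadcast trick at the heart of both x-domain builders, shown standalone: each index ``i`` becomes the pair ``(i - 0.5, i + 0.5)`` and each sample value is doubled, so flattening the two ``(n, 2)`` arrays yields the flat treads of a step curve:

    import numpy as np

    i = np.arange(3)
    x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([-0.5, 0.5])
    out = np.array([1.0, 2.0, 3.0])
    y_out = np.empty((len(out), 2), dtype=out.dtype)
    y_out[:] = out[:, np.newaxis]

    print(x_out.flatten())  # [-0.5  0.5  0.5  1.5  1.5  2.5]
    print(y_out.flatten())  # [1. 1. 2. 2. 3. 3.]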
@@ -15,19 +15,13 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
-Customization of ``pyqtgraph`` core routines and various types normally
-for speedups.
-
-Generally, ours does not require "scientific precision" for pixel-perfect
-view transforms.
+Customization of ``pyqtgraph`` core routines to speed up our use, mostly
+based on not requiring "scientific precision" for pixel-perfect view
+transforms.

"""
-from typing import Optional
-
import pyqtgraph as pg

-from ._axes import Axis


def invertQTransform(tr):
    """Return a QTransform that is the inverse of *tr*.

@@ -52,236 +46,3 @@ def _do_overrides() -> None:
"""
|
"""
|
||||||
# we don't care about potential fp issues inside Qt
|
# we don't care about potential fp issues inside Qt
|
||||||
pg.functions.invertQTransform = invertQTransform
|
pg.functions.invertQTransform = invertQTransform
|
||||||
pg.PlotItem = PlotItem
|
|
||||||
|
|
||||||
# enable "QPainterPathPrivate for faster arrayToQPath" from
|
|
||||||
# https://github.com/pyqtgraph/pyqtgraph/pull/2324
|
|
||||||
pg.setConfigOption('enableExperimental', True)
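
Expected usage is a single call at startup, before any plot objects are built, so that later ``pg.PlotItem`` references resolve to the overridden type (a sketch, assuming the module layout above):

    import pyqtgraph as pg

    def main() -> None:
        _do_overrides()
        assert pg.functions.invertQTransform is invertQTransform
        assert pg.PlotItem is PlotItem
        # ... construct the Qt app and charts only after this point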


# NOTE: the below customized type contains all our changes on a method
# by method basis as per the diff:
# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
class PlotItem(pg.PlotItem):
    '''
    Overrides for the core plot object mostly pertaining to overlayed
    multi-view management as it relates to multi-axis management.

    This object is the combination of a ``ViewBox`` and multiple
    ``AxisItem``s and so far we've added additional functionality and
    APIs for:
    - removal of axes

    ---

    From ``pyqtgraph`` super type docs:
    - Manage placement of ViewBox, AxisItems, and LabelItems
    - Create and manage a list of PlotDataItems displayed inside the
      ViewBox
    - Implement a context menu with commonly used display and analysis
      options

    '''
    def __init__(
        self,
        parent=None,
        name=None,
        labels=None,
        title=None,
        viewBox=None,
        axisItems=None,
        default_axes=['left', 'bottom'],
        enableMenu=True,
        **kargs
    ):
        super().__init__(
            parent=parent,
            name=name,
            labels=labels,
            title=title,
            viewBox=viewBox,
            axisItems=axisItems,
            # default_axes=default_axes,
            enableMenu=enableMenu,
            **kargs,
        )
        self.name = name
        self.chart_widget = None
        # self.setAxisItems(
        #     axisItems,
        #     default_axes=default_axes,
        # )

    # NOTE: this is an entirely new method not in upstream.
    def removeAxis(
        self,
        name: str,
        unlink: bool = True,

    ) -> Optional[pg.AxisItem]:
        """
        Remove an axis from the contained axis items by ``name: str``.

        This means the axis graphics object will be removed from the
        ``.layout: QGraphicsGridLayout`` as well as unlinked from the
        underlying associated ``ViewBox``.

        If ``unlink: bool`` is set to ``False`` then the axis will stay
        linked to its view and will only be removed from the layout.

        If no axis with ``name: str`` is found then this is a noop.

        Return the axis instance that was removed.

        """
        entry = self.axes.pop(name, None)

        if not entry:
            return

        axis = entry['item']
        self.layout.removeItem(axis)
        axis.scene().removeItem(axis)
        if unlink:
            axis.unlinkFromView()

        self.update()

        return axis
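
Example use of the new method when composing overlays that bring their own axes (``other_layout`` is a hypothetical target ``QGraphicsGridLayout``):

    # keep the view link intact so the axis can be re-homed later
    axis = plotitem.removeAxis('right', unlink=False)
    if axis is not None:
        other_layout.addItem(axis, 2, 2)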

    # Why do we need to always have all axes created?
    #
    # I don't understand this at all.
    #
    # Everything seems to work if you just always apply the
    # set passed to this method **EXCEPT** for some super weird reason
    # the view box geometry still computes as though the space for the
    # `'bottom'` axis is always there **UNLESS** you always add that
    # axis but hide it?
    #
    # Why in tf would this be the case!?!?
    def setAxisItems(
        self,
        # XXX: yeah yeah, i know we can't use type annots like this yet.
        axisItems: Optional[dict[str, pg.AxisItem]] = None,
        add_to_layout: bool = True,
        default_axes: list[str] = ['left', 'bottom'],
    ):
        """
        Override axis item setting to only show the axes named in
        ``default_axes`` or passed in via ``axisItems`` (all four are
        still created, the rest are just hidden).

        """
        axisItems = axisItems or {}

        # XXX: wth is this even saying?!?
        # Array containing visible axis items
        # Also containing potentially hidden axes, but they are not
        # touched so it does not matter
        # visibleAxes = ['left', 'bottom']
        # Note that it does not matter that this adds
        # some values to visibleAxes a second time

        # XXX: uhhh wat^ ..?

        visibleAxes = list(default_axes) + list(axisItems.keys())

        # TODO: we should probably invert the loop here to not loop the
        # predefined "axis name set" and instead loop the `axisItems`
        # input and lookup indices from a predefined map.
        for name, pos in (
            ('top', (1, 1)),
            ('bottom', (3, 1)),
            ('left', (2, 0)),
            ('right', (2, 2))
        ):
            if (
                name in self.axes and
                name in axisItems
            ):
                # we already have an axis entry for this name
                # so remove the existing entry.
                self.removeAxis(name)

            # elif name not in axisItems:
            #     # this axis entry is not provided in this call
            #     # so remove any old/existing entry.
            #     self.removeAxis(name)

            # Create new axis
            if name in axisItems:
                axis = axisItems[name]
                if axis.scene() is not None:
                    if (
                        name not in self.axes
                        or axis != self.axes[name]["item"]
                    ):
                        raise RuntimeError(
                            "Can't add an axis to multiple plots. Shared axes"
                            " can be achieved with multiple AxisItem instances"
                            " and set[X/Y]Link.")

            else:
                # Set up new axis

                # XXX: ok but why do we want to add axes for all entries
                # if not desired by the user? The only reason I can see
                # adding this is without it there's some weird
                # ``ViewBox`` geometry bug.. where a gap for the
                # 'bottom' axis is somehow left in?
                # axis = pg.AxisItem(orientation=name, parent=self)
                axis = Axis(
                    self,
                    orientation=name,
                    parent=self,
                )

            axis.linkToView(self.vb)

            # XXX: shouldn't you already know the ``pos`` from the name?
            # Oh right, instead of a global map that would let you
            # easily look that up, it's redefined over and over and over
            # again in methods..
            self.axes[name] = {'item': axis, 'pos': pos}

            # NOTE: in the overlay case the axis may be added to some
            # other layout and should not be added here.
            if add_to_layout:
                self.layout.addItem(axis, *pos)

            # place axis above images at z=0, items that want to draw
            # over the axes should be placed at z>=1:
            axis.setZValue(0.5)
            axis.setFlag(
                axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
            )
            if name in visibleAxes:
                self.showAxis(name, True)
            else:
                # why do we need to insert all axes to ``.axes`` and
                # only hide the ones the user doesn't specify? It all
                # seems to work fine without doing this except for this
                # weird gap for the 'bottom' axis that always shows up
                # in the view box geometry??
                self.hideAxis(name)

    def updateGrid(
        self,
        *args,
    ):
        alpha = self.ctrl.gridAlphaSlider.value()
        x = alpha if self.ctrl.xGridCheck.isChecked() else False
        y = alpha if self.ctrl.yGridCheck.isChecked() else False
        for name, dim in (
            ('top', x),
            ('bottom', x),
            ('left', y),
            ('right', y)
        ):
            if name in self.axes:
                self.getAxis(name).setGrid(dim)
        # self.getAxis('bottom').setGrid(x)
        # self.getAxis('left').setGrid(y)
        # self.getAxis('right').setGrid(y)
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff