Compare commits

No commits in common. "decimalization_take_2" and "310_plus" have entirely different histories.

@@ -3,9 +3,10 @@ name: CI

on:
  # Triggers the workflow on push or pull request events but only for the master branch
  pull_request:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

@@ -13,27 +14,6 @@ on:

jobs:

  # test that we can generate a software distribution and install it
  # thus avoid missing file issues after packaging.
  sdist-linux:
    name: 'sdist'
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'

      - name: Build sdist
        run: python setup.py sdist --formats=zip

      - name: Install sdist from .zips
        run: python -m pip install dist/*.zip

  testing:
    name: 'install + test-suite'
    runs-on: ubuntu-latest

@@ -42,16 +22,13 @@ jobs:

      - name: Checkout
        uses: actions/checkout@v3

      - name: Build DB container
        run: docker build -t piker:elastic dockering/elastic

      - name: Setup python
        uses: actions/setup-python@v3
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

      - name: Test suite
        run: pytest tests -rs
README.rst — 13 changes

@@ -71,19 +71,6 @@ for a development install::

    source ./env/bin/activate
    pip install -r requirements.txt -e .

install for nixos
*****************
for users of `NixOS` we offer a development shell environment that can be
loaded with::

    nix-shell develop.nix

this will set up the required python environment to run piker, make sure to
run::

    pip install -r requirements.txt -e .

once after loading the shell

install for tinas
*****************
@@ -50,8 +50,3 @@ prefer_data_account = [

paper = "XX0000000"
margin = "X0000000"
ira = "X0000000"


[deribit]
key_id = 'XXXXXXXX'
key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
develop.nix — 32 changes

@@ -1,32 +0,0 @@

with (import <nixpkgs> {});
with python310Packages;
stdenv.mkDerivation {
  name = "pip-env";
  buildInputs = [
    # System requirements.
    readline

    # Python requirements (enough to get a virtualenv going).
    python310Full
    virtualenv
    setuptools
    pyqt5
    pip
  ];
  src = null;
  shellHook = ''
    # Allow the use of wheels.
    SOURCE_DATE_EPOCH=$(date +%s)

    # Augment the dynamic linker path
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib

    export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";

    if [ ! -d "venv" ]; then
      virtualenv venv
    fi

    source venv/bin/activate
  '';
}
@@ -1,11 +0,0 @@

FROM elasticsearch:7.17.4

ENV ES_JAVA_OPTS "-Xms2g -Xmx2g"
ENV ELASTIC_USERNAME "elastic"
ENV ELASTIC_PASSWORD "password"

COPY elasticsearch.yml /usr/share/elasticsearch/config/

RUN printf "password" | ./bin/elasticsearch-keystore add -f -x "bootstrap.password"

EXPOSE 19200
@@ -1,5 +0,0 @@

network.host: 0.0.0.0

http.port: 19200

discovery.type: single-node
@@ -3,12 +3,11 @@

version: "3.5"

services:
  ib_gw_paper:
  ib-gateway:
    # other image tags available:
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
    # image: waytrade/ib-gateway:981.3j
    image: waytrade/ib-gateway:1012.2i
    restart: 'no'  # restart on boot whenever there's a crash or user clicks
    image: waytrade/ib-gateway:981.3j
    restart: always
    network_mode: 'host'

    volumes:

@@ -40,12 +39,14 @@ services:

    # this compose file which looks something like:
    # TWS_USERID='myuser'
    # TWS_PASSWORD='guest'
    # TRADING_MODE=paper (or live)
    # VNC_SERVER_PASSWORD='diggity'

    environment:
      TWS_USERID: ${TWS_USERID}
      TWS_PASSWORD: ${TWS_PASSWORD}
      TRADING_MODE: 'paper'
      VNC_SERVER_PASSWORD: 'doggy'
      VNC_SERVER_PORT: '3003'
      TRADING_MODE: ${TRADING_MODE:-paper}
      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}

    # ports:
    #   - target: 4002
@@ -61,40 +62,3 @@ services:

    #   - "127.0.0.1:4001:4001"
    #   - "127.0.0.1:4002:4002"
    #   - "127.0.0.1:5900:5900"

  # ib_gw_live:
  #   image: waytrade/ib-gateway:1012.2i
  #   restart: no
  #   network_mode: 'host'

  #   volumes:
  #     - type: bind
  #       source: ./jts_live.ini
  #       target: /root/jts/jts.ini
  #       # don't let ibc clobber this file for
  #       # the main reason of not having a stupid
  #       # timezone set..
  #       read_only: true

  #     # force our own ibc config
  #     - type: bind
  #       source: ./ibc.ini
  #       target: /root/ibc/config.ini

  #     # force our noop script - socat isn't needed in host mode.
  #     - type: bind
  #       source: ./fork_ports_delayed.sh
  #       target: /root/scripts/fork_ports_delayed.sh

  #     # force our noop script - socat isn't needed in host mode.
  #     - type: bind
  #       source: ./run_x11_vnc.sh
  #       target: /root/scripts/run_x11_vnc.sh
  #       read_only: true

  #   # NOTE: to fill these out, define an `.env` file in the same dir as
  #   # this compose file which looks something like:
  #   environment:
  #     TRADING_MODE: 'live'
  #     VNC_SERVER_PASSWORD: 'doggy'
  #     VNC_SERVER_PORT: '3004'
@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes

#
# The default value is 60.

LoginDialogDisplayTimeout=20
LoginDialogDisplayTimeout = 60

@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary

# be set dynamically at run-time: most users will never need it,
# so don't use it unless you know you need it.

; OverrideTwsApiPort=4002
OverrideTwsApiPort=4002


# Read-only Login
@@ -1,33 +0,0 @@

[IBGateway]
ApiOnly=true
LocalServerPort=4001
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001
@@ -1,35 +1,16 @@

#!/bin/sh
# start vnc server and listen for connections
# on port specced in `$VNC_SERVER_PORT`

# start VNC server
x11vnc \
    -listen 127.0.0.1 \
    -allow 127.0.0.1 \
    -rfbport "${VNC_SERVER_PORT}" \
    -ncache_cr \
    -listen localhost \
    -display :1 \
    -forever \
    -shared \
    -logappend /var/log/x11vnc.log \
    -bg \
    -nowf \
    -noxdamage \
    -noxfixes \
    -no6 \
    -noipv6 \


    # -nowcr \
    # TODO: can't use this because of ``asyncvnc`` issue:
    -autoport 3003 \
    # can't use this because of ``asyncvnc`` issue:
    # https://github.com/barneygale/asyncvnc/issues/1
    # -passwd 'ibcansmbz'

    # XXX: optional graphics caching flags that seem to rekt the overlay
    # of the 2 gw windows? When running a single gateway
    # this seems to maybe optimize some memory usage?
    # -ncache_cr \
    # -ncache \

    # NOTE: this will prevent logs from going to the console.
    # -logappend /var/log/x11vnc.log \

    # where to start allocating ports
    # -autoport "${VNC_SERVER_PORT}" \
@@ -18,10 +18,3 @@

piker: trading gear for hackers.

"""
from ._daemon import open_piker_runtime
from .data.feed import open_feed

__all__ = [
    'open_piker_runtime',
    'open_feed',
]
piker/_daemon.py — 535 changes

@@ -18,149 +18,45 @@

Structured, daemon tree service management.

"""
from __future__ import annotations
import os
from typing import (
    Optional,
    Callable,
    Any,
    ClassVar,
)
from contextlib import (
    asynccontextmanager as acm,
)
from typing import Optional, Union, Callable, Any
from contextlib import asynccontextmanager as acm
from collections import defaultdict

import tractor
from pydantic import BaseModel
import trio
from trio_typing import TaskStatus
import tractor

from .log import (
    get_logger,
    get_console_log,
)
from .log import get_logger, get_console_log
from .brokers import get_brokermod

from pprint import pformat
from functools import partial


log = get_logger(__name__)

_root_dname = 'pikerd'

_default_registry_host: str = '127.0.0.1'
_default_registry_port: int = 6116
_default_reg_addr: tuple[str, int] = (
    _default_registry_host,
    _default_registry_port,
)


# NOTE: this value is set as an actor-global once the first endpoint
# who is capable, spawns a `pikerd` service tree.
_registry: Registry | None = None


class Registry:
    addr: None | tuple[str, int] = None

    # TODO: table of uids to sockaddrs
    peers: dict[
        tuple[str, str],
        tuple[str, int],
    ] = {}


_tractor_kwargs: dict[str, Any] = {}


@acm
async def open_registry(
    addr: None | tuple[str, int] = None,
    ensure_exists: bool = True,

) -> tuple[str, int]:

    global _tractor_kwargs
    actor = tractor.current_actor()
    uid = actor.uid
    if (
        Registry.addr is not None
        and addr
    ):
        raise RuntimeError(
            f'`{uid}` registry addr already bound @ {_registry.sockaddr}'
        )

    was_set: bool = False

    if (
        not tractor.is_root_process()
        and Registry.addr is None
    ):
        Registry.addr = actor._arb_addr

    if (
        ensure_exists
        and Registry.addr is None
    ):
        raise RuntimeError(
            f"`{uid}` registry should already exist bug doesn't?"
        )

    if (
        Registry.addr is None
    ):
        was_set = True
        Registry.addr = addr or _default_reg_addr

    _tractor_kwargs['arbiter_addr'] = Registry.addr

    try:
        yield Registry.addr
    finally:
        # XXX: always clear the global addr if we set it so that the
        # next (set of) calls will apply whatever new one is passed
        # in.
        if was_set:
            Registry.addr = None


def get_tractor_runtime_kwargs() -> dict[str, Any]:
    '''
    Deliver ``tractor`` related runtime variables in a `dict`.

    '''
    return _tractor_kwargs


_registry_addr = ('127.0.0.1', 6116)
_tractor_kwargs: dict[str, Any] = {
    # use a different registry addr then tractor's default
    'arbiter_addr': _registry_addr
}
_root_modules = [
    __name__,
    'piker.clearing._ems',
    'piker.clearing._client',
    'piker.data._sampling',
]


# TODO: factor this into a ``tractor.highlevel`` extension
# pack for the library.
class Services:
class Services(BaseModel):

    actor_n: tractor._supervise.ActorNursery
    service_n: trio.Nursery
    debug_mode: bool  # tractor sub-actor debug mode flag
    service_tasks: dict[
        str,
        tuple[
            trio.CancelScope,
            tractor.Portal,
            trio.Event,
        ]
    ] = {}
    locks = defaultdict(trio.Lock)
    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}

    class Config:
        arbitrary_types_allowed = True

    @classmethod
    async def start_service_task(
        self,
        name: str,
@@ -179,12 +75,7 @@ class Services:

        '''
        async def open_context_in_task(
            task_status: TaskStatus[
                tuple[
                    trio.CancelScope,
                    trio.Event,
                    Any,
                ]
            ] = trio.TASK_STATUS_IGNORED,
                trio.CancelScope] = trio.TASK_STATUS_IGNORED,

        ) -> Any:
@@ -196,220 +87,143 @@ class Services:

            ) as (ctx, first):

                # unblock once the remote context has started
                complete = trio.Event()
                task_status.started((cs, complete, first))
                task_status.started((cs, first))
                log.info(
                    f'`pikerd` service {name} started with value {first}'
                )
                try:
                    # wait on any context's return value
                    # and any final portal result from the
                    # sub-actor.
                    ctx_res = await ctx.result()

                # NOTE: blocks indefinitely until cancelled
                # either by error from the target context
                # function or by being cancelled here by the
                # surrounding cancel scope.
                except tractor.ContextCancelled:
                    return await self.cancel_service(name)
                else:
                    # wait on any error from the sub-actor
                    # NOTE: this will block indefinitely until
                    # cancelled either by error from the target
                    # context function or by being cancelled here by
                    # the surrounding cancel scope
                    return (await portal.result(), ctx_res)

                finally:
                    await portal.cancel_actor()
                    complete.set()
                    self.service_tasks.pop(name)

        cs, complete, first = await self.service_n.start(open_context_in_task)
        cs, first = await self.service_n.start(open_context_in_task)

        # store the cancel scope and portal for later cancellation or
        # retstart if needed.
        self.service_tasks[name] = (cs, portal, complete)
        self.service_tasks[name] = (cs, portal)

        return cs, first

    @classmethod
    # TODO: per service cancellation by scope, we aren't using this
    # anywhere right?
    async def cancel_service(
        self,
        name: str,

    ) -> Any:
        log.info(f'Cancelling `pikerd` service {name}')
        cs, portal = self.service_tasks[name]
        # XXX: not entirely sure why this is required,
        # and should probably be better fine tuned in
        # ``tractor``?
        cs.cancel()
        return await portal.cancel_actor()


_services: Optional[Services] = None


@acm
async def open_pikerd(
    start_method: str = 'trio',
    loglevel: Optional[str] = None,

    # XXX: you should pretty much never want debug mode
    # for data daemons when running in production.
    debug_mode: bool = False,

) -> Optional[tractor._portal.Portal]:
    '''
    Cancel the service task and actor for the given ``name``.
    Start a root piker daemon who's lifetime extends indefinitely
    until cancelled.

    A root actor nursery is created which can be used to create and keep
    alive underling services (see below).

    '''
    log.info(f'Cancelling `pikerd` service {name}')
    cs, portal, complete = self.service_tasks[name]
    cs.cancel()
    await complete.wait()
    assert name not in self.service_tasks, \
        f'Serice task for {name} not terminated?'
    global _services
    assert _services is None

    # XXX: this may open a root actor as well
    async with (
        tractor.open_root_actor(

            # passed through to ``open_root_actor``
            arbiter_addr=_registry_addr,
            name=_root_dname,
            loglevel=loglevel,
            debug_mode=debug_mode,
            start_method=start_method,

            # TODO: eventually we should be able to avoid
            # having the root have more then permissions to
            # spawn other specialized daemons I think?
            enable_modules=_root_modules,
        ) as _,

        tractor.open_nursery() as actor_nursery,
    ):
        async with trio.open_nursery() as service_nursery:

            # # setup service mngr singleton instance
            # async with AsyncExitStack() as stack:

            # assign globally for future daemon/task creation
            _services = Services(
                actor_n=actor_nursery,
                service_n=service_nursery,
                debug_mode=debug_mode,
            )

            yield _services


@acm
async def open_piker_runtime(
    name: str,
    enable_modules: list[str] = [],
    start_method: str = 'trio',
    loglevel: Optional[str] = None,

    # XXX NOTE XXX: you should pretty much never want debug mode
    # XXX: you should pretty much never want debug mode
    # for data daemons when running in production.
    debug_mode: bool = False,

    registry_addr: None | tuple[str, int] = None,

    # TODO: once we have `rsyscall` support we will read a config
    # and spawn the service tree distributed per that.
    start_method: str = 'trio',

    **tractor_kwargs,

) -> tuple[
    tractor.Actor,
    tuple[str, int],
]:
) -> Optional[tractor._portal.Portal]:
    '''
    Start a piker actor who's runtime will automatically sync with
    existing piker actors on the local link based on configuration.

    Can be called from a subactor or any program that needs to start
    a root actor.
    Start a piker actor who's runtime will automatically
    sync with existing piker actors in local network
    based on configuration.

    '''
    try:
        # check for existing runtime
        actor = tractor.current_actor().uid

    except tractor._exceptions.NoRuntime:

        registry_addr = registry_addr or _default_reg_addr
    global _services
    assert _services is None

    # XXX: this may open a root actor as well
    async with (
        tractor.open_root_actor(

            # passed through to ``open_root_actor``
            arbiter_addr=registry_addr,
            arbiter_addr=_registry_addr,
            name=name,
            loglevel=loglevel,
            debug_mode=debug_mode,
            start_method=start_method,

            # TODO: eventually we should be able to avoid
            # having the root have more then permissions to
            # spawn other specialized daemons I think?
            enable_modules=enable_modules,

            **tractor_kwargs,
        ) as _,

        open_registry(registry_addr, ensure_exists=False) as addr,
    ):
        yield (
            tractor.current_actor(),
            addr,
        )
    else:
        async with open_registry(registry_addr) as addr:
            yield (
                actor,
                addr,
            )


@acm
async def open_pikerd(

    loglevel: str | None = None,

    # XXX: you should pretty much never want debug mode
    # for data daemons when running in production.
    debug_mode: bool = False,
    registry_addr: None | tuple[str, int] = None,

    # db init flags
    tsdb: bool = False,
    es: bool = False,

) -> Services:
    '''
    Start a root piker daemon who's lifetime extends indefinitely until
    cancelled.

    A root actor nursery is created which can be used to create and keep
    alive underling services (see below).

    '''

    async with (
        open_piker_runtime(

            name=_root_dname,
            # TODO: eventually we should be able to avoid
            # having the root have more then permissions to
            # spawn other specialized daemons I think?
            enable_modules=_root_modules,

            loglevel=loglevel,
            debug_mode=debug_mode,
            registry_addr=registry_addr,

        ) as (root_actor, reg_addr),
        tractor.open_nursery() as actor_nursery,
        trio.open_nursery() as service_nursery,
        ) as _,
    ):
        assert root_actor.accept_addr == reg_addr

        if tsdb:
            from piker.data._ahab import start_ahab
            from piker.data.marketstore import start_marketstore

            log.info('Spawning `marketstore` supervisor')
            ctn_ready, config, (cid, pid) = await service_nursery.start(
                start_ahab,
                'marketstored',
                start_marketstore,

            )
            log.info(
                f'`marketstored` up!\n'
                f'pid: {pid}\n'
                f'container id: {cid[:12]}\n'
                f'config: {pformat(config)}'
            )

        if es:
            from piker.data._ahab import start_ahab
            from piker.data.elastic import start_elasticsearch

            log.info('Spawning `elasticsearch` supervisor')
            ctn_ready, config, (cid, pid) = await service_nursery.start(
                partial(
                    start_ahab,
                    'elasticsearch',
                    start_elasticsearch,
                    start_timeout=240.0  # high cause ci
                )
            )

            log.info(
                f'`elasticsearch` up!\n'
                f'pid: {pid}\n'
                f'container id: {cid[:12]}\n'
                f'config: {pformat(config)}'
            )

        # assign globally for future daemon/task creation
        Services.actor_n = actor_nursery
        Services.service_n = service_nursery
        Services.debug_mode = debug_mode


        try:
            yield Services

        finally:
            # TODO: is this more clever/efficient?
            # if 'samplerd' in Services.service_tasks:
            #     await Services.cancel_service('samplerd')
            service_nursery.cancel_scope.cancel()
        yield tractor.current_actor()


@acm
@@ -418,93 +232,61 @@ async def maybe_open_runtime(

    **kwargs,

) -> None:
    '''
    """
    Start the ``tractor`` runtime (a root actor) if none exists.

    '''
    name = kwargs.pop('name')
    """
    settings = _tractor_kwargs
    settings.update(kwargs)

    if not tractor.current_actor(err_on_no_runtime=False):
        async with open_piker_runtime(
            name,
        async with tractor.open_root_actor(
            loglevel=loglevel,
            **kwargs,
        ) as (_, addr):
            yield addr,
            **settings,
        ):
            yield
    else:
        async with open_registry() as addr:
            yield addr
        yield


@acm
async def maybe_open_pikerd(
    loglevel: Optional[str] = None,
    registry_addr: None | tuple = None,
    tsdb: bool = False,
    es: bool = False,

    **kwargs,

) -> tractor._portal.Portal | ClassVar[Services]:
    '''
    If no ``pikerd`` daemon-root-actor can be found start it and
) -> Union[tractor._portal.Portal, Services]:
    """If no ``pikerd`` daemon-root-actor can be found start it and
    yield up (we should probably figure out returning a portal to self
    though).

    '''
    """
    if loglevel:
        get_console_log(loglevel)

    # subtle, we must have the runtime up here or portal lookup will fail
    query_name = kwargs.pop('name', f'piker_query_{os.getpid()}')
    async with maybe_open_runtime(loglevel, **kwargs):

    # TODO: if we need to make the query part faster we could not init
    # an actor runtime and instead just hit the socket?
    # from tractor._ipc import _connect_chan, Channel
    # async with _connect_chan(host, port) as chan:
    #     async with open_portal(chan) as arb_portal:
    #         yield arb_portal

    async with (
        open_piker_runtime(
            name=query_name,
            registry_addr=registry_addr,
            loglevel=loglevel,
            **kwargs,
        ) as _,
        tractor.find_actor(
            _root_dname,
            arbiter_sockaddr=registry_addr,
        ) as portal
    ):
        # connect to any existing daemon presuming
        # its registry socket was selected.
        if (
            portal is not None
        ):
        async with tractor.find_actor(_root_dname) as portal:
            # assert portal is not None
            if portal is not None:
                yield portal
                return

    # presume pikerd role since no daemon could be found at
    # configured address
    async with open_pikerd(

        loglevel=loglevel,
        debug_mode=kwargs.get('debug_mode', False),
        registry_addr=registry_addr,
        tsdb=tsdb,
        es=es,

    ) as service_manager:
    ) as _:
        # in the case where we're starting up the
        # tractor-piker runtime stack in **this** process
        # we return no portal to self.
        assert service_manager
        yield service_manager
        yield None


# `brokerd` enabled modules
# NOTE: keeping this list as small as possible is part of our caps-sec
# model and should be treated with utmost care!
# brokerd enabled modules
_data_mods = [
    'piker.brokers.core',
    'piker.brokers.data',
@@ -514,17 +296,20 @@ _data_mods = [

]


class Brokerd:
    locks = defaultdict(trio.Lock)


@acm
async def find_service(
    service_name: str,
) -> tractor.Portal | None:
) -> Optional[tractor.Portal]:

    async with open_registry() as reg_addr:
        log.info(f'Scanning for service `{service_name}`')
        # attach to existing daemon by name if possible
        async with tractor.find_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
            arbiter_sockaddr=_registry_addr,
        ) as maybe_portal:
            yield maybe_portal
@@ -532,15 +317,14 @@ async def find_service(

async def check_for_service(
    service_name: str,

) -> None | tuple[str, int]:
) -> bool:
    '''
    Service daemon "liveness" predicate.

    '''
    async with open_registry(ensure_exists=False) as reg_addr:
        async with tractor.query_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
            arbiter_sockaddr=_registry_addr,
        ) as sockaddr:
            return sockaddr
@@ -552,8 +336,6 @@ async def maybe_spawn_daemon(

    service_task_target: Callable,
    spawn_args: dict[str, Any],
    loglevel: Optional[str] = None,

    singleton: bool = False,
    **kwargs,

) -> tractor.Portal:
@@ -574,7 +356,7 @@ async def maybe_spawn_daemon(

    # serialize access to this section to avoid
    # 2 or more tasks racing to create a daemon
    lock = Services.locks[service_name]
    lock = Brokerd.locks[service_name]
    await lock.acquire()

    async with find_service(service_name) as portal:
@@ -585,9 +367,6 @@ async def maybe_spawn_daemon(

        log.warning(f"Couldn't find any existing {service_name}")

        # TODO: really shouldn't the actor spawning be part of the service
        # starting method `Services.start_service()` ?

        # ask root ``pikerd`` daemon to spawn the daemon we need if
        # pikerd is not live we now become the root of the
        # process tree
@@ -598,6 +377,7 @@ async def maybe_spawn_daemon(

        ) as pikerd_portal:

            if pikerd_portal is None:
                # we are the root and thus are `pikerd`
                # so spawn the target service directly by calling
                # the provided target routine.
@@ -605,9 +385,7 @@ async def maybe_spawn_daemon(

            # do the right things to setup both a sub-actor **and** call
            # the ``_Services`` api from above to start the top level
            # service task for that actor.
            started: bool
            if pikerd_portal is None:
                started = await service_task_target(**spawn_args)
                await service_task_target(**spawn_args)

            else:
                # tell the remote `pikerd` to start the target,
@@ -616,14 +394,11 @@ async def maybe_spawn_daemon(

                # non-blocking and the target task will persist running
                # on `pikerd` after the client requesting it's start
                # disconnects.
                started = await pikerd_portal.run(
                await pikerd_portal.run(
                    service_task_target,
                    **spawn_args,
                )

            if started:
                log.info(f'Service {service_name} started!')

            async with tractor.wait_for_actor(service_name) as portal:
                lock.release()
                yield portal
@@ -646,6 +421,9 @@ async def spawn_brokerd(

    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    global _services
    assert _services

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    modpath = brokermod.__name__
@@ -658,18 +436,18 @@ async def spawn_brokerd(

        subpath = f'{modpath}.{submodname}'
        broker_enable.append(subpath)

    portal = await Services.actor_n.start_actor(
    portal = await _services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + broker_enable,
        loglevel=loglevel,
        debug_mode=Services.debug_mode,
        debug_mode=_services.debug_mode,
        **tractor_kwargs
    )

    # non-blocking setup of brokerd service nursery
    from .data import _setup_persistent_brokerd

    await Services.start_service_task(
    await _services.start_service_task(
        dname,
        portal,
        _setup_persistent_brokerd,
@@ -715,21 +493,24 @@ async def spawn_emsd(

    """
    log.info('Spawning emsd')

    portal = await Services.actor_n.start_actor(
    global _services
    assert _services

    portal = await _services.actor_n.start_actor(
        'emsd',
        enable_modules=[
            'piker.clearing._ems',
            'piker.clearing._client',
        ],
        loglevel=loglevel,
        debug_mode=Services.debug_mode,  # set by pikerd flag
        debug_mode=_services.debug_mode,  # set by pikerd flag
        **extra_tractor_kwargs
    )

    # non-blocking setup of clearing service
    from .clearing._ems import _setup_persistent_emsd

    await Services.start_service_task(
    await _services.start_service_task(
        'emsd',
        portal,
        _setup_persistent_emsd,
@@ -756,3 +537,25 @@ async def maybe_open_emsd(

    ) as portal:
        yield portal


# TODO: ideally we can start the tsdb "on demand" but it's
# probably going to require "rootless" docker, at least if we don't
# want to expect the user to start ``pikerd`` with root perms all the
# time.
# async def maybe_open_marketstored(
#     loglevel: Optional[str] = None,
#     **kwargs,

# ) -> tractor._portal.Portal:  # noqa

#     async with maybe_spawn_daemon(

#         'marketstored',
#         service_task_target=spawn_emsd,
#         spawn_args={'loglevel': loglevel},
#         loglevel=loglevel,
#         **kwargs,

#     ) as portal:
#         yield portal
@@ -18,10 +18,7 @@

Profiling wrappers for internal libs.

"""
import os
import sys
import time
from time import perf_counter
from functools import wraps

# NOTE: you can pass a flag to enable this:
@@ -47,193 +44,3 @@ def timeit(fn):

        return res

    return wrapper


# Modified version of ``pyqtgraph.debug.Profiler`` that
# core seems hesitant to land in:
# https://github.com/pyqtgraph/pyqtgraph/pull/2281
class Profiler(object):
    '''
    Simple profiler allowing measurement of multiple time intervals.

    By default, profilers are disabled. To enable profiling, set the
    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
    fully-qualified names of profiled functions.

    Calling a profiler registers a message (defaulting to an increasing
    counter) that contains the time elapsed since the last call. When the
    profiler is about to be garbage-collected, the messages are passed to the
    outer profiler if one is running, or printed to stdout otherwise.

    If `delayed` is set to False, messages are immediately printed instead.

    Example:
        def function(...):
            profiler = Profiler()
            ... do stuff ...
            profiler('did stuff')
            ... do other stuff ...
            profiler('did other stuff')
            # profiler is garbage-collected and flushed at function end

    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
    "C.function" (without the module name) will enable this profiler.

    For regular functions, use the qualified name of the function, stripping
    only the initial "pyqtgraph." prefix from the module.
    '''

    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
    _profilers = _profilers.split(",") if _profilers is not None else []

    _depth = 0

    # NOTE: without this defined at the class level
    # you won't see apprpriately "nested" sub-profiler
    # instance calls.
    _msgs = []

    # set this flag to disable all or individual profilers at runtime
    disable = False

    class DisabledProfiler(object):
        def __init__(self, *args, **kwds):
            pass

        def __call__(self, *args):
            pass

        def finish(self):
            pass

        def mark(self, msg=None):
            pass

    _disabledProfiler = DisabledProfiler()

    def __new__(
        cls,
        msg=None,
        disabled='env',
        delayed=True,
        ms_threshold: float = 0.0,
    ):
        """Optionally create a new profiler based on caller's qualname.

        ``ms_threshold`` can be set to value in ms for which, if the
        total measured time of the lifetime of this profiler is **less
        than** this value, then no profiling messages will be printed.
        Setting ``delayed=False`` disables this feature since messages
        are emitted immediately.

        """
        if (
            disabled is True
            or (
                disabled == 'env'
                and len(cls._profilers) == 0
            )
        ):
            return cls._disabledProfiler

        # determine the qualified name of the caller function
        caller_frame = sys._getframe(1)
        try:
            caller_object_type = type(caller_frame.f_locals["self"])

        except KeyError:  # we are in a regular function
            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]

        else:  # we are in a method
            qualifier = caller_object_type.__name__
        func_qualname = qualifier + "." + caller_frame.f_code.co_name

        if disabled == 'env' and func_qualname not in cls._profilers:
            # don't do anything
            return cls._disabledProfiler

        cls._depth += 1
        obj = super(Profiler, cls).__new__(cls)
        obj._msgs = []

        # create an actual profiling object
        if cls._depth < 1:
            cls._msgs = []

        obj._name = msg or func_qualname
        obj._delayed = delayed
        obj._markCount = 0
        obj._finished = False
        obj._firstTime = obj._lastTime = perf_counter()
        obj._mt = ms_threshold
        obj._newMsg("> Entering " + obj._name)
        return obj

    def __call__(self, msg=None):
        """Register or print a new message with timing information.
        """
        if self.disable:
            return
        if msg is None:
            msg = str(self._markCount)

        self._markCount += 1
        newTime = perf_counter()
        tot_ms = (newTime - self._firstTime) * 1000
        ms = (newTime - self._lastTime) * 1000
        self._newMsg(
            f"  {msg}: {ms:0.4f}, tot:{tot_ms:0.4f}"
        )

        self._lastTime = newTime

    def mark(self, msg=None):
        self(msg)

    def _newMsg(self, msg, *args):
        msg = " " * (self._depth - 1) + msg
        if self._delayed:
            self._msgs.append((msg, args))
        else:
            print(msg % args)

    def __del__(self):
        self.finish()

    def finish(self, msg=None):
        """Add a final message; flush the message list if no parent profiler.
        """
        if self._finished or self.disable:
            return

        self._finished = True
        if msg is not None:
            self(msg)

        tot_ms = (perf_counter() - self._firstTime) * 1000
        self._newMsg(
            "< Exiting %s, total time: %0.4f ms",
            self._name,
            tot_ms,
        )

        if tot_ms < self._mt:
            # print(f'{tot_ms} < {self._mt}, clearing')
            # NOTE: this list **must** be an instance var to avoid
            # deleting common messages during GC I think?
            self._msgs.clear()
        # else:
        #     print(f'{tot_ms} > {self._mt}, not clearing')

        # XXX: why is this needed?
        # don't we **want to show** nested profiler messages?
        if self._msgs:  # and self._depth < 1:

        # if self._msgs:
            print("\n".join([m[0] % m[1] for m in self._msgs]))

            # clear all entries
            self._msgs.clear()
            # type(self)._msgs = []

        type(self)._depth -= 1
@@ -20,31 +20,22 @@ Broker clients, daemons and general back end machinery.

from importlib import import_module
from types import ModuleType

# TODO: move to urllib3/requests once supported
import asks
asks.init('trio')

__brokers__ = [
    'binance',
    'questrade',
    'robinhood',
    'ib',
    'kraken',

    # broken but used to work
    # 'questrade',
    # 'robinhood',

    # TODO: we should get on these stat!
    # alpaca
    # wstrade
    # iex

    # deribit
    # kucoin
    # bitso
]


def get_brokermod(brokername: str) -> ModuleType:
    '''
    Return the imported broker module by name.

    '''
    """Return the imported broker module by name.
    """
    module = import_module('.' + brokername, 'piker.brokers')
    # we only allow monkeying because it's for internal keying
    module.name = module.__name__.split('.')[-1]

@@ -52,9 +43,7 @@ def get_brokermod(brokername: str) -> ModuleType:


def iter_brokermods():
    '''
    Iterate all built-in broker modules.

    '''
    """Iterate all built-in broker modules.
    """
    for name in __brokers__:
        yield get_brokermod(name)
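The two helpers shown in full above are plain module plumbing; a short usage sketch, assuming the `piker` package is importable in the current environment:

    # usage sketch only; mirrors the helpers shown in the hunk above
    from piker.brokers import get_brokermod, iter_brokermods

    binance = get_brokermod('binance')   # import a backend module by short name
    print(binance.name)                  # 'name' is monkey-patched on by get_brokermod()

    for mod in iter_brokermods():        # walk every entry in __brokers__
        print(mod.name)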
@@ -33,23 +33,15 @@ import asks

from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto

from .._cacheables import open_cached_client
from ._util import (
    resproc,
    SymbolNotFound,
    DataUnavailable,
)
from ..log import (
    get_logger,
    get_console_log,
)
from ..data.types import Struct
from ..data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)
from ._util import resproc, SymbolNotFound
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data._web_bs import open_autorecon_ws, NoBsWs

log = get_logger(__name__)
@@ -87,14 +79,12 @@ _show_wap_in_history = False


# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(Struct, frozen=True):
class Pair(BaseModel):
    symbol: str
    status: str

    baseAsset: str
    baseAssetPrecision: int
    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int
@@ -110,21 +100,18 @@ class Pair(Struct, frozen=True):

    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool

    defaultSelfTradePreventionMode: str
    allowedSelfTradePreventionModes: list[str]

    filters: list[dict[str, Union[str, int, float]]]
    permissions: list[str]


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.
@dataclass
class OHLC:
    """Description of the flattened OHLC quote format.

    For schema details see:
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

    '''
    """
    time: int

    open: float
@@ -147,9 +134,7 @@ class OHLC(Struct):


# convert datetime obj timestamp to unixtime in milliseconds
def binance_timestamp(
    when: datetime
) -> int:
def binance_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))
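The converter shown in this hunk is plain stdlib arithmetic; a runnable sketch with a worked value, for reference:

    from datetime import datetime, timezone

    def binance_timestamp(when: datetime) -> int:
        # unix epoch time in milliseconds, the unit the binance klines API expects
        return int((when.timestamp() * 1000) + (when.microsecond / 1000))

    # midnight UTC on 2022-01-01 is 1640995200 s, i.e. 1640995200000 ms
    print(binance_timestamp(datetime(2022, 1, 1, tzinfo=timezone.utc)))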
|
@ -188,7 +173,7 @@ class Client:
|
|||
params = {}
|
||||
|
||||
if sym is not None:
|
||||
sym = sym.lower()
|
||||
sym = sym.upper()
|
||||
params = {'symbol': sym}
|
||||
|
||||
resp = await self._api(
|
||||
|
@@ -245,7 +230,7 @@ class Client:

    ) -> dict:

        if end_dt is None:
            end_dt = pendulum.now('UTC').add(minutes=1)
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
@@ -275,7 +260,6 @@ class Client:

        for i, bar in enumerate(bars):

            bar = OHLC(*bar)
            bar.typecast()

            row = []
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@@ -303,7 +287,7 @@ async def get_client() -> Client:


# validation type
class AggTrade(Struct):
class AggTrade(BaseModel):
    e: str  # Event type
    E: int  # Event time
    s: str  # Symbol
@@ -357,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

        elif msg.get('e') == 'aggTrade':

            # NOTE: this is purely for a definition, ``msgspec.Struct``
            # does not runtime-validate until you decode/encode.
            # see: https://jcristharif.com/msgspec/structs.html#type-validation
            # validate
            msg = AggTrade(**msg)

            # TODO: type out and require this quote format
@@ -370,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

                'brokerd_ts': time.time(),
                'ticks': [{
                    'type': 'trade',
                    'price': float(msg.p),
                    'size': float(msg.q),
                    'price': msg.p,
                    'size': msg.q,
                    'broker_ts': msg.T,
                }],
            }
@@ -402,39 +384,41 @@ async def open_history_client(

    async with open_cached_client('binance') as client:

        async def get_ohlc(
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            array = await client.bars(
                symbol,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            times = array['time']
            if (
                end_dt is None
            ):
                inow = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.breakpoint()

            start_dt = pendulum.from_timestamp(times[0])
            end_dt = pendulum.from_timestamp(times[-1])

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}


async def backfill_bars(
    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
) -> None:
    """Fill historical bars into shared mem / storage afap.
    """
    with trio.CancelScope() as cs:
        async with open_cached_client('binance') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
@@ -464,20 +448,12 @@ async def stream_quotes(

        d = cache[sym.upper()]
        syminfo = Pair(**d)  # validation

        si = sym_infos[sym] = syminfo.to_dict()
        filters = {}
        for entry in syminfo.filters:
            ftype = entry['filterType']
            filters[ftype] = entry
        si = sym_infos[sym] = syminfo.dict()

        # XXX: after manually inspecting the response format we
        # just directly pick out the info we need
        si['price_tick_size'] = float(
            filters['PRICE_FILTER']['tickSize']
        )
        si['lot_tick_size'] = float(
            filters['LOT_SIZE']['stepSize']
        )
        si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
        si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
        si['asset_type'] = 'crypto'

    symbol = symbols[0]
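The hunk above replaces positional indexing into the exchange `filters` list with a dict keyed by `filterType`; a standalone sketch of that pattern (the sample payload below is invented for illustration, its shape follows the binance exchange-info docs linked earlier in this file):

    # sketch only: key the filters list by filterType instead of by position
    sample_filters = [
        {'filterType': 'PRICE_FILTER', 'tickSize': '0.01000000'},
        {'filterType': 'LOT_SIZE', 'stepSize': '0.00001000'},
    ]

    filters = {entry['filterType']: entry for entry in sample_filters}

    price_tick_size = float(filters['PRICE_FILTER']['tickSize'])
    lot_tick_size = float(filters['LOT_SIZE']['stepSize'])

    print(price_tick_size, lot_tick_size)  # 0.01 1e-05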
@@ -519,7 +495,6 @@ async def stream_quotes(

            subs.append("{sym}@bookTicker")

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    "method": "UNSUBSCRIBE",
                    "params": subs,
@@ -39,148 +39,6 @@ _config_dir = click.get_app_dir('piker')

_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


OK = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'


def print_ok(s: str, **kwargs):
    print(OK + s + ENDC, **kwargs)


def print_error(s: str, **kwargs):
    print(FAIL + s + ENDC, **kwargs)


def get_method(client, meth_name: str):
    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
    method = getattr(client, meth_name, None)
    assert method
    print_ok('found!.')
    return method


async def run_method(client, meth_name: str, **kwargs):
    method = get_method(client, meth_name)
    print('running...', end='', flush=True)
    result = await method(**kwargs)
    print_ok(f'done! result: {type(result)}')
    return result


async def run_test(broker_name: str):
    brokermod = get_brokermod(broker_name)
    total = 0
    passed = 0
    failed = 0

    print(f'getting client...', end='', flush=True)
    if not hasattr(brokermod, 'get_client'):
        print_error('fail! no \'get_client\' context manager found.')
        return

    async with brokermod.get_client(is_brokercheck=True) as client:
        print_ok(f'done! inside client context.')

        # check for methods present on brokermod
        method_list = [
            'backfill_bars',
            'get_client',
            'trades_dialogue',
            'open_history_client',
            'open_symbol_search',
            'stream_quotes',

        ]

        for method in method_list:
            print(
                f'checking brokermod for method \'{method}\'...',
                end='', flush=True)
            if not hasattr(brokermod, method):
                print_error(f'fail! method \'{method}\' not found.')
                failed += 1
            else:
                print_ok('done!')
                passed += 1

            total += 1

        # check for methods present con brokermod.Client and their
        # results

        # for private methods only check is present
        method_list = [
            'get_balances',
            'get_assets',
            'get_trades',
            'get_xfers',
            'submit_limit',
            'submit_cancel',
            'search_symbols',
        ]

        for method_name in method_list:
            try:
                get_method(client, method_name)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1


        # check for methods present con brokermod.Client and their
        # results

        syms = await run_method(client, 'symbol_info')
        total += 1

        if len(syms) == 0:
            raise BaseException('Empty Symbol list?')

        passed += 1

        first_sym = tuple(syms.keys())[0]

        method_list = [
            ('cache_symbols', {}),
            ('search_symbols', {'pattern': first_sym[:-1]}),
            ('bars', {'symbol': first_sym})
        ]

        for method_name, method_kwargs in method_list:
            try:
                await run_method(client, method_name, **method_kwargs)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1

        print(f'total: {total}, passed: {passed}, failed: {failed}')


@cli.command()
@click.argument('broker', nargs=1, required=True)
@click.pass_obj
def brokercheck(config, broker):
    '''
    Test broker apis for completeness.

    '''
    async def bcheck_main():
        async with maybe_spawn_brokerd(broker) as portal:
            await portal.run(run_test, broker)
            await portal.cancel_actor()

    trio.run(run_test, broker)


@cli.command()
@click.option('--keys', '-k', multiple=True,
              help='Return results only for these keys')
@@ -335,8 +193,6 @@ def contracts(ctx, loglevel, broker, symbol, ids):

    brokermod = get_brokermod(broker)
    get_console_log(loglevel)


    contracts = trio.run(partial(core.contracts, brokermod, symbol))
    if not ids:
        # just print out expiry dates which can be used with
@@ -227,28 +227,26 @@ async def get_cached_feed(

@tractor.stream
async def start_quote_stream(
    stream: tractor.Context,  # marks this as a streaming func
    ctx: tractor.Context,  # marks this as a streaming func
    broker: str,
    symbols: List[Any],
    feed_type: str = 'stock',
    rate: int = 3,
) -> None:
    '''
    Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    """Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    pattern.

    Spawns new quoter tasks for each broker backend on-demand.
    Since most brokers seems to support batch quote requests we
    limit to one task per process (for now).

    '''
    """
    # XXX: why do we need this again?
    get_console_log(tractor.current_actor().loglevel)

    # pull global vars from local actor
    symbols = list(symbols)
    log.info(
        f"{stream.chan.uid} subscribed to {broker} for symbols {symbols}")
        f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}")
    # another actor task may have already created it
    async with get_cached_feed(broker) as feed:
@@ -292,13 +290,13 @@ async def start_quote_stream(

                    assert fquote['displayable']
                    payload[sym] = fquote

                await stream.send_yield(payload)
                await ctx.send_yield(payload)

        await stream_poll_requests(

            # ``trionics.msgpub`` required kwargs
            task_name=feed_type,
            ctx=stream,
            ctx=ctx,
            topics=symbols,
            packetizer=feed.mod.packetizer,
@@ -321,11 +319,9 @@ async def call_client(


class DataFeed:
    '''
    Data feed client for streaming symbol data from and making API
    client calls to a (remote) ``brokerd`` daemon.

    '''
    """Data feed client for streaming symbol data from and making API client calls
    to a (remote) ``brokerd`` daemon.
    """
    _allowed = ('stock', 'option')

    def __init__(self, portal, brokermod):
@@ -1,70 +0,0 @@

``deribit`` backend
*******************
pretty good liquidity crypto derivatives, uses custom json rpc over ws for
client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet once the account is created here is how you deposit fake crypto to
try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address, copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake
   coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
@@ -1,65 +0,0 @@

# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars
)
# from .broker import (
#     trades_dialogue,
#     norm_trade_records,
# )

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
@ -1,672 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Deribit backend.
|
||||
|
||||
'''
|
||||
import json
|
||||
import time
|
||||
import asyncio
|
||||
|
||||
from contextlib import asynccontextmanager as acm, AsyncExitStack
|
||||
from functools import partial
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional, Iterable, Callable
|
||||
|
||||
import pendulum
|
||||
import asks
|
||||
import trio
|
||||
from trio_typing import Nursery, TaskStatus
|
||||
from fuzzywuzzy import process as fuzzy
|
||||
import numpy as np
|
||||
|
||||
from piker.data.types import Struct
|
||||
from piker.data._web_bs import (
|
||||
NoBsWs,
|
||||
open_autorecon_ws,
|
||||
open_jsonrpc_session
|
||||
)
|
||||
|
||||
from .._util import resproc
|
||||
|
||||
from piker import config
|
||||
from piker.log import get_logger
|
||||
|
||||
from tractor.trionics import (
|
||||
broadcast_receiver,
|
||||
BroadcastReceiver,
|
||||
maybe_open_context
|
||||
)
|
||||
from tractor import to_asyncio
|
||||
|
||||
from cryptofeed import FeedHandler
|
||||
|
||||
from cryptofeed.defines import (
|
||||
DERIBIT,
|
||||
L1_BOOK, TRADES,
|
||||
OPTION, CALL, PUT
|
||||
)
|
||||
from cryptofeed.symbols import Symbol
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
_spawn_kwargs = {
|
||||
'infect_asyncio': True,
|
||||
}
|
||||
|
||||
|
||||
_url = 'https://www.deribit.com'
|
||||
_ws_url = 'wss://www.deribit.com/ws/api/v2'
|
||||
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'
|
||||
|
||||
|
||||
# Broker specific ohlc schema (rest)
|
||||
_ohlc_dtype = [
|
||||
('index', int),
|
||||
('time', int),
|
||||
('open', float),
|
||||
('high', float),
|
||||
('low', float),
|
||||
('close', float),
|
||||
('volume', float),
|
||||
('bar_wap', float), # will be zeroed by sampler if not filled
|
||||
]
|
||||
|
||||
|
||||
class JSONRPCResult(Struct):
|
||||
jsonrpc: str = '2.0'
|
||||
id: int
|
||||
result: Optional[dict] = None
|
||||
error: Optional[dict] = None
|
||||
usIn: int
|
||||
usOut: int
|
||||
usDiff: int
|
||||
testnet: bool
|
||||
|
||||
class JSONRPCChannel(Struct):
|
||||
jsonrpc: str = '2.0'
|
||||
method: str
|
||||
params: dict
|
||||
|
||||
|
||||
class KLinesResult(Struct):
|
||||
close: list[float]
|
||||
cost: list[float]
|
||||
high: list[float]
|
||||
low: list[float]
|
||||
open: list[float]
|
||||
status: str
|
||||
ticks: list[int]
|
||||
volume: list[float]
|
||||
|
||||
class Trade(Struct):
|
||||
trade_seq: int
|
||||
trade_id: str
|
||||
timestamp: int
|
||||
tick_direction: int
|
||||
price: float
|
||||
mark_price: float
|
||||
iv: float
|
||||
instrument_name: str
|
||||
index_price: float
|
||||
direction: str
|
||||
combo_trade_id: Optional[int] = 0
|
||||
combo_id: Optional[str] = ''
|
||||
amount: float
|
||||
|
||||
class LastTradesResult(Struct):
|
||||
trades: list[Trade]
|
||||
has_more: bool
|
||||
|
||||
|
||||
# convert datetime obj timestamp to unixtime in milliseconds
|
||||
def deribit_timestamp(when):
|
||||
return int((when.timestamp() * 1000) + (when.microsecond / 1000))
|
||||
|
||||
|
||||
def str_to_cb_sym(name: str) -> Symbol:
|
||||
base, strike_price, expiry_date, option_type = name.split('-')
|
||||
|
||||
quote = base
|
||||
|
||||
if option_type == 'put':
|
||||
option_type = PUT
|
||||
elif option_type == 'call':
|
||||
option_type = CALL
|
||||
else:
|
||||
raise Exception("Couldn\'t parse option type")
|
||||
|
||||
return Symbol(
|
||||
base, quote,
|
||||
type=OPTION,
|
||||
strike_price=strike_price,
|
||||
option_type=option_type,
|
||||
expiry_date=expiry_date,
|
||||
expiry_normalize=False)
|
||||
|
||||
|
||||
def piker_sym_to_cb_sym(name: str) -> Symbol:
|
||||
base, expiry_date, strike_price, option_type = tuple(
|
||||
name.upper().split('-'))
|
||||
|
||||
quote = base
|
||||
|
||||
if option_type == 'P':
|
||||
option_type = PUT
|
||||
elif option_type == 'C':
|
||||
option_type = CALL
|
||||
else:
|
||||
raise Exception("Couldn\'t parse option type")
|
||||
|
||||
return Symbol(
|
||||
base, quote,
|
||||
type=OPTION,
|
||||
strike_price=strike_price,
|
||||
option_type=option_type,
|
||||
expiry_date=expiry_date.upper())
|
||||
|
||||
|
||||
def cb_sym_to_deribit_inst(sym: Symbol):
|
||||
# cryptofeed normalized
|
||||
cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']
|
||||
|
||||
# deribit specific
|
||||
months = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
|
||||
|
||||
exp = sym.expiry_date
|
||||
|
||||
# YYMDD
|
||||
# 01234
|
||||
year, month, day = (
|
||||
exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])
|
||||
|
||||
otype = 'C' if sym.option_type == CALL else 'P'
|
||||
|
||||
return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
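
# worked example of the mapping above (hypothetical instrument): a
# cryptofeed ``Symbol`` with base 'BTC', strike_price '20000', option_type
# CALL and expiry_date '22Z30' (YY + futures month code + DD, so 'Z' ->
# 'DEC') maps to the deribit instrument name 'BTC-30DEC22-20000-C'.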
|
||||
|
||||
|
||||
def get_config() -> dict[str, Any]:
|
||||
|
||||
conf, path = config.load()
|
||||
|
||||
section = conf.get('deribit')
|
||||
|
||||
# TODO: document why we send this; basically these are the logging params for cryptofeed
|
||||
conf['log'] = {}
|
||||
conf['log']['disabled'] = True
|
||||
|
||||
if section is None:
|
||||
log.warning(f'No config section found for deribit in {path}')
|
||||
|
||||
return conf
|
||||
|
||||
|
||||
class Client:
|
||||
|
||||
def __init__(self, json_rpc: Callable) -> None:
|
||||
self._pairs: dict[str, Any] = None
|
||||
|
||||
config = get_config().get('deribit', {})
|
||||
|
||||
if ('key_id' in config) and ('key_secret' in config):
|
||||
self._key_id = config['key_id']
|
||||
self._key_secret = config['key_secret']
|
||||
|
||||
else:
|
||||
self._key_id = None
|
||||
self._key_secret = None
|
||||
|
||||
self.json_rpc = json_rpc
|
||||
|
||||
@property
|
||||
def currencies(self):
|
||||
return ['btc', 'eth', 'sol', 'usd']
|
||||
|
||||
async def get_balances(self, kind: str = 'option') -> dict[str, float]:
|
||||
"""Return the set of positions for this account
|
||||
by symbol.
|
||||
"""
|
||||
balances = {}
|
||||
|
||||
for currency in self.currencies:
|
||||
resp = await self.json_rpc(
|
||||
'private/get_positions', params={
|
||||
'currency': currency.upper(),
|
||||
'kind': kind})
|
||||
|
||||
balances[currency] = resp.result
|
||||
|
||||
return balances
|
||||
|
||||
async def get_assets(self) -> dict[str, float]:
|
||||
"""Return the set of asset balances for this account
|
||||
by symbol.
|
||||
"""
|
||||
balances = {}
|
||||
|
||||
for currency in self.currencies:
|
||||
resp = await self.json_rpc(
|
||||
'private/get_account_summary', params={
|
||||
'currency': currency.upper()})
|
||||
|
||||
balances[currency] = resp.result['balance']
|
||||
|
||||
return balances
|
||||
|
||||
async def submit_limit(
|
||||
self,
|
||||
symbol: str,
|
||||
price: float,
|
||||
action: str,
|
||||
size: float
|
||||
) -> dict:
|
||||
"""Place an order
|
||||
"""
|
||||
params = {
|
||||
'instrument_name': symbol.upper(),
|
||||
'amount': size,
|
||||
'type': 'limit',
|
||||
'price': price,
|
||||
}
|
||||
resp = await self.json_rpc(
|
||||
f'private/{action}', params)
|
||||
|
||||
return resp.result
|
||||
|
||||
async def submit_cancel(self, oid: str):
|
||||
"""Send cancel request for order id
|
||||
"""
|
||||
resp = await self.json_rpc(
|
||||
'private/cancel', {'order_id': oid})
|
||||
return resp.result
|
||||
|
||||
async def symbol_info(
|
||||
self,
|
||||
instrument: Optional[str] = None,
|
||||
currency: str = 'btc', # BTC, ETH, SOL, USDC
|
||||
kind: str = 'option',
|
||||
expired: bool = False
|
||||
) -> dict[str, Any]:
|
||||
"""Get symbol info for the exchange.
|
||||
|
||||
"""
|
||||
if self._pairs:
|
||||
return self._pairs
|
||||
|
||||
# will retrieve all symbols by default
|
||||
params = {
|
||||
'currency': currency.upper(),
|
||||
'kind': kind,
|
||||
'expired': str(expired).lower()
|
||||
}
|
||||
|
||||
resp = await self.json_rpc('public/get_instruments', params)
|
||||
results = resp.result
|
||||
|
||||
instruments = {
|
||||
item['instrument_name'].lower(): item
|
||||
for item in results
|
||||
}
|
||||
|
||||
if instrument is not None:
|
||||
return instruments[instrument]
|
||||
else:
|
||||
return instruments
|
||||
|
||||
async def cache_symbols(
|
||||
self,
|
||||
) -> dict:
|
||||
if not self._pairs:
|
||||
self._pairs = await self.symbol_info()
|
||||
|
||||
return self._pairs
|
||||
|
||||
async def search_symbols(
|
||||
self,
|
||||
pattern: str,
|
||||
limit: int = 30,
|
||||
) -> dict[str, Any]:
|
||||
data = await self.symbol_info()
|
||||
|
||||
matches = fuzzy.extractBests(
|
||||
pattern,
|
||||
data,
|
||||
score_cutoff=35,
|
||||
limit=limit
|
||||
)
|
||||
# repack in dict form
|
||||
return {item[0]['instrument_name'].lower(): item[0]
|
||||
for item in matches}
|
||||
|
||||
async def bars(
|
||||
self,
|
||||
symbol: str,
|
||||
start_dt: Optional[datetime] = None,
|
||||
end_dt: Optional[datetime] = None,
|
||||
limit: int = 1000,
|
||||
as_np: bool = True,
|
||||
) -> dict:
|
||||
instrument = symbol
|
||||
|
||||
if end_dt is None:
|
||||
end_dt = pendulum.now('UTC')
|
||||
|
||||
if start_dt is None:
|
||||
start_dt = end_dt.start_of(
|
||||
'minute').subtract(minutes=limit)
|
||||
|
||||
start_time = deribit_timestamp(start_dt)
|
||||
end_time = deribit_timestamp(end_dt)
|
||||
|
||||
# https://docs.deribit.com/#public-get_tradingview_chart_data
|
||||
resp = await self.json_rpc(
|
||||
'public/get_tradingview_chart_data',
|
||||
params={
|
||||
'instrument_name': instrument.upper(),
|
||||
'start_timestamp': start_time,
|
||||
'end_timestamp': end_time,
|
||||
'resolution': '1'
|
||||
})
|
||||
|
||||
result = KLinesResult(**resp.result)
|
||||
new_bars = []
|
||||
for i in range(len(result.close)):
|
||||
|
||||
_open = result.open[i]
|
||||
high = result.high[i]
|
||||
low = result.low[i]
|
||||
close = result.close[i]
|
||||
volume = result.volume[i]
|
||||
|
||||
row = [
|
||||
(start_time + (i * (60 * 1000))) / 1000.0, # time
|
||||
result.open[i],
|
||||
result.high[i],
|
||||
result.low[i],
|
||||
result.close[i],
|
||||
result.volume[i],
|
||||
0
|
||||
]
|
||||
|
||||
new_bars.append((i,) + tuple(row))
|
||||
|
||||
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
|
||||
return array
|
||||
|
||||
async def last_trades(
|
||||
self,
|
||||
instrument: str,
|
||||
count: int = 10
|
||||
):
|
||||
resp = await self.json_rpc(
|
||||
'public/get_last_trades_by_instrument',
|
||||
params={
|
||||
'instrument_name': instrument,
|
||||
'count': count
|
||||
})
|
||||
|
||||
return LastTradesResult(**resp.result)
|
||||
|
||||
|
||||
@acm
|
||||
async def get_client(
|
||||
is_brokercheck: bool = False
|
||||
) -> Client:
|
||||
|
||||
async with (
|
||||
trio.open_nursery() as n,
|
||||
open_jsonrpc_session(
|
||||
_testnet_ws_url, dtype=JSONRPCResult) as json_rpc
|
||||
):
|
||||
client = Client(json_rpc)
|
||||
|
||||
_refresh_token: Optional[str] = None
|
||||
_access_token: Optional[str] = None
|
||||
|
||||
async def _auth_loop(
|
||||
task_status: TaskStatus = trio.TASK_STATUS_IGNORED
|
||||
):
|
||||
"""Background task that adquires a first access token and then will
|
||||
refresh the access token while the nursery isn't cancelled.
|
||||
|
||||
https://docs.deribit.com/?python#authentication-2
|
||||
"""
|
||||
renew_time = 10
|
||||
access_scope = 'trade:read_write'
|
||||
_expiry_time = time.time()
|
||||
got_access = False
|
||||
nonlocal _refresh_token
|
||||
nonlocal _access_token
|
||||
|
||||
while True:
|
||||
if time.time() - _expiry_time < renew_time:
|
||||
# if we are close to token expiry time
|
||||
|
||||
if _refresh_token != None:
|
||||
# if we already have a refresh token we don't need to send the
|
||||
# secret
|
||||
params = {
|
||||
'grant_type': 'refresh_token',
|
||||
'refresh_token': _refresh_token,
|
||||
'scope': access_scope
|
||||
}
|
||||
|
||||
else:
|
||||
# we don't have refresh token, send secret to initialize
|
||||
params = {
|
||||
'grant_type': 'client_credentials',
|
||||
'client_id': client._key_id,
|
||||
'client_secret': client._key_secret,
|
||||
'scope': access_scope
|
||||
}
|
||||
|
||||
resp = await json_rpc('public/auth', params)
|
||||
result = resp.result
|
||||
|
||||
_expiry_time = time.time() + result['expires_in']
|
||||
_refresh_token = result['refresh_token']
|
||||
|
||||
if 'access_token' in result:
|
||||
_access_token = result['access_token']
|
||||
|
||||
if not got_access:
|
||||
# first time this loop runs we must indicate task is
|
||||
# started, we have auth
|
||||
got_access = True
|
||||
task_status.started()
|
||||
|
||||
else:
|
||||
await trio.sleep(renew_time / 2)
|
||||
|
||||
# if we have client creds launch auth loop
|
||||
if client._key_id is not None:
|
||||
await n.start(_auth_loop)
|
||||
|
||||
await client.cache_symbols()
|
||||
yield client
|
||||
n.cancel_scope.cancel()
|
||||
|
||||
|
||||
@acm
|
||||
async def open_feed_handler():
|
||||
fh = FeedHandler(config=get_config())
|
||||
yield fh
|
||||
await to_asyncio.run_task(fh.stop_async)
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
|
||||
async with maybe_open_context(
|
||||
acm_func=open_feed_handler,
|
||||
key='feedhandler',
|
||||
) as (cache_hit, fh):
|
||||
yield fh
|
||||
|
||||
|
||||
async def aio_price_feed_relay(
|
||||
fh: FeedHandler,
|
||||
instrument: Symbol,
|
||||
from_trio: asyncio.Queue,
|
||||
to_trio: trio.abc.SendChannel,
|
||||
) -> None:
|
||||
async def _trade(data: dict, receipt_timestamp):
|
||||
to_trio.send_nowait(('trade', {
|
||||
'symbol': cb_sym_to_deribit_inst(
|
||||
str_to_cb_sym(data.symbol)).lower(),
|
||||
'last': data,
|
||||
'broker_ts': time.time(),
|
||||
'data': data.to_dict(),
|
||||
'receipt': receipt_timestamp
|
||||
}))
|
||||
|
||||
async def _l1(data: dict, receipt_timestamp):
|
||||
to_trio.send_nowait(('l1', {
|
||||
'symbol': cb_sym_to_deribit_inst(
|
||||
str_to_cb_sym(data.symbol)).lower(),
|
||||
'ticks': [
|
||||
{'type': 'bid',
|
||||
'price': float(data.bid_price), 'size': float(data.bid_size)},
|
||||
{'type': 'bsize',
|
||||
'price': float(data.bid_price), 'size': float(data.bid_size)},
|
||||
{'type': 'ask',
|
||||
'price': float(data.ask_price), 'size': float(data.ask_size)},
|
||||
{'type': 'asize',
|
||||
'price': float(data.ask_price), 'size': float(data.ask_size)}
|
||||
]
|
||||
}))
|
||||
|
||||
fh.add_feed(
|
||||
DERIBIT,
|
||||
channels=[TRADES, L1_BOOK],
|
||||
symbols=[piker_sym_to_cb_sym(instrument)],
|
||||
callbacks={
|
||||
TRADES: _trade,
|
||||
L1_BOOK: _l1
|
||||
})
|
||||
|
||||
if not fh.running:
|
||||
fh.run(
|
||||
start_loop=False,
|
||||
install_signal_handlers=False)
|
||||
|
||||
# sync with trio
|
||||
to_trio.send_nowait(None)
|
||||
|
||||
await asyncio.sleep(float('inf'))
|
||||
|
||||
|
||||
@acm
|
||||
async def open_price_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
async with maybe_open_feed_handler() as fh:
|
||||
async with to_asyncio.open_channel_from(
|
||||
partial(
|
||||
aio_price_feed_relay,
|
||||
fh,
|
||||
instrument
|
||||
)
|
||||
) as (first, chan):
|
||||
yield chan
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_price_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
|
||||
# TODO: add a predicate to maybe_open_context
|
||||
async with maybe_open_context(
|
||||
acm_func=open_price_feed,
|
||||
kwargs={
|
||||
'instrument': instrument
|
||||
},
|
||||
key=f'{instrument}-price',
|
||||
) as (cache_hit, feed):
|
||||
if cache_hit:
|
||||
yield broadcast_receiver(feed, 10)
|
||||
else:
|
||||
yield feed
|
||||
|
||||
|
||||
|
||||
async def aio_order_feed_relay(
|
||||
fh: FeedHandler,
|
||||
instrument: Symbol,
|
||||
from_trio: asyncio.Queue,
|
||||
to_trio: trio.abc.SendChannel,
|
||||
) -> None:
|
||||
async def _fill(data: dict, receipt_timestamp):
|
||||
breakpoint()
|
||||
|
||||
async def _order_info(data: dict, receipt_timestamp):
|
||||
breakpoint()
|
||||
|
||||
fh.add_feed(
|
||||
DERIBIT,
|
||||
channels=[FILLS, ORDER_INFO],
|
||||
symbols=[instrument.upper()],
|
||||
callbacks={
|
||||
FILLS: _fill,
|
||||
ORDER_INFO: _order_info,
|
||||
})
|
||||
|
||||
if not fh.running:
|
||||
fh.run(
|
||||
start_loop=False,
|
||||
install_signal_handlers=False)
|
||||
|
||||
# sync with trio
|
||||
to_trio.send_nowait(None)
|
||||
|
||||
await asyncio.sleep(float('inf'))
|
||||
|
||||
|
||||
@acm
|
||||
async def open_order_feed(
|
||||
instrument: list[str]
|
||||
) -> trio.abc.ReceiveStream:
|
||||
async with maybe_open_feed_handler() as fh:
|
||||
async with to_asyncio.open_channel_from(
|
||||
partial(
|
||||
aio_order_feed_relay,
|
||||
fh,
|
||||
instrument
|
||||
)
|
||||
) as (first, chan):
|
||||
yield chan
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_order_feed(
|
||||
instrument: str
|
||||
) -> trio.abc.ReceiveStream:
|
||||
|
||||
# TODO: add a predicate to maybe_open_context
|
||||
async with maybe_open_context(
|
||||
acm_func=open_order_feed,
|
||||
kwargs={
|
||||
'instrument': instrument,
|
||||
'fh': fh
|
||||
},
|
||||
key=f'{instrument}-order',
|
||||
) as (cache_hit, feed):
|
||||
if cache_hit:
|
||||
yield broadcast_receiver(feed, 10)
|
||||
else:
|
||||
yield feed
|
Binary file not shown.
Before Width: | Height: | Size: 169 KiB |
Binary file not shown.
Before Width: | Height: | Size: 106 KiB |
Binary file not shown.
Before Width: | Height: | Size: 59 KiB |
Binary file not shown.
Before Width: | Height: | Size: 70 KiB |
Binary file not shown.
Before Width: | Height: | Size: 132 KiB |
|
@ -1,185 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Deribit backend.
|
||||
|
||||
'''
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional, Callable
|
||||
import time
|
||||
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
import pendulum
|
||||
from fuzzywuzzy import process as fuzzy
|
||||
import numpy as np
|
||||
import tractor
|
||||
|
||||
from piker._cacheables import open_cached_client
|
||||
from piker.log import get_logger, get_console_log
|
||||
from piker.data import ShmArray
|
||||
from piker.brokers._util import (
|
||||
BrokerError,
|
||||
DataUnavailable,
|
||||
)
|
||||
|
||||
from cryptofeed import FeedHandler
|
||||
|
||||
from cryptofeed.defines import (
|
||||
DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
|
||||
)
|
||||
from cryptofeed.symbols import Symbol
|
||||
|
||||
from .api import (
|
||||
Client, Trade,
|
||||
get_config,
|
||||
str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
|
||||
maybe_open_price_feed
|
||||
)
|
||||
|
||||
_spawn_kwargs = {
|
||||
'infect_asyncio': True,
|
||||
}
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
@acm
|
||||
async def open_history_client(
|
||||
instrument: str,
|
||||
) -> tuple[Callable, int]:
|
||||
|
||||
# TODO implement history getter for the new storage layer.
|
||||
async with open_cached_client('deribit') as client:
|
||||
|
||||
async def get_ohlc(
|
||||
end_dt: Optional[datetime] = None,
|
||||
start_dt: Optional[datetime] = None,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
datetime, # start
|
||||
datetime, # end
|
||||
]:
|
||||
|
||||
array = await client.bars(
|
||||
instrument,
|
||||
start_dt=start_dt,
|
||||
end_dt=end_dt,
|
||||
)
|
||||
if len(array) == 0:
|
||||
raise DataUnavailable
|
||||
|
||||
start_dt = pendulum.from_timestamp(array[0]['time'])
|
||||
end_dt = pendulum.from_timestamp(array[-1]['time'])
|
||||
|
||||
return array, start_dt, end_dt
|
||||
|
||||
yield get_ohlc, {'erlangs': 3, 'rate': 3}
|
||||
|
||||
|
||||
async def stream_quotes(
|
||||
|
||||
send_chan: trio.abc.SendChannel,
|
||||
symbols: list[str],
|
||||
feed_is_live: trio.Event,
|
||||
loglevel: str = None,
|
||||
|
||||
# startup sync
|
||||
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> None:
|
||||
# XXX: required to propagate ``tractor`` loglevel to piker logging
|
||||
get_console_log(loglevel or tractor.current_actor().loglevel)
|
||||
|
||||
sym = symbols[0]
|
||||
|
||||
async with (
|
||||
open_cached_client('deribit') as client,
|
||||
send_chan as send_chan
|
||||
):
|
||||
|
||||
init_msgs = {
|
||||
# pass back token, and bool, signalling if we're the writer
|
||||
# and that history has been written
|
||||
sym: {
|
||||
'symbol_info': {
|
||||
'asset_type': 'option',
|
||||
'price_tick_size': 0.0005
|
||||
},
|
||||
'shm_write_opts': {'sum_tick_vml': False},
|
||||
'fqsn': sym,
|
||||
},
|
||||
}
|
||||
|
||||
nsym = piker_sym_to_cb_sym(sym)
|
||||
|
||||
async with maybe_open_price_feed(sym) as stream:
|
||||
|
||||
cache = await client.cache_symbols()
|
||||
|
||||
last_trades = (await client.last_trades(
|
||||
cb_sym_to_deribit_inst(nsym), count=1)).trades
|
||||
|
||||
if len(last_trades) == 0:
|
||||
last_trade = None
|
||||
async for typ, quote in stream:
|
||||
if typ == 'trade':
|
||||
last_trade = Trade(**(quote['data']))
|
||||
break
|
||||
|
||||
else:
|
||||
last_trade = Trade(**(last_trades[0]))
|
||||
|
||||
first_quote = {
|
||||
'symbol': sym,
|
||||
'last': last_trade.price,
|
||||
'brokerd_ts': last_trade.timestamp,
|
||||
'ticks': [{
|
||||
'type': 'trade',
|
||||
'price': last_trade.price,
|
||||
'size': last_trade.amount,
|
||||
'broker_ts': last_trade.timestamp
|
||||
}]
|
||||
}
|
||||
task_status.started((init_msgs, first_quote))
|
||||
|
||||
feed_is_live.set()
|
||||
|
||||
async for typ, quote in stream:
|
||||
topic = quote['symbol']
|
||||
await send_chan.send({topic: quote})
|
||||
|
||||
|
||||
@tractor.context
|
||||
async def open_symbol_search(
|
||||
ctx: tractor.Context,
|
||||
) -> Client:
|
||||
async with open_cached_client('deribit') as client:
|
||||
|
||||
# load all symbols locally for fast search
|
||||
cache = await client.cache_symbols()
|
||||
await ctx.started()
|
||||
|
||||
async with ctx.open_stream() as stream:
|
||||
|
||||
async for pattern in stream:
|
||||
# repack in dict form
|
||||
await stream.send(
|
||||
await client.search_symbols(pattern))
|
|
@ -1,134 +0,0 @@
|
|||
``ib`` backend
|
||||
--------------
|
||||
more or less the "everything broker" for traditional and international
|
||||
markets. they are the "go to" provider for automatic retail trading
|
||||
and we interface to their APIs using the `ib_insync` project.
|
||||
|
||||
status
|
||||
******
|
||||
current support is *production grade* and both real-time data and order
|
||||
management should be correct and fast. this backend is used by core devs
|
||||
for live trading.
|
||||
|
||||
currently there is not yet full support for:
|
||||
- options charting and trading
|
||||
- paxos based crypto rt feeds and trading
|
||||
|
||||
|
||||
config
|
||||
******
|
||||
In order to get order mode support your ``brokers.toml``
|
||||
needs to have something like the following:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[ib]
|
||||
hosts = [
|
||||
"127.0.0.1",
|
||||
]
|
||||
# TODO: when we eventually spawn gateways in our
|
||||
# container, we can just dynamically allocate these
|
||||
# using IBC.
|
||||
ports = [
|
||||
4002,
|
||||
4003,
|
||||
4006,
|
||||
4001,
|
||||
7497,
|
||||
]
|
||||
|
||||
# XXX: for a paper account the flex web query service
|
||||
# is not supported so you have to manually download
|
||||
# an XML report and put it in a location that can be
|
||||
# accessed by the ``brokerd.ib`` backend code for parsing.
|
||||
flex_token = '1111111111111111'
|
||||
flex_trades_query_id = '6969696' # live accounts only?
|
||||
|
||||
# 3rd party web-api token
|
||||
# (XXX: not sure if this works yet)
|
||||
trade_log_token = '111111111111111'
|
||||
|
||||
# when clients are being scanned this determines
|
||||
# which clients are preferred to be used for data feeds
|
||||
# based on account names which are detected as active
|
||||
# on each client.
|
||||
prefer_data_account = [
|
||||
# this has to be first in order to make data work with dual paper + live
|
||||
'main',
|
||||
'algopaper',
|
||||
]
|
||||
|
||||
[ib.accounts]
|
||||
main = 'U69696969'
|
||||
algopaper = 'DU9696969'
|
||||
|
||||
|
||||
If everything works correctly you should see any current positions
|
||||
loaded in the pps pane on chart load and you should also be able to
|
||||
check your trade records in the file::
|
||||
|
||||
<pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml
|
||||
|
||||
|
||||
An example ledger file will have entries written verbatim from the
|
||||
trade events schema:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
["0000e1a7.630f5e5a.01.01"]
|
||||
secType = "FUT"
|
||||
conId = 515416577
|
||||
symbol = "MNQ"
|
||||
lastTradeDateOrContractMonth = "20221216"
|
||||
strike = 0.0
|
||||
right = ""
|
||||
multiplier = "2"
|
||||
exchange = "GLOBEX"
|
||||
primaryExchange = ""
|
||||
currency = "USD"
|
||||
localSymbol = "MNQZ2"
|
||||
tradingClass = "MNQ"
|
||||
includeExpired = false
|
||||
secIdType = ""
|
||||
secId = ""
|
||||
comboLegsDescrip = ""
|
||||
comboLegs = []
|
||||
execId = "0000e1a7.630f5e5a.01.01"
|
||||
time = 1661972086.0
|
||||
acctNumber = "DU69696969"
|
||||
side = "BOT"
|
||||
shares = 1.0
|
||||
price = 12372.75
|
||||
permId = 441472655
|
||||
clientId = 6116
|
||||
orderId = 985
|
||||
liquidation = 0
|
||||
cumQty = 1.0
|
||||
avgPrice = 12372.75
|
||||
orderRef = ""
|
||||
evRule = ""
|
||||
evMultiplier = 0.0
|
||||
modelCode = ""
|
||||
lastLiquidity = 1
|
||||
broker_time = 1661972086.0
|
||||
name = "ib"
|
||||
commission = 0.57
|
||||
realizedPNL = 243.41
|
||||
yield_ = 0.0
|
||||
yieldRedemptionDate = 0
|
||||
listingExchange = "GLOBEX"
|
||||
date = "2022-08-31T18:54:46+00:00"
|
||||
|
||||
|
||||
your ``pps.toml`` file will have position entries like,
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[ib.algopaper."mnq.globex.20221216"]
|
||||
size = -1.0
|
||||
ppu = 12423.630576923071
|
||||
bsuid = 515416577
|
||||
expiry = "2022-12-16T00:00:00+00:00"
|
||||
clears = [
|
||||
{ dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
|
||||
]
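
to poke at those entries programmatically, a minimal sketch using the
stdlib ``tomllib`` parser (python 3.11+; the file path, account and fqsn
below are just the example values from above):

.. code:: python

    # sketch: load a pps.toml and inspect one position entry
    import tomllib

    with open('pps.toml', 'rb') as f:  # assumed to sit in your piker config dir
        pps = tomllib.load(f)

    # [ib.algopaper."mnq.globex.20221216"] parses to nested tables
    pos = pps['ib']['algopaper']['mnq.globex.20221216']
    print(pos['size'], pos['ppu'], len(pos['clears']))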
|
|
@ -20,10 +20,15 @@ Interactive Brokers API backend.
|
|||
Sub-modules within break into the core functionalities:
|
||||
|
||||
- ``broker.py`` part for orders / trading endpoints
|
||||
- ``feed.py`` for real-time data feed endpoints
|
||||
- ``api.py`` for the core API machinery which is ``trio``-ized
|
||||
- ``data.py`` for real-time data feed endpoints
|
||||
|
||||
- ``client.py`` for the core API machinery which is ``trio``-ized
|
||||
wrapping around ``ib_insync``.
|
||||
|
||||
- ``report.py`` for the hackery to build manual pp calcs
|
||||
to avoid ib's absolute bullshit FIFO style position
|
||||
tracking..
|
||||
|
||||
"""
|
||||
from .api import (
|
||||
get_client,
|
||||
|
@ -33,10 +38,7 @@ from .feed import (
|
|||
open_symbol_search,
|
||||
stream_quotes,
|
||||
)
|
||||
from .broker import (
|
||||
trades_dialogue,
|
||||
norm_trade_records,
|
||||
)
|
||||
from .broker import trades_dialogue
|
||||
|
||||
__all__ = [
|
||||
'get_client',
|
||||
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -22,7 +22,6 @@ import asyncio
|
|||
from contextlib import asynccontextmanager as acm
|
||||
from dataclasses import asdict
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from math import isnan
|
||||
import time
|
||||
from typing import (
|
||||
|
@ -39,14 +38,10 @@ import tractor
|
|||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
|
||||
from .._util import (
|
||||
NoData,
|
||||
DataUnavailable,
|
||||
SymbolNotFound,
|
||||
)
|
||||
from piker.data._sharedmem import ShmArray
|
||||
from .._util import SymbolNotFound, NoData
|
||||
from .api import (
|
||||
# _adhoc_futes_set,
|
||||
con2fqsn,
|
||||
_adhoc_futes_set,
|
||||
log,
|
||||
load_aio_clients,
|
||||
ibis,
|
||||
|
@ -107,7 +102,7 @@ async def open_data_client() -> MethodProxy:
|
|||
|
||||
@acm
|
||||
async def open_history_client(
|
||||
fqsn: str,
|
||||
symbol: str,
|
||||
|
||||
) -> tuple[Callable, int]:
|
||||
'''
|
||||
|
@ -115,75 +110,26 @@ async def open_history_client(
|
|||
that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.
|
||||
|
||||
'''
|
||||
# TODO:
|
||||
# - add logic to handle tradable hours and only grab
|
||||
# valid bars in the range?
|
||||
# - we want to avoid overrunning the underlying shm array buffer and
|
||||
# we should probably calc the number of calls to make depending on
|
||||
# that until we have the `marketstore` daemon in place in which case
|
||||
# the shm size will be driven by user config and available sys
|
||||
# memory.
|
||||
|
||||
async with open_data_client() as proxy:
|
||||
|
||||
max_timeout: float = 2.
|
||||
mean: float = 0
|
||||
count: int = 0
|
||||
|
||||
head_dt: None | datetime = None
|
||||
if (
|
||||
# fx cons seem to not provide this endpoint?
|
||||
'idealpro' not in fqsn
|
||||
):
|
||||
try:
|
||||
head_dt = await proxy.get_head_time(fqsn=fqsn)
|
||||
except RequestError:
|
||||
head_dt = None
|
||||
|
||||
async def get_hist(
|
||||
timeframe: float,
|
||||
end_dt: Optional[datetime] = None,
|
||||
start_dt: Optional[datetime] = None,
|
||||
|
||||
) -> tuple[np.ndarray, str]:
|
||||
nonlocal max_timeout, mean, count
|
||||
|
||||
query_start = time.time()
|
||||
out, timedout = await get_bars(
|
||||
proxy,
|
||||
fqsn,
|
||||
timeframe,
|
||||
end_dt=end_dt,
|
||||
)
|
||||
latency = time.time() - query_start
|
||||
if (
|
||||
not timedout
|
||||
# and latency <= max_timeout
|
||||
):
|
||||
count += 1
|
||||
mean += latency / count
|
||||
print(
|
||||
f'HISTORY FRAME QUERY LATENCY: {latency}\n'
|
||||
f'mean: {mean}'
|
||||
)
|
||||
out, fails = await get_bars(proxy, symbol, end_dt=end_dt)
|
||||
|
||||
if (
|
||||
out is None
|
||||
):
|
||||
# TODO: add logic here to handle tradable hours and only grab
|
||||
# valid bars in the range
|
||||
if out is None:
|
||||
# could be trying to retrieve bars over weekend
|
||||
log.error(f"Can't grab bars starting at {end_dt}!?!?")
|
||||
raise NoData(
|
||||
f'{end_dt}',
|
||||
# frame_size=2000,
|
||||
frame_size=2000,
|
||||
)
|
||||
|
||||
if (
|
||||
end_dt
|
||||
and head_dt
|
||||
and end_dt <= head_dt
|
||||
):
|
||||
raise DataUnavailable(f'First timestamp is {head_dt}')
|
||||
|
||||
bars, bars_array, first_dt, last_dt = out
|
||||
|
||||
# volume cleaning since there's -ve entries,
|
||||
|
@ -198,7 +144,7 @@ async def open_history_client(
|
|||
# quite sure why.. needs some tinkering and probably
|
||||
# a lookthrough of the ``ib_insync`` machinery, for eg. maybe
|
||||
# we have to do the batch queries on the `asyncio` side?
|
||||
yield get_hist, {'erlangs': 1, 'rate': 3}
|
||||
yield get_hist, {'erlangs': 1, 'rate': 6}
|
||||
|
||||
|
||||
_pacing: str = (
|
||||
|
@ -207,19 +153,96 @@ _pacing: str = (
|
|||
)
|
||||
|
||||
|
||||
async def wait_on_data_reset(
|
||||
async def get_bars(
|
||||
|
||||
proxy: MethodProxy,
|
||||
reset_type: str = 'data',
|
||||
timeout: float = 16,
|
||||
fqsn: str,
|
||||
|
||||
task_status: TaskStatus[
|
||||
tuple[
|
||||
trio.CancelScope,
|
||||
trio.Event,
|
||||
]
|
||||
] = trio.TASK_STATUS_IGNORED,
|
||||
) -> bool:
|
||||
# blank to start which tells ib to look up the latest datum
|
||||
end_dt: str = '',
|
||||
|
||||
) -> (dict, np.ndarray):
|
||||
'''
|
||||
Retrieve historical data from a ``trio``-side task using
|
||||
a ``MethodProxy``.
|
||||
|
||||
'''
|
||||
fails = 0
|
||||
bars: Optional[list] = None
|
||||
first_dt: datetime = None
|
||||
last_dt: datetime = None
|
||||
|
||||
if end_dt:
|
||||
last_dt = pendulum.from_timestamp(end_dt.timestamp())
|
||||
|
||||
for _ in range(10):
|
||||
try:
|
||||
out = await proxy.bars(
|
||||
fqsn=fqsn,
|
||||
end_dt=end_dt,
|
||||
)
|
||||
if out:
|
||||
bars, bars_array = out
|
||||
|
||||
else:
|
||||
await tractor.breakpoint()
|
||||
|
||||
if bars_array is None:
|
||||
raise SymbolNotFound(fqsn)
|
||||
|
||||
first_dt = pendulum.from_timestamp(
|
||||
bars[0].date.timestamp())
|
||||
|
||||
last_dt = pendulum.from_timestamp(
|
||||
bars[-1].date.timestamp())
|
||||
|
||||
time = bars_array['time']
|
||||
assert time[-1] == last_dt.timestamp()
|
||||
assert time[0] == first_dt.timestamp()
|
||||
log.info(
|
||||
f'{len(bars)} bars retrieved for {first_dt} -> {last_dt}'
|
||||
)
|
||||
|
||||
return (bars, bars_array, first_dt, last_dt), fails
|
||||
|
||||
except RequestError as err:
|
||||
msg = err.message
|
||||
# why do we always need to rebind this?
|
||||
# _err = err
|
||||
|
||||
if 'No market data permissions for' in msg:
|
||||
# TODO: signalling for no permissions searches
|
||||
raise NoData(
|
||||
f'Symbol: {fqsn}',
|
||||
)
|
||||
|
||||
elif (
|
||||
err.code == 162
|
||||
and 'HMDS query returned no data' in err.message
|
||||
):
|
||||
# XXX: this is now done in the storage mgmt layer
|
||||
# and we shouldn't implicitly decrement the frame dt
|
||||
# index since the upper layer may be doing so
|
||||
# concurrently and we don't want to be delivering frames
|
||||
# that weren't asked for.
|
||||
log.warning(
|
||||
f'NO DATA found ending @ {end_dt}\n'
|
||||
)
|
||||
|
||||
# try to decrement start point and look further back
|
||||
# end_dt = last_dt = last_dt.subtract(seconds=2000)
|
||||
|
||||
raise NoData(
|
||||
f'Symbol: {fqsn}',
|
||||
frame_size=2000,
|
||||
)
|
||||
|
||||
elif _pacing in msg:
|
||||
|
||||
log.warning(
|
||||
'History throttle rate reached!\n'
|
||||
'Resetting farms with `ctrl-alt-f` hack\n'
|
||||
)
|
||||
# TODO: we might have to put a task lock around this
|
||||
# method..
|
||||
hist_ev = proxy.status_event(
|
||||
|
@ -235,259 +258,144 @@ async def wait_on_data_reset(
|
|||
# live_ev = proxy.status_event(
|
||||
# 'Market data farm connection is OK:usfuture'
|
||||
# )
|
||||
|
||||
# try to wait on the reset event(s) to arrive, a timeout
|
||||
# will trigger a retry up to 6 times (for now).
|
||||
tries: int = 2
|
||||
timeout: float = 10
|
||||
|
||||
done = trio.Event()
|
||||
with trio.move_on_after(timeout) as cs:
|
||||
|
||||
task_status.started((cs, done))
|
||||
# try 3 time with a data reset then fail over to
|
||||
# a connection reset.
|
||||
for i in range(1, tries):
|
||||
|
||||
log.warning('Sending DATA RESET request')
|
||||
res = await data_reset_hack(reset_type=reset_type)
|
||||
await data_reset_hack(reset_type='data')
|
||||
|
||||
if not res:
|
||||
log.warning(
|
||||
'NO VNC DETECTED!\n'
|
||||
'Manually press ctrl-alt-f on your IB java app'
|
||||
)
|
||||
done.set()
|
||||
return False
|
||||
|
||||
# TODO: not sure if waiting on other events
|
||||
# is all that useful here or not.
|
||||
# - in theory you could wait on one of the ones above first
|
||||
# to verify the reset request was sent?
|
||||
# - we need the same for real-time quote feeds which can
|
||||
# sometimes flake out and stop delivering..
|
||||
with trio.move_on_after(timeout) as cs:
|
||||
for name, ev in [
|
||||
# TODO: not sure if waiting on other events
|
||||
# is all that useful here or not. in theory
|
||||
# you could wait on one of the ones above
|
||||
# first to verify the reset request was
|
||||
# sent?
|
||||
('history', hist_ev),
|
||||
]:
|
||||
await ev.wait()
|
||||
log.info(f"{name} DATA RESET")
|
||||
done.set()
|
||||
return True
|
||||
break
|
||||
|
||||
if cs.cancel_called:
|
||||
if cs.cancelled_caught:
|
||||
fails += 1
|
||||
log.warning(
|
||||
'Data reset task canceled?'
|
||||
f'Data reset {name} timeout, retrying {i}.'
|
||||
)
|
||||
|
||||
done.set()
|
||||
return False
|
||||
|
||||
|
||||
_data_resetter_task: trio.Task | None = None
|
||||
|
||||
|
||||
async def get_bars(
|
||||
|
||||
proxy: MethodProxy,
|
||||
fqsn: str,
|
||||
timeframe: int,
|
||||
|
||||
# blank to start which tells ib to look up the latest datum
|
||||
end_dt: str = '',
|
||||
|
||||
# TODO: make this more dynamic based on measured frame rx latency?
|
||||
# how long before we trigger a feed reset (seconds)
|
||||
feed_reset_timeout: float = 3,
|
||||
|
||||
# how many days to subtract before giving up on further
|
||||
# history queries for instrument, presuming that most don't
|
||||
# trade for a week XD
|
||||
max_nodatas: int = 6,
|
||||
|
||||
task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> (dict, np.ndarray):
|
||||
'''
|
||||
Retrieve historical data from a ``trio``-side task using
|
||||
a ``MethodProxy``.
|
||||
|
||||
'''
|
||||
global _data_resetter_task
|
||||
nodatas_count: int = 0
|
||||
|
||||
data_cs: trio.CancelScope | None = None
|
||||
result: tuple[
|
||||
ibis.objects.BarDataList,
|
||||
np.ndarray,
|
||||
datetime,
|
||||
datetime,
|
||||
] | None = None
|
||||
result_ready = trio.Event()
|
||||
|
||||
async def query():
|
||||
nonlocal result, data_cs, end_dt, nodatas_count
|
||||
while True:
|
||||
try:
|
||||
out = await proxy.bars(
|
||||
fqsn=fqsn,
|
||||
end_dt=end_dt,
|
||||
sample_period_s=timeframe,
|
||||
|
||||
# ideally we cancel the request just before we
|
||||
# cancel on the ``trio``-side and trigger a data
|
||||
# reset hack.. the problem is there's no way (with
|
||||
# current impl) to detect a cancel case.
|
||||
# timeout=timeout,
|
||||
)
|
||||
if out is None:
|
||||
raise NoData(f'{end_dt}')
|
||||
|
||||
bars, bars_array, dt_duration = out
|
||||
|
||||
if not bars:
|
||||
log.warning(
|
||||
f'History is blank for {dt_duration} from {end_dt}'
|
||||
)
|
||||
end_dt -= dt_duration
|
||||
continue
|
||||
else:
|
||||
|
||||
if bars_array is None:
|
||||
raise SymbolNotFound(fqsn)
|
||||
log.warning('Sending CONNECTION RESET')
|
||||
await data_reset_hack(reset_type='connection')
|
||||
|
||||
first_dt = pendulum.from_timestamp(
|
||||
bars[0].date.timestamp())
|
||||
with trio.move_on_after(timeout) as cs:
|
||||
for name, ev in [
|
||||
# TODO: not sure if waiting on other events
|
||||
# is all that useful here or not. in theory
|
||||
# you could wait on one of the ones above
|
||||
# first to verify the reset request was
|
||||
# sent?
|
||||
('history', hist_ev),
|
||||
]:
|
||||
await ev.wait()
|
||||
log.info(f"{name} DATA RESET")
|
||||
|
||||
last_dt = pendulum.from_timestamp(
|
||||
bars[-1].date.timestamp())
|
||||
|
||||
time = bars_array['time']
|
||||
assert time[-1] == last_dt.timestamp()
|
||||
assert time[0] == first_dt.timestamp()
|
||||
log.info(
|
||||
f'{len(bars)} bars retrieved {first_dt} -> {last_dt}'
|
||||
)
|
||||
|
||||
if data_cs:
|
||||
data_cs.cancel()
|
||||
|
||||
result = (bars, bars_array, first_dt, last_dt)
|
||||
|
||||
# signal data reset loop parent task
|
||||
result_ready.set()
|
||||
|
||||
return result
|
||||
|
||||
except RequestError as err:
|
||||
msg = err.message
|
||||
|
||||
if 'No market data permissions for' in msg:
|
||||
# TODO: signalling for no permissions searches
|
||||
raise NoData(
|
||||
f'Symbol: {fqsn}',
|
||||
)
|
||||
|
||||
elif err.code == 162:
|
||||
if (
|
||||
'HMDS query returned no data' in msg
|
||||
):
|
||||
# XXX: this is now done in the storage mgmt
|
||||
# layer and we shouldn't implicitly decrement
|
||||
# the frame dt index since the upper layer may
|
||||
# be doing so concurrently and we don't want to
|
||||
# be delivering frames that weren't asked for.
|
||||
# try to decrement start point and look further back
|
||||
# end_dt = end_dt.subtract(seconds=2000)
|
||||
logmsg = "SUBTRACTING DAY from DT index"
|
||||
if end_dt is not None:
|
||||
end_dt = end_dt.subtract(days=1)
|
||||
elif end_dt is None:
|
||||
end_dt = pendulum.now().subtract(days=1)
|
||||
|
||||
log.warning(
|
||||
f'NO DATA found ending @ {end_dt}\n'
|
||||
+ logmsg
|
||||
)
|
||||
|
||||
if nodatas_count >= max_nodatas:
|
||||
raise DataUnavailable(
|
||||
f'Presuming {fqsn} has no further history '
|
||||
f'after {max_nodatas} tries..'
|
||||
)
|
||||
|
||||
nodatas_count += 1
|
||||
continue
|
||||
|
||||
elif 'API historical data query cancelled' in err.message:
|
||||
log.warning(
|
||||
'Query cancelled by IB (:eyeroll:):\n'
|
||||
f'{err.message}'
|
||||
)
|
||||
continue
|
||||
elif (
|
||||
'Trading TWS session is connected from a different IP'
|
||||
in err.message
|
||||
):
|
||||
log.warning("ignoring ip address warning")
|
||||
continue
|
||||
|
||||
# XXX: more or less same as above timeout case
|
||||
elif _pacing in msg:
|
||||
log.warning(
|
||||
'History throttle rate reached!\n'
|
||||
'Resetting farms with `ctrl-alt-f` hack\n'
|
||||
)
|
||||
|
||||
# cancel any existing reset task
|
||||
if data_cs:
|
||||
data_cs.cancel()
|
||||
|
||||
# spawn new data reset task
|
||||
data_cs, reset_done = await nurse.start(
|
||||
partial(
|
||||
wait_on_data_reset,
|
||||
proxy,
|
||||
timeout=float('inf'),
|
||||
reset_type='connection'
|
||||
)
|
||||
)
|
||||
continue
|
||||
if cs.cancelled_caught:
|
||||
fails += 1
|
||||
log.warning('Data CONNECTION RESET timeout!?')
|
||||
|
||||
else:
|
||||
raise
|
||||
|
||||
# TODO: make this global across all history task/requests
|
||||
# such that simultaneous symbol queries don't try data resetting
|
||||
# too fast..
|
||||
unset_resetter: bool = False
|
||||
async with trio.open_nursery() as nurse:
|
||||
return None, None
|
||||
# else: # throttle wasn't fixed so error out immediately
|
||||
# raise _err
|
||||
|
||||
# start history request that we allow
|
||||
# to run indefinitely until a result is acquired
|
||||
nurse.start_soon(query)
|
||||
|
||||
# start history reset loop which waits up to the timeout
|
||||
# for a result before triggering a data feed reset.
|
||||
while not result_ready.is_set():
|
||||
async def backfill_bars(
|
||||
|
||||
with trio.move_on_after(feed_reset_timeout):
|
||||
await result_ready.wait()
|
||||
break
|
||||
fqsn: str,
|
||||
shm: ShmArray, # type: ignore # noqa
|
||||
|
||||
if _data_resetter_task:
|
||||
# don't double invoke the reset hack if another
|
||||
# requester task already has it covered.
|
||||
# TODO: we want to avoid overrunning the underlying shm array buffer
|
||||
# and we should probably calc the number of calls to make depending
|
||||
# on that until we have the `marketstore` daemon in place in which
|
||||
# case the shm size will be driven by user config and available sys
|
||||
# memory.
|
||||
count: int = 16,
|
||||
|
||||
task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Fill historical bars into shared mem / storage afap.
|
||||
|
||||
TODO: avoid pacing constraints:
|
||||
https://github.com/pikers/piker/issues/128
|
||||
|
||||
'''
|
||||
# last_dt1 = None
|
||||
last_dt = None
|
||||
|
||||
with trio.CancelScope() as cs:
|
||||
|
||||
async with open_data_client() as proxy:
|
||||
|
||||
out, fails = await get_bars(proxy, fqsn)
|
||||
|
||||
if out is None:
|
||||
raise RuntimeError("Could not pull currrent history?!")
|
||||
|
||||
(first_bars, bars_array, first_dt, last_dt) = out
|
||||
vlm = bars_array['volume']
|
||||
vlm[vlm < 0] = 0
|
||||
last_dt = first_dt
|
||||
|
||||
# write historical data to buffer
|
||||
shm.push(bars_array)
|
||||
|
||||
task_status.started(cs)
|
||||
|
||||
i = 0
|
||||
while i < count:
|
||||
|
||||
out, fails = await get_bars(proxy, fqsn, end_dt=first_dt)
|
||||
|
||||
if out is None:
|
||||
# could be trying to retrieve bars over weekend
|
||||
# TODO: add logic here to handle tradable hours and
|
||||
# only grab valid bars in the range
|
||||
log.error(f"Can't grab bars starting at {first_dt}!?!?")
|
||||
|
||||
# XXX: get_bars() should internally decrement dt by
|
||||
# 2k seconds and try again.
|
||||
continue
|
||||
else:
|
||||
_data_resetter_task = trio.lowlevel.current_task()
|
||||
unset_resetter = True
|
||||
|
||||
# spawn new data reset task
|
||||
data_cs, reset_done = await nurse.start(
|
||||
partial(
|
||||
wait_on_data_reset,
|
||||
proxy,
|
||||
timeout=float('inf'),
|
||||
)
|
||||
)
|
||||
# sync wait on reset to complete
|
||||
await reset_done.wait()
|
||||
(first_bars, bars_array, first_dt, last_dt) = out
|
||||
# last_dt1 = last_dt
|
||||
# last_dt = first_dt
|
||||
|
||||
_data_resetter_task = None if unset_resetter else _data_resetter_task
|
||||
return result, data_cs is not None
|
||||
# volume cleaning since there's -ve entries,
|
||||
# wood luv to know what crookery that is..
|
||||
vlm = bars_array['volume']
|
||||
vlm[vlm < 0] = 0
|
||||
|
||||
# TODO we should probably dig into forums to see what peeps
|
||||
# think this data "means" and then use it as an indicator of
|
||||
# sorts? dinkus has mentioned that $vlms for the day don't
|
||||
# match other platforms nor the summary stat tws shows in
|
||||
# the monitor - it's probably worth investigating.
|
||||
|
||||
shm.push(bars_array, prepend=True)
|
||||
i += 1
|
||||
|
||||
|
||||
asset_type_map = {
|
||||
|
@ -505,7 +413,6 @@ asset_type_map = {
|
|||
'WAR': 'warrant',
|
||||
'IOPT': 'warrant',
|
||||
'BAG': 'bag',
|
||||
'CRYPTO': 'crypto', # bc it's diff then fiat?
|
||||
# 'NEWS': 'news',
|
||||
}
|
||||
|
||||
|
@ -545,9 +452,7 @@ async def _setup_quote_stream(
|
|||
|
||||
to_trio.send_nowait(None)
|
||||
|
||||
async with load_aio_clients(
|
||||
disconnect_on_exit=False,
|
||||
) as accts2clients:
|
||||
async with load_aio_clients() as accts2clients:
|
||||
caccount_name, client = get_preferred_data_client(accts2clients)
|
||||
contract = contract or (await client.find_contract(symbol))
|
||||
ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))
|
||||
|
@ -593,11 +498,10 @@ async def _setup_quote_stream(
|
|||
# Manually do the dereg ourselves.
|
||||
teardown()
|
||||
except trio.WouldBlock:
|
||||
# log.warning(
|
||||
# f'channel is blocking symbol feed for {symbol}?'
|
||||
# f'\n{to_trio.statistics}'
|
||||
# )
|
||||
pass
|
||||
log.warning(
|
||||
f'channel is blocking symbol feed for {symbol}?'
|
||||
f'\n{to_trio.statistics}'
|
||||
)
|
||||
|
||||
# except trio.WouldBlock:
|
||||
# # for slow debugging purposes to avoid clobbering prompt
|
||||
|
@ -627,8 +531,7 @@ async def open_aio_quote_stream(
|
|||
from_aio = _quote_streams.get(symbol)
|
||||
if from_aio:
|
||||
|
||||
# if we already have a cached feed deliver a rx side clone
|
||||
# to consumer
|
||||
# if we already have a cached feed deliver a rx side clone to consumer
|
||||
async with broadcast_receiver(
|
||||
from_aio,
|
||||
2**6,
|
||||
|
@ -650,17 +553,38 @@ async def open_aio_quote_stream(
|
|||
|
||||
|
||||
# TODO: cython/mypyc/numba this!
|
||||
# or we can at least cache a majority of the values
|
||||
# except for the ones we expect to change?..
|
||||
def normalize(
|
||||
ticker: Ticker,
|
||||
calc_price: bool = False
|
||||
|
||||
) -> dict:
|
||||
|
||||
# should be real volume for this contract by default
|
||||
calc_price = False
|
||||
|
||||
# check for special contract types
|
||||
con = ticker.contract
|
||||
fqsn, calc_price = con2fqsn(con)
|
||||
if type(con) in (
|
||||
ibis.Commodity,
|
||||
ibis.Forex,
|
||||
):
|
||||
# commodities and forex don't have an exchange name and
|
||||
# no real volume so we have to calculate the price
|
||||
suffix = con.secType
|
||||
# no real volume on this contract
|
||||
calc_price = True
|
||||
|
||||
else:
|
||||
suffix = con.primaryExchange
|
||||
if not suffix:
|
||||
suffix = con.exchange
|
||||
|
||||
# append a `.<suffix>` to the returned symbol
|
||||
# key for derivatives that normally is the expiry
|
||||
# date key.
|
||||
expiry = con.lastTradeDateOrContractMonth
|
||||
if expiry:
|
||||
suffix += f'.{expiry}'
|
||||
|
||||
# convert named tuples to dicts so we send usable keys
|
||||
new_ticks = []
|
||||
|
@ -692,7 +616,9 @@ def normalize(
|
|||
|
||||
# generate fqsn with possible specialized suffix
|
||||
# for derivatives, note the lowercase.
|
||||
data['symbol'] = data['fqsn'] = fqsn
|
||||
data['symbol'] = data['fqsn'] = '.'.join(
|
||||
(con.symbol, suffix)
|
||||
).lower()
|
||||
|
||||
# convert named tuples to dicts for transport
|
||||
tbts = data.get('tickByTicks')
|
||||
|
@ -757,20 +683,13 @@ async def stream_quotes(
|
|||
# TODO: more consistent field translation
|
||||
atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
|
||||
|
||||
if atype in {
|
||||
'forex',
|
||||
'index',
|
||||
'commodity',
|
||||
}:
|
||||
syminfo['no_vlm'] = True
|
||||
|
||||
# for stocks it seems TWS reports too small a tick size
|
||||
# such that you can't submit orders with that granularity?
|
||||
min_tick = 0.01 if atype == 'stock' else 0
|
||||
|
||||
syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)
|
||||
|
||||
# for "legacy" assets, volume is normally discreet, not
|
||||
# for "traditional" assets, volume is normally discreet, not
|
||||
# a float
|
||||
syminfo['lot_tick_size'] = 0.0
|
||||
|
||||
|
@ -790,9 +709,9 @@ async def stream_quotes(
|
|||
},
|
||||
|
||||
}
|
||||
return init_msgs, syminfo
|
||||
return init_msgs
|
||||
|
||||
init_msgs, syminfo = mk_init_msgs()
|
||||
init_msgs = mk_init_msgs()
|
||||
|
||||
# TODO: we should instead spawn a task that waits on a feed to start
|
||||
# and let it wait indefinitely..instead of this hard coded stuff.
|
||||
|
@ -801,14 +720,7 @@ async def stream_quotes(
|
|||
|
||||
# it might be outside regular trading hours so see if we can at
|
||||
# least grab history.
|
||||
if (
|
||||
isnan(first_ticker.last)
|
||||
and type(first_ticker.contract) not in (
|
||||
ibis.Commodity,
|
||||
ibis.Forex,
|
||||
ibis.Crypto,
|
||||
)
|
||||
):
|
||||
if isnan(first_ticker.last):
|
||||
task_status.started((init_msgs, first_quote))
|
||||
|
||||
# it's not really live but this will unblock
|
||||
|
@ -819,77 +731,41 @@ async def stream_quotes(
|
|||
await trio.sleep_forever()
|
||||
return # we never expect feed to come up?
|
||||
|
||||
cs: Optional[trio.CancelScope] = None
|
||||
startup: bool = True
|
||||
while (
|
||||
startup
|
||||
or cs.cancel_called
|
||||
):
|
||||
with trio.CancelScope() as cs:
|
||||
async with (
|
||||
trio.open_nursery() as nurse,
|
||||
open_aio_quote_stream(
|
||||
async with open_aio_quote_stream(
|
||||
symbol=sym,
|
||||
contract=con,
|
||||
) as stream,
|
||||
):
|
||||
) as stream:
|
||||
|
||||
# ugh, clear ticks since we've consumed them
|
||||
# (ahem, ib_insync is stateful trash)
|
||||
first_ticker.ticks = []
|
||||
|
||||
# only on first entry at feed boot up
|
||||
if startup:
|
||||
startup = False
|
||||
task_status.started((init_msgs, first_quote))
|
||||
|
||||
# start a stream restarter task which monitors the
|
||||
# data feed event.
|
||||
async def reset_on_feed():
|
||||
|
||||
# TODO: this seems to be suppressed from the
|
||||
# traceback in ``tractor``?
|
||||
# assert 0
|
||||
|
||||
rt_ev = proxy.status_event(
|
||||
'Market data farm connection is OK:usfarm'
|
||||
)
|
||||
await rt_ev.wait()
|
||||
cs.cancel() # cancel called should now be set
|
||||
|
||||
nurse.start_soon(reset_on_feed)
|
||||
|
||||
async with aclosing(stream):
|
||||
if syminfo.get('no_vlm', False):
|
||||
|
||||
# generally speaking these feeds don't
|
||||
# include vlm data.
|
||||
atype = syminfo['asset_type']
|
||||
log.info(
|
||||
f'No-vlm {sym}@{atype}, skipping quote poll'
|
||||
)
|
||||
|
||||
else:
|
||||
# wait for real volume on feed (trading might be
|
||||
# closed)
|
||||
if type(first_ticker.contract) not in (
|
||||
ibis.Commodity,
|
||||
ibis.Forex
|
||||
):
|
||||
# wait for real volume on feed (trading might be closed)
|
||||
while True:
|
||||
ticker = await stream.receive()
|
||||
|
||||
# for a real volume contract we wait for
|
||||
# the first "real" trade to take place
|
||||
# for a real volume contract we wait for the first
|
||||
# "real" trade to take place
|
||||
if (
|
||||
# not calc_price
|
||||
# and not ticker.rtTime
|
||||
not ticker.rtTime
|
||||
):
|
||||
# spin consuming tickers until we
|
||||
# get a real market datum
|
||||
# spin consuming tickers until we get a real
|
||||
# market datum
|
||||
log.debug(f"New unsent ticker: {ticker}")
|
||||
continue
|
||||
else:
|
||||
log.debug("Received first volume tick")
|
||||
# ugh, clear ticks since we've
|
||||
# consumed them (ahem, ib_insync is
|
||||
# truly stateful trash)
|
||||
log.debug("Received first real volume tick")
|
||||
# ugh, clear ticks since we've consumed them
|
||||
# (ahem, ib_insync is truly stateful trash)
|
||||
ticker.ticks = []
|
||||
|
||||
# XXX: this works because we don't use
|
||||
|
@ -905,9 +781,7 @@ async def stream_quotes(
|
|||
# last = time.time()
|
||||
async for ticker in stream:
|
||||
quote = normalize(ticker)
|
||||
fqsn = quote['fqsn']
|
||||
# print(f'sending {fqsn}:\n{quote}')
|
||||
await send_chan.send({fqsn: quote})
|
||||
await send_chan.send({quote['fqsn']: quote})
|
||||
|
||||
# ugh, clear ticks since we've consumed them
|
||||
ticker.ticks = []
|
||||
|
@ -931,9 +805,6 @@ async def data_reset_hack(
|
|||
successful.
|
||||
- other OS support?
|
||||
- integration with ``ib-gw`` run in docker + Xorg?
|
||||
- is it possible to offer a local server that can be accessed by
|
||||
a client? Would sure be handy for running native java blobs
|
||||
that need to be wrangled.
|
||||
|
||||
'''
|
||||
|
||||
|
@ -964,10 +835,7 @@ async def data_reset_hack(
|
|||
client.mouse.click()
|
||||
client.keyboard.press('Ctrl', 'Alt', key) # keys are stacked
|
||||
|
||||
try:
|
||||
await tractor.to_asyncio.run_task(vnc_click_hack)
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
# we don't really need the ``xdotool`` approach any more B)
|
||||
return True
|
||||
|
@ -982,30 +850,14 @@ async def open_symbol_search(
|
|||
# TODO: load user defined symbol set locally for fast search?
|
||||
await ctx.started({})
|
||||
|
||||
async with (
|
||||
open_client_proxies() as (proxies, clients),
|
||||
open_data_client() as data_proxy,
|
||||
):
|
||||
async with open_data_client() as proxy:
|
||||
async with ctx.open_stream() as stream:
|
||||
|
||||
# select a non-history client for symbol search to lighten
|
||||
# the load in the main data node.
|
||||
proxy = data_proxy
|
||||
for name, proxy in proxies.items():
|
||||
if proxy is data_proxy:
|
||||
continue
|
||||
break
|
||||
|
||||
ib_client = proxy._aio_ns.ib
|
||||
log.info(f'Using {ib_client} for symbol search')
|
||||
|
||||
last = time.time()
|
||||
async for pattern in stream:
|
||||
log.info(f'received {pattern}')
|
||||
now = time.time()
|
||||
|
||||
# this causes tractor hang...
|
||||
# assert 0
|
||||
async for pattern in stream:
|
||||
log.debug(f'received {pattern}')
|
||||
now = time.time()
|
||||
|
||||
assert pattern, 'IB can not accept blank search pattern'
|
||||
|
||||
|
@ -1019,14 +871,7 @@ async def open_symbol_search(
|
|||
except trio.WouldBlock:
|
||||
pass
|
||||
|
||||
if (
|
||||
not pattern
|
||||
or pattern.isspace()
|
||||
|
||||
# XXX: not sure if this is a bad assumption but it
|
||||
# seems to make search snappier?
|
||||
or len(pattern) < 1
|
||||
):
|
||||
if not pattern or pattern.isspace():
|
||||
log.warning('empty pattern received, skipping..')
|
||||
|
||||
# TODO: *BUG* if nothing is returned here the client
|
||||
|
@ -1041,7 +886,7 @@ async def open_symbol_search(
|
|||
|
||||
continue
|
||||
|
||||
log.info(f'searching for {pattern}')
|
||||
log.debug(f'searching for {pattern}')
|
||||
|
||||
last = time.time()
|
||||
|
||||
|
@ -1050,16 +895,8 @@ async def open_symbol_search(
|
|||
stock_results = []
|
||||
|
||||
async def stash_results(target: Awaitable[list]):
|
||||
try:
|
||||
results = await target
|
||||
except tractor.trionics.Lagged:
|
||||
print("IB SYM-SEARCH OVERRUN?!?")
|
||||
return
|
||||
stock_results.extend(await target)
|
||||
|
||||
stock_results.extend(results)
|
||||
|
||||
for i in range(10):
|
||||
with trio.move_on_after(3) as cs:
|
||||
async with trio.open_nursery() as sn:
|
||||
sn.start_soon(
|
||||
stash_results,
|
||||
|
@ -1072,26 +909,17 @@ async def open_symbol_search(
|
|||
# trigger async request
|
||||
await trio.sleep(0)
|
||||
|
||||
if cs.cancelled_caught:
|
||||
log.warning(
|
||||
f'Search timeout? {proxy._aio_ns.ib.client}'
|
||||
# match against our ad-hoc set immediately
|
||||
adhoc_matches = fuzzy.extractBests(
|
||||
pattern,
|
||||
list(_adhoc_futes_set),
|
||||
score_cutoff=90,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
# # match against our ad-hoc set immediately
|
||||
# adhoc_matches = fuzzy.extractBests(
|
||||
# pattern,
|
||||
# list(_adhoc_futes_set),
|
||||
# score_cutoff=90,
|
||||
# )
|
||||
# log.info(f'fuzzy matched adhocs: {adhoc_matches}')
|
||||
# adhoc_match_results = {}
|
||||
# if adhoc_matches:
|
||||
# # TODO: do we need to pull contract details?
|
||||
# adhoc_match_results = {i[0]: {} for i in
|
||||
# adhoc_matches}
|
||||
log.info(f'fuzzy matched adhocs: {adhoc_matches}')
|
||||
adhoc_match_results = {}
|
||||
if adhoc_matches:
|
||||
# TODO: do we need to pull contract details?
|
||||
adhoc_match_results = {i[0]: {} for i in adhoc_matches}
|
||||
|
||||
log.debug(f'fuzzy matching stocks {stock_results}')
|
||||
stock_matches = fuzzy.extractBests(
|
||||
|
@ -1100,8 +928,7 @@ async def open_symbol_search(
|
|||
score_cutoff=50,
|
||||
)
|
||||
|
||||
# matches = adhoc_match_results | {
|
||||
matches = {
|
||||
matches = adhoc_match_results | {
|
||||
item[0]: {} for item in stock_matches
|
||||
}
|
||||
# TODO: we used to deliver contract details
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,64 +0,0 @@
|
|||
``kraken`` backend
|
||||
------------------
|
||||
though they don't have the most liquidity of all the cexes they sure are
|
||||
accommodating to those of us who appreciate a little ``xmr``.
|
||||
|
||||
status
|
||||
******
|
||||
current support is *production grade* and both real-time data and order
|
||||
management should be correct and fast. this backend is used by core devs
|
||||
for live trading.
|
||||
|
||||
|
||||
config
|
||||
******
|
||||
In order to get order mode support your ``brokers.toml``
|
||||
needs to have something like the following:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[kraken]
|
||||
accounts.spot = 'spot'
|
||||
key_descr = "spot"
|
||||
api_key = "69696969696969696696969696969696969696969696969696969696"
|
||||
secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"
|
||||
|
||||
|
||||
If everything works correctly you should see any current positions
|
||||
loaded in the pps pane on chart load and you should also be able to
|
||||
check your trade records in the file::
|
||||
|
||||
<pikerk_conf_dir>/ledgers/trades_kraken_spot.toml
|
||||
|
||||
|
||||
An example ledger file will have entries written verbatim from the
|
||||
trade events schema:
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[TFJBKK-SMBZS-VJ4UWS]
|
||||
ordertxid = "SMBZSA-7CNQU-3HWLNJ"
|
||||
postxid = "SMBZSE-M7IF5-CFI7LT"
|
||||
pair = "XXMRZEUR"
|
||||
time = 1655691993.4133966
|
||||
type = "buy"
|
||||
ordertype = "limit"
|
||||
price = "103.97000000"
|
||||
cost = "499.99999977"
|
||||
fee = "0.80000000"
|
||||
vol = "4.80907954"
|
||||
margin = "0.00000000"
|
||||
misc = ""
|
||||
|
||||
|
||||
your ``pps.toml`` file will have position entries like,
|
||||
|
||||
.. code:: toml
|
||||
|
||||
[kraken.spot."xmreur.kraken"]
|
||||
size = 4.80907954
|
||||
ppu = 103.97000000
|
||||
bsuid = "XXMRZEUR"
|
||||
clears = [
|
||||
{ tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
|
||||
]
|
|
@ -1,61 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Kraken backend.
|
||||
|
||||
Sub-modules within break into the core functionalities:
|
||||
|
||||
- ``broker.py`` part for orders / trading endpoints
|
||||
- ``feed.py`` for real-time data feed endpoints
|
||||
- ``api.py`` for the core API machinery which is ``trio``-ized
|
||||
wrapping around ``ib_insync``.
|
||||
|
||||
'''
|
||||
|
||||
from piker.log import get_logger
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
from .api import (
|
||||
get_client,
|
||||
)
|
||||
from .feed import (
|
||||
open_history_client,
|
||||
open_symbol_search,
|
||||
stream_quotes,
|
||||
)
|
||||
from .broker import (
|
||||
trades_dialogue,
|
||||
norm_trade_records,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'get_client',
|
||||
'trades_dialogue',
|
||||
'open_history_client',
|
||||
'open_symbol_search',
|
||||
'stream_quotes',
|
||||
'norm_trade_records',
|
||||
]
|
||||
|
||||
|
||||
# tractor RPC enable arg
|
||||
__enable_modules__: list[str] = [
|
||||
'api',
|
||||
'feed',
|
||||
'broker',
|
||||
]
|
|
@ -1,621 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Kraken web API wrapping.
|
||||
|
||||
'''
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from datetime import datetime
|
||||
import itertools
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
Union,
|
||||
)
|
||||
import time
|
||||
|
||||
from bidict import bidict
|
||||
import pendulum
|
||||
import asks
|
||||
from fuzzywuzzy import process as fuzzy
|
||||
import numpy as np
|
||||
import urllib.parse
|
||||
import hashlib
|
||||
import hmac
|
||||
import base64
|
||||
import trio
|
||||
|
||||
from piker import config
|
||||
from piker.data.types import Struct
|
||||
from piker.data._source import Symbol
|
||||
from piker.brokers._util import (
|
||||
resproc,
|
||||
SymbolNotFound,
|
||||
BrokerError,
|
||||
DataThrottle,
|
||||
)
|
||||
from piker.pp import Transaction
|
||||
from . import log
|
||||
|
||||
# <uri>/<version>/
|
||||
_url = 'https://api.kraken.com/0'
|
||||
|
||||
|
||||
# Broker specific ohlc schema which includes a vwap field
|
||||
_ohlc_dtype = [
|
||||
('index', int),
|
||||
('time', int),
|
||||
('open', float),
|
||||
('high', float),
|
||||
('low', float),
|
||||
('close', float),
|
||||
('volume', float),
|
||||
('count', int),
|
||||
('bar_wap', float),
|
||||
]
|
||||
|
||||
# UI components allow this to be declared such that additional
|
||||
# (historical) fields can be exposed.
|
||||
ohlc_dtype = np.dtype(_ohlc_dtype)
|
||||
|
||||
_show_wap_in_history = True
|
||||
_symbol_info_translation: dict[str, str] = {
|
||||
'tick_decimals': 'pair_decimals',
|
||||
}
|
||||
|
||||
|
||||
def get_config() -> dict[str, Any]:
|
||||
|
||||
conf, path = config.load()
|
||||
section = conf.get('kraken')
|
||||
|
||||
if section is None:
|
||||
log.warning(f'No config section found for kraken in {path}')
|
||||
return {}
|
||||
|
||||
return section
|
||||
|
||||
|
||||
def get_kraken_signature(
|
||||
urlpath: str,
|
||||
data: dict[str, Any],
|
||||
secret: str
|
||||
) -> str:
|
||||
postdata = urllib.parse.urlencode(data)
|
||||
encoded = (str(data['nonce']) + postdata).encode()
|
||||
message = urlpath.encode() + hashlib.sha256(encoded).digest()
|
||||
|
||||
mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
|
||||
sigdigest = base64.b64encode(mac.digest())
|
||||
return sigdigest.decode()
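

# Hedged usage sketch (not part of the original file/diff): how the
# signature helper above is typically wired into a private REST call.
# The endpoint name and payload values below are illustrative
# assumptions only.
def _example_private_headers(api_key: str, secret: str) -> dict[str, str]:
    data = {'nonce': str(int(1000 * time.time())), 'ofs': 0}
    uri_path = '/0/private/TradesHistory'
    return {
        'Content-Type': 'application/x-www-form-urlencoded',
        'API-Key': api_key,
        'API-Sign': get_kraken_signature(uri_path, data, secret),
    }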
|
||||
|
||||
|
||||
class InvalidKey(ValueError):
|
||||
'''
|
||||
EAPI:Invalid key
|
||||
This error is returned when the API key used for the call is
|
||||
either expired or disabled, please review the API key in your
|
||||
Settings -> API tab of account management or generate a new one
|
||||
and update your application.
|
||||
|
||||
'''
|
||||
|
||||
|
||||
# https://www.kraken.com/features/api#get-tradable-pairs
|
||||
class Pair(Struct):
|
||||
altname: str # alternate pair name
|
||||
wsname: str # WebSocket pair name (if available)
|
||||
aclass_base: str # asset class of base component
|
||||
base: str # asset id of base component
|
||||
aclass_quote: str # asset class of quote component
|
||||
quote: str # asset id of quote component
|
||||
lot: str # volume lot size
|
||||
|
||||
cost_decimals: int
|
||||
costmin: float
|
||||
pair_decimals: int # scaling decimal places for pair
|
||||
lot_decimals: int # scaling decimal places for volume
|
||||
|
||||
# amount to multiply lot volume by to get currency volume
|
||||
lot_multiplier: float
|
||||
|
||||
# array of leverage amounts available when buying
|
||||
leverage_buy: list[int]
|
||||
# array of leverage amounts available when selling
|
||||
leverage_sell: list[int]
|
||||
|
||||
# fee schedule array in [volume, percent fee] tuples
|
||||
fees: list[tuple[int, float]]
|
||||
|
||||
# maker fee schedule array in [volume, percent fee] tuples (if on
|
||||
# maker/taker)
|
||||
fees_maker: list[tuple[int, float]]
|
||||
|
||||
fee_volume_currency: str # volume discount currency
|
||||
margin_call: str # margin call level
|
||||
margin_stop: str # stop-out/liquidation margin level
|
||||
ordermin: float # minimum order volume for pair
|
||||
tick_size: float # min price step size
|
||||
status: str
|
||||
|
||||
short_position_limit: float = 0
|
||||
long_position_limit: float = float('inf')
|
||||
|
||||
|
||||
class Client:
|
||||
|
||||
# global symbol normalization table
|
||||
_ntable: dict[str, str] = {}
|
||||
_atable: bidict[str, str] = bidict()
|
||||
_pairs: dict[str, Pair] = {}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config: dict[str, str],
|
||||
name: str = '',
|
||||
api_key: str = '',
|
||||
secret: str = ''
|
||||
) -> None:
|
||||
self._sesh = asks.Session(connections=4)
|
||||
self._sesh.base_location = _url
|
||||
self._sesh.headers.update({
|
||||
'User-Agent':
|
||||
'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
|
||||
})
|
||||
self.conf: dict[str, str] = config
|
||||
self._name = name
|
||||
self._api_key = api_key
|
||||
self._secret = secret
|
||||
|
||||
@property
|
||||
def pairs(self) -> dict[str, Pair]:
|
||||
if self._pairs is None:
|
||||
raise RuntimeError(
|
||||
"Make sure to run `cache_symbols()` on startup!"
|
||||
)
|
||||
# retrieve and cache all symbols
|
||||
|
||||
return self._pairs
|
||||
|
||||
async def _public(
|
||||
self,
|
||||
method: str,
|
||||
data: dict,
|
||||
) -> dict[str, Any]:
|
||||
resp = await self._sesh.post(
|
||||
path=f'/public/{method}',
|
||||
json=data,
|
||||
timeout=float('inf')
|
||||
)
|
||||
return resproc(resp, log)
|
||||
|
||||
async def _private(
|
||||
self,
|
||||
method: str,
|
||||
data: dict,
|
||||
uri_path: str
|
||||
) -> dict[str, Any]:
|
||||
headers = {
|
||||
'Content-Type':
|
||||
'application/x-www-form-urlencoded',
|
||||
'API-Key':
|
||||
self._api_key,
|
||||
'API-Sign':
|
||||
get_kraken_signature(uri_path, data, self._secret)
|
||||
}
|
||||
resp = await self._sesh.post(
|
||||
path=f'/private/{method}',
|
||||
data=data,
|
||||
headers=headers,
|
||||
timeout=float('inf')
|
||||
)
|
||||
return resproc(resp, log)
|
||||
|
||||
async def endpoint(
|
||||
self,
|
||||
method: str,
|
||||
data: dict[str, Any]
|
||||
|
||||
) -> dict[str, Any]:
|
||||
uri_path = f'/0/private/{method}'
|
||||
data['nonce'] = str(int(1000*time.time()))
|
||||
return await self._private(method, data, uri_path)
|
||||
|
||||
async def get_balances(
|
||||
self,
|
||||
) -> dict[str, float]:
|
||||
'''
|
||||
Return the set of asset balances for this account
|
||||
by symbol.
|
||||
|
||||
'''
|
||||
resp = await self.endpoint(
|
||||
'Balance',
|
||||
{},
|
||||
)
|
||||
by_bsuid = resp['result']
|
||||
return {
|
||||
self._atable[sym].lower(): float(bal)
|
||||
for sym, bal in by_bsuid.items()
|
||||
}
|
||||
|
||||
async def get_assets(self) -> dict[str, dict]:
|
||||
resp = await self._public('Assets', {})
|
||||
return resp['result']
|
||||
|
||||
async def cache_assets(self) -> None:
|
||||
assets = self.assets = await self.get_assets()
|
||||
for bsuid, info in assets.items():
|
||||
self._atable[bsuid] = info['altname']
|
||||
|
||||
async def get_trades(
|
||||
self,
|
||||
fetch_limit: int | None = None,
|
||||
|
||||
) -> dict[str, Any]:
|
||||
'''
|
||||
Get the trades (aka cleared orders) history from the rest endpoint:
|
||||
https://docs.kraken.com/rest/#operation/getTradeHistory
|
||||
|
||||
'''
|
||||
ofs = 0
|
||||
trades_by_id: dict[str, Any] = {}
|
||||
|
||||
for i in itertools.count():
|
||||
if (
|
||||
fetch_limit
|
||||
and i >= fetch_limit
|
||||
):
|
||||
break
|
||||
|
||||
# increment 'ofs' pagination offset
|
||||
ofs = i*50
|
||||
|
||||
resp = await self.endpoint(
|
||||
'TradesHistory',
|
||||
{'ofs': ofs},
|
||||
)
|
||||
by_id = resp['result']['trades']
|
||||
trades_by_id.update(by_id)
|
||||
|
||||
# can get up to 50 results per query, see:
|
||||
# https://docs.kraken.com/rest/#tag/User-Data/operation/getTradeHistory
|
||||
if (
|
||||
len(by_id) < 50
|
||||
):
|
||||
err = resp.get('error')
|
||||
if err:
|
||||
raise BrokerError(err)
|
||||
|
||||
# we know we received the max amount of
|
||||
# trade results so there may be more history.
|
||||
# catch the end of the trades
|
||||
count = resp['result']['count']
|
||||
break
|
||||
|
||||
# sanity check on update
|
||||
assert count == len(trades_by_id.values())
|
||||
return trades_by_id
|
||||
|
||||
async def get_xfers(
|
||||
self,
|
||||
asset: str,
|
||||
src_asset: str = '',
|
||||
|
||||
) -> dict[str, Transaction]:
|
||||
'''
|
||||
Get asset balance transfer transactions.
|
||||
|
||||
Currently only withdrawals are supported.
|
||||
|
||||
'''
|
||||
xfers: list[dict] = (await self.endpoint(
|
||||
'WithdrawStatus',
|
||||
{'asset': asset},
|
||||
))['result']
|
||||
|
||||
# eg. resp schema:
|
||||
# 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
|
||||
# 'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
|
||||
# 'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
|
||||
# 'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
|
||||
# 'amount': '0.00300726', 'fee': '0.00001000', 'time':
|
||||
# 1658347714, 'status': 'Success'}]}
|
||||
|
||||
trans: dict[str, Transaction] = {}
|
||||
for entry in xfers:
|
||||
|
||||
# look up the normalized name and asset info
|
||||
asset_key = entry['asset']
|
||||
asset_info = self.assets[asset_key]
|
||||
asset = self._atable[asset_key].lower()
|
||||
|
||||
# XXX: this is in the asset units (likely) so it isn't
|
||||
# quite the same as a commissions cost necessarily..)
|
||||
cost = float(entry['fee'])
|
||||
|
||||
fqsn = asset + '.kraken'
|
||||
pairinfo = Symbol.from_fqsn(
|
||||
fqsn,
|
||||
info={
|
||||
'asset_type': 'crypto',
|
||||
'lot_tick_size': asset_info['decimals'],
|
||||
},
|
||||
)
|
||||
|
||||
tran = Transaction(
|
||||
fqsn=fqsn,
|
||||
sym=pairinfo,
|
||||
tid=entry['txid'],
|
||||
dt=pendulum.from_timestamp(entry['time']),
|
||||
bsuid=f'{asset}{src_asset}',
|
||||
size=-1*(
|
||||
float(entry['amount'])
|
||||
+
|
||||
cost
|
||||
),
|
||||
# since this will be treated as a "sell" it
|
||||
# shouldn't be needed to compute the breakeven price.
|
||||
price='NaN',
|
||||
|
||||
# XXX: see note above
|
||||
cost=cost,
|
||||
)
|
||||
trans[tran.tid] = tran
|
||||
|
||||
return trans
|
||||
|
||||
async def submit_limit(
|
||||
self,
|
||||
symbol: str,
|
||||
price: float,
|
||||
action: str,
|
||||
size: float,
|
||||
reqid: str = None,
|
||||
validate: bool = False # set True test call without a real submission
|
||||
|
||||
) -> dict:
|
||||
'''
|
||||
Place an order and return integer request id provided by client.
|
||||
|
||||
'''
|
||||
# Build common data dict for common keys from both endpoints
|
||||
data = {
|
||||
"pair": symbol,
|
||||
"price": str(price),
|
||||
"validate": validate
|
||||
}
|
||||
if reqid is None:
|
||||
# Build order data for kraken api
|
||||
data |= {
|
||||
"ordertype": "limit",
|
||||
"type": action,
|
||||
"volume": str(size),
|
||||
}
|
||||
return await self.endpoint('AddOrder', data)
|
||||
|
||||
else:
|
||||
# Edit order data for kraken api
|
||||
data["txid"] = reqid
|
||||
return await self.endpoint('EditOrder', data)
|
||||
|
||||
async def submit_cancel(
|
||||
self,
|
||||
reqid: str,
|
||||
) -> dict:
|
||||
'''
|
||||
Send cancel request for order id ``reqid``.
|
||||
|
||||
'''
|
||||
# txid is a transaction id given by kraken
|
||||
return await self.endpoint('CancelOrder', {"txid": reqid})
|
||||
|
||||
async def symbol_info(
|
||||
self,
|
||||
pair: Optional[str] = None,
|
||||
|
||||
) -> dict[str, Pair] | Pair:
|
||||
|
||||
if pair is not None:
|
||||
pairs = {'pair': pair}
|
||||
else:
|
||||
pairs = None # get all pairs
|
||||
|
||||
resp = await self._public('AssetPairs', pairs)
|
||||
err = resp['error']
|
||||
if err:
|
||||
symbolname = pairs['pair'] if pair else None
|
||||
raise SymbolNotFound(f'{symbolname}.kraken')
|
||||
|
||||
pairs = resp['result']
|
||||
|
||||
if pair is not None:
|
||||
_, data = next(iter(pairs.items()))
|
||||
return Pair(**data)
|
||||
else:
|
||||
return {key: Pair(**data) for key, data in pairs.items()}
|
||||
|
||||
async def cache_symbols(self) -> dict:
|
||||
'''
|
||||
Load all market pair info, build and cache it for downstream use.
|
||||
|
||||
A ``._ntable: dict[str, str]`` is available for mapping the
|
||||
websocket pair name-keys and their http endpoint API (smh)
|
||||
equivalents to the "alternative name" which is generally the one
|
||||
we actually want to use XD
|
||||
|
||||
'''
|
||||
if not self._pairs:
|
||||
self._pairs.update(await self.symbol_info())
|
||||
|
||||
# table of all ws and rest keys to their alt-name values.
|
||||
ntable: dict[str, str] = {}
|
||||
|
||||
for rest_key in list(self._pairs.keys()):
|
||||
|
||||
pair: Pair = self._pairs[rest_key]
|
||||
altname = pair.altname
|
||||
wsname = pair.wsname
|
||||
ntable[rest_key] = ntable[wsname] = altname
|
||||
|
||||
# register the pair under all monikers, a giant flat
|
||||
# surjection of all possible names to each info obj.
|
||||
self._pairs[altname] = self._pairs[wsname] = pair
|
||||
|
||||
self._ntable.update(ntable)
|
||||
|
||||
return self._pairs
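
    # Hedged illustration (not part of the original diff) of the name
    # "surjection" built above; the concrete pair keys are assumptions
    # based on kraken's usual naming scheme:
    #
    #   rest key 'XXBTZUSD'  -> altname 'XBTUSD'
    #   ws name  'XBT/USD'   -> altname 'XBTUSD'
    #
    # so after ``cache_symbols()`` both keys should resolve to the
    # same ``Pair`` instance:
    #
    #   client._pairs['XBT/USD'] is client._pairs['XBTUSD']  # True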
|
||||
|
||||
async def search_symbols(
|
||||
self,
|
||||
pattern: str,
|
||||
limit: int = None,
|
||||
|
||||
) -> dict[str, Any]:
|
||||
'''
|
||||
Search for a symbol by "alt name"..
|
||||
|
||||
It is expected that the ``Client._pairs`` table
|
||||
gets populated before conducting the underlying fuzzy-search
|
||||
over the pair-key set.
|
||||
|
||||
'''
|
||||
if not len(self._pairs):
|
||||
await self.cache_symbols()
|
||||
assert self._pairs, '`Client.cache_symbols()` was never called!?'
|
||||
|
||||
matches = fuzzy.extractBests(
|
||||
pattern,
|
||||
self._pairs,
|
||||
score_cutoff=50,
|
||||
)
|
||||
# repack in dict form
|
||||
return {item[0].altname: item[0] for item in matches}
|
||||
|
||||
async def bars(
|
||||
self,
|
||||
symbol: str = 'XBTUSD',
|
||||
|
||||
# UTC 2017-07-02 12:53:20
|
||||
since: Union[int, datetime] | None = None,
|
||||
count: int = 720, # <- max allowed per query
|
||||
as_np: bool = True,
|
||||
|
||||
) -> dict:
|
||||
|
||||
if since is None:
|
||||
since = pendulum.now('UTC').start_of('minute').subtract(
|
||||
minutes=count).timestamp()
|
||||
|
||||
elif isinstance(since, int):
|
||||
since = pendulum.from_timestamp(since).timestamp()
|
||||
|
||||
else: # presumably a pendulum datetime
|
||||
since = since.timestamp()
|
||||
|
||||
# UTC 2017-07-02 12:53:20 is oldest seconds value
|
||||
since = str(max(1499000000, int(since)))
|
||||
json = await self._public(
|
||||
'OHLC',
|
||||
data={
|
||||
'pair': symbol,
|
||||
'since': since,
|
||||
},
|
||||
)
|
||||
try:
|
||||
res = json['result']
|
||||
res.pop('last')
|
||||
bars = next(iter(res.values()))
|
||||
|
||||
new_bars = []
|
||||
|
||||
first = bars[0]
|
||||
last_nz_vwap = first[-3]
|
||||
if last_nz_vwap == 0:
|
||||
# use close if vwap is zero
|
||||
last_nz_vwap = first[-4]
|
||||
|
||||
# convert all fields to native types
|
||||
for i, bar in enumerate(bars):
|
||||
# normalize weird zero-ed vwap values..cmon kraken..
|
||||
# indicates vwap didn't change since last bar
|
||||
vwap = float(bar.pop(-3))
|
||||
if vwap != 0:
|
||||
last_nz_vwap = vwap
|
||||
if vwap == 0:
|
||||
vwap = last_nz_vwap
|
||||
|
||||
# re-insert vwap as the last of the fields
|
||||
bar.append(vwap)
|
||||
|
||||
new_bars.append(
|
||||
(i,) + tuple(
|
||||
ftype(bar[j]) for j, (name, ftype) in enumerate(
|
||||
_ohlc_dtype[1:]
|
||||
)
|
||||
)
|
||||
)
|
||||
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
|
||||
return array
|
||||
except KeyError:
|
||||
errmsg = json['error'][0]
|
||||
|
||||
if 'not found' in errmsg:
|
||||
raise SymbolNotFound(errmsg + f': {symbol}')
|
||||
|
||||
elif 'Too many requests' in errmsg:
|
||||
raise DataThrottle(f'{symbol}')
|
||||
|
||||
else:
|
||||
raise BrokerError(errmsg)
|
||||
|
||||
@classmethod
|
||||
def normalize_symbol(
|
||||
cls,
|
||||
ticker: str
|
||||
) -> tuple[str, Pair]:
|
||||
'''
|
||||
Normalize symbol names to a 3x3 pair from the global
|
||||
definition map which we build out from the data retrieved from
|
||||
the 'AssetPairs' endpoint, see methods above.
|
||||
|
||||
'''
|
||||
ticker = cls._ntable[ticker]
|
||||
return ticker.lower(), cls._pairs[ticker]
|
||||
|
||||
|
||||
@acm
|
||||
async def get_client() -> Client:
|
||||
|
||||
conf = get_config()
|
||||
if conf:
|
||||
client = Client(
|
||||
conf,
|
||||
name=conf['key_descr'],
|
||||
api_key=conf['api_key'],
|
||||
secret=conf['secret']
|
||||
)
|
||||
else:
|
||||
client = Client({})
|
||||
|
||||
# at startup, load all symbols, and asset info in
|
||||
# batch requests.
|
||||
async with trio.open_nursery() as nurse:
|
||||
nurse.start_soon(client.cache_assets)
|
||||
await client.cache_symbols()
|
||||
|
||||
yield client
|
File diff suppressed because it is too large
|
@ -1,459 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Real-time and historical data feed endpoints.
|
||||
|
||||
'''
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from datetime import datetime
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
Callable,
|
||||
)
|
||||
import time
|
||||
|
||||
from async_generator import aclosing
|
||||
from fuzzywuzzy import process as fuzzy
|
||||
import numpy as np
|
||||
import pendulum
|
||||
from trio_typing import TaskStatus
|
||||
import tractor
|
||||
import trio
|
||||
|
||||
from piker._cacheables import open_cached_client
|
||||
from piker.brokers._util import (
|
||||
BrokerError,
|
||||
DataThrottle,
|
||||
DataUnavailable,
|
||||
)
|
||||
from piker.log import get_console_log
|
||||
from piker.data.types import Struct
|
||||
from piker.data._web_bs import open_autorecon_ws, NoBsWs
|
||||
from . import log
|
||||
from .api import (
|
||||
Client,
|
||||
Pair,
|
||||
)
|
||||
|
||||
|
||||
class OHLC(Struct):
|
||||
'''
|
||||
Description of the flattened OHLC quote format.
|
||||
|
||||
For schema details see:
|
||||
https://docs.kraken.com/websockets/#message-ohlc
|
||||
|
||||
'''
|
||||
chan_id: int # internal kraken id
|
||||
chan_name: str # eg. ohlc-1 (name-interval)
|
||||
pair: str # fx pair
|
||||
time: float # Begin time of interval, in seconds since epoch
|
||||
etime: float # End time of interval, in seconds since epoch
|
||||
open: float # Open price of interval
|
||||
high: float # High price within interval
|
||||
low: float # Low price within interval
|
||||
close: float # Close price of interval
|
||||
vwap: float # Volume weighted average price within interval
|
||||
volume: float # Accumulated volume **within interval**
|
||||
count: int # Number of trades within interval
|
||||
# (sampled) generated tick data
|
||||
ticks: list[Any] = []
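
    # Hedged illustration (not from the original diff): how a raw ws
    # ohlc frame (parsed further below in ``process_data_feed_msgs()``)
    # maps onto this struct; every value here is made up.
    #
    #   msg = [
    #       42,                                   # chan_id
    #       ['1658347714.0', '1658347740.0',      # time, etime
    #        '103.9', '104.1', '103.8', '104.0',  # open, high, low, close
    #        '104.0', '4.2', 7],                  # vwap, volume, count
    #       'ohlc-1',                             # chan_name
    #       'XMR/EUR',                            # pair
    #   ]
    #   chan_id, *payload, chan_name, pair = msg
    #   ohlc = OHLC(chan_id, chan_name, pair, *payload[0])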
|
||||
|
||||
|
||||
async def stream_messages(
|
||||
ws: NoBsWs,
|
||||
):
|
||||
'''
|
||||
Message stream parser and heartbeat handler.
|
||||
|
||||
Deliver ws subscription messages as well as handle heartbeat logic
|
||||
through a single async generator.
|
||||
|
||||
'''
|
||||
too_slow_count = last_hb = 0
|
||||
|
||||
while True:
|
||||
|
||||
with trio.move_on_after(5) as cs:
|
||||
msg = await ws.recv_msg()
|
||||
|
||||
# trigger reconnection if heartbeat is laggy
|
||||
if cs.cancelled_caught:
|
||||
|
||||
too_slow_count += 1
|
||||
|
||||
if too_slow_count > 20:
|
||||
log.warning(
|
||||
"Heartbeat is too slow, resetting ws connection")
|
||||
|
||||
await ws._connect()
|
||||
too_slow_count = 0
|
||||
continue
|
||||
|
||||
match msg:
|
||||
case {'event': 'heartbeat'}:
|
||||
now = time.time()
|
||||
delay = now - last_hb
|
||||
last_hb = now
|
||||
|
||||
# XXX: why tf is this not printing without --tl flag?
|
||||
log.debug(f"Heartbeat after {delay}")
|
||||
# print(f"Heartbeat after {delay}")
|
||||
|
||||
continue
|
||||
|
||||
case _:
|
||||
# passthrough sub msgs
|
||||
yield msg
|
||||
|
||||
|
||||
async def process_data_feed_msgs(
|
||||
ws: NoBsWs,
|
||||
):
|
||||
'''
|
||||
Parse and pack data feed messages.
|
||||
|
||||
'''
|
||||
async for msg in stream_messages(ws):
|
||||
match msg:
|
||||
case {
|
||||
'errorMessage': errmsg
|
||||
}:
|
||||
raise BrokerError(errmsg)
|
||||
|
||||
case {
|
||||
'event': 'subscriptionStatus',
|
||||
} as sub:
|
||||
log.info(
|
||||
'WS subscription is active:\n'
|
||||
f'{sub}'
|
||||
)
|
||||
continue
|
||||
|
||||
case [
|
||||
chan_id,
|
||||
*payload_array,
|
||||
chan_name,
|
||||
pair
|
||||
]:
|
||||
if 'ohlc' in chan_name:
|
||||
ohlc = OHLC(
|
||||
chan_id,
|
||||
chan_name,
|
||||
pair,
|
||||
*payload_array[0]
|
||||
)
|
||||
ohlc.typecast()
|
||||
yield 'ohlc', ohlc
|
||||
|
||||
elif 'spread' in chan_name:
|
||||
|
||||
bid, ask, ts, bsize, asize = map(
|
||||
float, payload_array[0])
|
||||
|
||||
# TODO: really makes you think IB has a horrible API...
|
||||
quote = {
|
||||
'symbol': pair.replace('/', ''),
|
||||
'ticks': [
|
||||
{'type': 'bid', 'price': bid, 'size': bsize},
|
||||
{'type': 'bsize', 'price': bid, 'size': bsize},
|
||||
|
||||
{'type': 'ask', 'price': ask, 'size': asize},
|
||||
{'type': 'asize', 'price': ask, 'size': asize},
|
||||
],
|
||||
}
|
||||
yield 'l1', quote
|
||||
|
||||
# elif 'book' in msg[-2]:
|
||||
# chan_id, *payload_array, chan_name, pair = msg
|
||||
# print(msg)
|
||||
|
||||
case _:
|
||||
print(f'UNHANDLED MSG: {msg}')
|
||||
# yield msg
|
||||
|
||||
|
||||
def normalize(
|
||||
ohlc: OHLC,
|
||||
|
||||
) -> dict:
|
||||
quote = ohlc.to_dict()
|
||||
quote['broker_ts'] = quote['time']
|
||||
quote['brokerd_ts'] = time.time()
|
||||
quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
|
||||
quote['last'] = quote['close']
|
||||
quote['bar_wap'] = ohlc.vwap
|
||||
|
||||
# seriously eh? what's with this non-symmetry everywhere
|
||||
# in subscription systems...
|
||||
# XXX: piker style is always lowercases symbols.
|
||||
topic = quote['pair'].replace('/', '').lower()
|
||||
|
||||
# print(quote)
|
||||
return topic, quote
|
||||
|
||||
|
||||
@acm
|
||||
async def open_history_client(
|
||||
symbol: str,
|
||||
|
||||
) -> tuple[Callable, int]:
|
||||
|
||||
# TODO implement history getter for the new storage layer.
|
||||
async with open_cached_client('kraken') as client:
|
||||
|
||||
# lol, kraken won't send any more than the "last"
|
||||
# 720 1m bars.. so we have to just ignore further
|
||||
# requests of this type..
|
||||
queries: int = 0
|
||||
|
||||
async def get_ohlc(
|
||||
timeframe: float,
|
||||
end_dt: Optional[datetime] = None,
|
||||
start_dt: Optional[datetime] = None,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
datetime, # start
|
||||
datetime, # end
|
||||
]:
|
||||
|
||||
nonlocal queries
|
||||
if (
|
||||
queries > 0
|
||||
or timeframe != 60
|
||||
):
|
||||
raise DataUnavailable(
|
||||
'Only a single query for 1m bars supported')
|
||||
|
||||
count = 0
|
||||
while count <= 3:
|
||||
try:
|
||||
array = await client.bars(
|
||||
symbol,
|
||||
since=end_dt,
|
||||
)
|
||||
count += 1
|
||||
queries += 1
|
||||
break
|
||||
except DataThrottle:
|
||||
log.warning(f'kraken OHLC throttle for {symbol}')
|
||||
await trio.sleep(1)
|
||||
|
||||
start_dt = pendulum.from_timestamp(array[0]['time'])
|
||||
end_dt = pendulum.from_timestamp(array[-1]['time'])
|
||||
return array, start_dt, end_dt
|
||||
|
||||
yield get_ohlc, {'erlangs': 1, 'rate': 1}
|
||||
|
||||
|
||||
async def stream_quotes(
|
||||
|
||||
send_chan: trio.abc.SendChannel,
|
||||
symbols: list[str],
|
||||
feed_is_live: trio.Event,
|
||||
loglevel: str = None,
|
||||
|
||||
# backend specific
|
||||
sub_type: str = 'ohlc',
|
||||
|
||||
# startup sync
|
||||
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Subscribe for ohlc stream of quotes for ``pairs``.
|
||||
|
||||
``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
|
||||
|
||||
'''
|
||||
# XXX: required to propagate ``tractor`` loglevel to piker logging
|
||||
get_console_log(loglevel or tractor.current_actor().loglevel)
|
||||
|
||||
ws_pairs = {}
|
||||
sym_infos = {}
|
||||
|
||||
async with open_cached_client('kraken') as client, send_chan as send_chan:
|
||||
|
||||
# keep client cached for real-time section
|
||||
for sym in symbols:
|
||||
|
||||
# transform to upper since piker style is always lower
|
||||
sym = sym.upper()
|
||||
si: Pair = await client.symbol_info(sym)
|
||||
# try:
|
||||
# si = Pair(**sym_info) # validation
|
||||
# except TypeError:
|
||||
# fields_diff = set(sym_info) - set(Pair.__struct_fields__)
|
||||
# raise TypeError(
|
||||
# f'Missing msg fields {fields_diff}'
|
||||
# )
|
||||
syminfo = si.to_dict()
|
||||
syminfo['price_tick_size'] = 1. / 10**si.pair_decimals
|
||||
syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals
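            # e.g. (hypothetical values) pair_decimals=1 gives a
            # price_tick_size of 0.1 and lot_decimals=8 gives a
            # lot_tick_size of 1e-08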
|
||||
syminfo['asset_type'] = 'crypto'
|
||||
sym_infos[sym] = syminfo
|
||||
ws_pairs[sym] = si.wsname
|
||||
|
||||
symbol = symbols[0].lower()
|
||||
|
||||
init_msgs = {
|
||||
# pass back token, and bool, signalling if we're the writer
|
||||
# and that history has been written
|
||||
symbol: {
|
||||
'symbol_info': sym_infos[sym],
|
||||
'shm_write_opts': {'sum_tick_vml': False},
|
||||
'fqsn': sym,
|
||||
},
|
||||
}
|
||||
|
||||
@acm
|
||||
async def subscribe(ws: NoBsWs):
|
||||
|
||||
# XXX: setup subs
|
||||
# https://docs.kraken.com/websockets/#message-subscribe
|
||||
# specific logic for this in kraken's sync client:
|
||||
# https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
|
||||
ohlc_sub = {
|
||||
'event': 'subscribe',
|
||||
'pair': list(ws_pairs.values()),
|
||||
'subscription': {
|
||||
'name': 'ohlc',
|
||||
'interval': 1,
|
||||
},
|
||||
}
|
||||
|
||||
# TODO: we want to eventually allow unsubs which should
|
||||
# be completely fine to request from a separate task
|
||||
# since internally the ws methods appear to be FIFO
|
||||
# locked.
|
||||
await ws.send_msg(ohlc_sub)
|
||||
|
||||
# trade data (aka L1)
|
||||
l1_sub = {
|
||||
'event': 'subscribe',
|
||||
'pair': list(ws_pairs.values()),
|
||||
'subscription': {
|
||||
'name': 'spread',
|
||||
# 'depth': 10}
|
||||
},
|
||||
}
|
||||
|
||||
# pull a first quote and deliver
|
||||
await ws.send_msg(l1_sub)
|
||||
|
||||
yield
|
||||
|
||||
# unsub from all pairs on teardown
|
||||
if ws.connected():
|
||||
await ws.send_msg({
|
||||
'pair': list(ws_pairs.values()),
|
||||
'event': 'unsubscribe',
|
||||
'subscription': ['ohlc', 'spread'],
|
||||
})
|
||||
|
||||
# XXX: do we need to ack the unsub?
|
||||
# await ws.recv_msg()
|
||||
|
||||
# see the tips on reconnection logic:
|
||||
# https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
|
||||
ws: NoBsWs
|
||||
async with (
|
||||
open_autorecon_ws(
|
||||
'wss://ws.kraken.com/',
|
||||
fixture=subscribe,
|
||||
) as ws,
|
||||
aclosing(process_data_feed_msgs(ws)) as msg_gen,
|
||||
):
|
||||
# pull a first quote and deliver
|
||||
typ, ohlc_last = await anext(msg_gen)
|
||||
topic, quote = normalize(ohlc_last)
|
||||
|
||||
task_status.started((init_msgs, quote))
|
||||
|
||||
# lol, only "closes" when they're margin squeezing clients ;P
|
||||
feed_is_live.set()
|
||||
|
||||
# keep start of last interval for volume tracking
|
||||
last_interval_start = ohlc_last.etime
|
||||
|
||||
# start streaming
|
||||
async for typ, ohlc in msg_gen:
|
||||
|
||||
if typ == 'ohlc':
|
||||
|
||||
# TODO: can get rid of all this by using
|
||||
# ``trades`` subscription...
|
||||
|
||||
# generate tick values to match time & sales pane:
|
||||
# https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
|
||||
volume = ohlc.volume
|
||||
|
||||
# new OHLC sample interval
|
||||
if ohlc.etime > last_interval_start:
|
||||
last_interval_start = ohlc.etime
|
||||
tick_volume = volume
|
||||
|
||||
else:
|
||||
# this is the tick volume *within the interval*
|
||||
tick_volume = volume - ohlc_last.volume
|
||||
|
||||
ohlc_last = ohlc
|
||||
last = ohlc.close
|
||||
|
||||
if tick_volume:
|
||||
ohlc.ticks.append({
|
||||
'type': 'trade',
|
||||
'price': last,
|
||||
'size': tick_volume,
|
||||
})
|
||||
|
||||
topic, quote = normalize(ohlc)
|
||||
|
||||
elif typ == 'l1':
|
||||
quote = ohlc
|
||||
topic = quote['symbol'].lower()
|
||||
|
||||
await send_chan.send({topic: quote})
|
||||
|
||||
|
||||
@tractor.context
|
||||
async def open_symbol_search(
|
||||
ctx: tractor.Context,
|
||||
|
||||
) -> Client:
|
||||
async with open_cached_client('kraken') as client:
|
||||
|
||||
# load all symbols locally for fast search
|
||||
cache = await client.cache_symbols()
|
||||
await ctx.started(cache)
|
||||
|
||||
async with ctx.open_stream() as stream:
|
||||
|
||||
async for pattern in stream:
|
||||
|
||||
matches = fuzzy.extractBests(
|
||||
pattern,
|
||||
cache,
|
||||
score_cutoff=50,
|
||||
)
|
||||
# repack in dict form
|
||||
await stream.send(
|
||||
{item[0]['altname']: item[0]
|
||||
for item in matches}
|
||||
)
|
|
@ -18,9 +18,3 @@
|
|||
Market machinery for order executions, book, management.
|
||||
|
||||
"""
|
||||
from ._client import open_ems
|
||||
|
||||
|
||||
__all__ = [
|
||||
'open_ems',
|
||||
]
|
||||
|
|
|
@ -22,10 +22,54 @@ from enum import Enum
|
|||
from typing import Optional
|
||||
|
||||
from bidict import bidict
|
||||
from pydantic import BaseModel, validator
|
||||
|
||||
from ..data._source import Symbol
|
||||
from ..data.types import Struct
|
||||
from ..pp import Position
|
||||
from ._messages import BrokerdPosition, Status
|
||||
|
||||
|
||||
class Position(BaseModel):
|
||||
'''
|
||||
Basic pp (personal position) model with attached fills history.
|
||||
|
||||
This type should be IPC wire ready?
|
||||
|
||||
'''
|
||||
symbol: Symbol
|
||||
|
||||
# last size and avg entry price
|
||||
size: float
|
||||
avg_price: float # TODO: contextual pricing
|
||||
|
||||
# ordered record of known constituent trade messages
|
||||
fills: list[Status] = []
|
||||
|
||||
def update_from_msg(
|
||||
self,
|
||||
msg: BrokerdPosition,
|
||||
|
||||
) -> None:
|
||||
|
||||
# XXX: better place to do this?
|
||||
symbol = self.symbol
|
||||
|
||||
lot_size_digits = symbol.lot_size_digits
|
||||
avg_price, size = (
|
||||
round(msg['avg_price'], ndigits=symbol.tick_size_digits),
|
||||
round(msg['size'], ndigits=lot_size_digits),
|
||||
)
|
||||
|
||||
self.avg_price = avg_price
|
||||
self.size = size
|
||||
|
||||
@property
|
||||
def dsize(self) -> float:
|
||||
'''
|
||||
The "dollar" size of the pp, normally in trading (fiat) unit
|
||||
terms.
|
||||
|
||||
'''
|
||||
return self.avg_price * self.size
|
||||
|
||||
|
||||
_size_units = bidict({
|
||||
|
@ -40,9 +84,34 @@ SizeUnit = Enum(
|
|||
)
|
||||
|
||||
|
||||
class Allocator(Struct):
|
||||
class Allocator(BaseModel):
|
||||
|
||||
class Config:
|
||||
validate_assignment = True
|
||||
copy_on_model_validation = False
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
# required to get the account validator lookup working?
|
||||
extra = 'allow'
|
||||
underscore_attrs_are_private = False
|
||||
|
||||
symbol: Symbol
|
||||
account: Optional[str] = 'paper'
|
||||
# TODO: for enums this clearly doesn't fucking work, you can't set
|
||||
# a default at startup by passing in a `dict` but yet you can set
|
||||
# that value through assignment..for wtv cucked reason.. honestly, pure
|
||||
# unintuitive garbage.
|
||||
size_unit: str = 'currency'
|
||||
_size_units: dict[str, Optional[str]] = _size_units
|
||||
|
||||
@validator('size_unit', pre=True)
|
||||
def maybe_lookup_key(cls, v):
|
||||
# apply the corresponding enum key for the text "description" value
|
||||
if v not in _size_units:
|
||||
return _size_units.inverse[v]
|
||||
|
||||
assert v in _size_units
|
||||
return v
|
||||
|
||||
# TODO: if we ever want to support non-uniform entry-slot-proportion
|
||||
# "sizes"
|
||||
|
@ -51,28 +120,6 @@ class Allocator(Struct):
|
|||
units_limit: float
|
||||
currency_limit: float
|
||||
slots: int
|
||||
account: Optional[str] = 'paper'
|
||||
|
||||
_size_units: bidict[str, Optional[str]] = _size_units
|
||||
|
||||
# TODO: for enums this clearly doesn't fucking work, you can't set
|
||||
# a default at startup by passing in a `dict` but yet you can set
|
||||
# that value through assignment..for wtv cucked reason.. honestly, pure
|
||||
# unintuitive garbage.
|
||||
_size_unit: str = 'currency'
|
||||
|
||||
@property
|
||||
def size_unit(self) -> str:
|
||||
return self._size_unit
|
||||
|
||||
@size_unit.setter
|
||||
def size_unit(self, v: str) -> Optional[str]:
|
||||
if v not in _size_units:
|
||||
v = _size_units.inverse[v]
|
||||
|
||||
assert v in _size_units
|
||||
self._size_unit = v
|
||||
return v
|
||||
|
||||
def step_sizes(
|
||||
self,
|
||||
|
@ -93,13 +140,10 @@ class Allocator(Struct):
|
|||
else:
|
||||
return self.units_limit
|
||||
|
||||
def limit_info(self) -> tuple[str, float]:
|
||||
return self.size_unit, self.limit()
|
||||
|
||||
def next_order_info(
|
||||
self,
|
||||
|
||||
# we only need a startup size for exit calcs, we can then
|
||||
# we only need a startup size for exit calcs, we can the
|
||||
# determine how large slots should be if the initial pp size was
|
||||
# larger then the current live one, and the live one is smaller
|
||||
# then the initial config settings.
|
||||
|
@ -129,7 +173,7 @@ class Allocator(Struct):
|
|||
l_sub_pp = self.units_limit - abs_live_size
|
||||
|
||||
elif size_unit == 'currency':
|
||||
live_cost_basis = abs_live_size * live_pp.ppu
|
||||
live_cost_basis = abs_live_size * live_pp.avg_price
|
||||
slot_size = currency_per_slot / price
|
||||
l_sub_pp = (self.currency_limit - live_cost_basis) / price
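            # Hedged worked example (not from the diff), with made-up
            # settings currency_limit=5000, slots=4 and price=100:
            #   currency_per_slot = 5000 / 4 = 1250
            #   slot_size         = 1250 / 100 = 12.5 units per entry
            # and an existing pp whose cost basis is 2000:
            #   l_sub_pp          = (5000 - 2000) / 100 = 30 units left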
|
||||
|
||||
|
@ -140,14 +184,12 @@ class Allocator(Struct):
|
|||
|
||||
# an entry (adding-to or starting a pp)
|
||||
if (
|
||||
action == 'buy' and live_size > 0 or
|
||||
action == 'sell' and live_size < 0 or
|
||||
live_size == 0
|
||||
or (action == 'buy' and live_size > 0)
|
||||
or action == 'sell' and live_size < 0
|
||||
):
|
||||
order_size = min(
|
||||
slot_size,
|
||||
max(l_sub_pp, 0),
|
||||
)
|
||||
|
||||
order_size = min(slot_size, l_sub_pp)
|
||||
|
||||
# an exit (removing-from or going to net-zero pp)
|
||||
else:
|
||||
|
@ -163,7 +205,7 @@ class Allocator(Struct):
|
|||
if size_unit == 'currency':
|
||||
# compute the "projected" limit's worth of units at the
|
||||
# current pp (weighted) price:
|
||||
slot_size = currency_per_slot / live_pp.ppu
|
||||
slot_size = currency_per_slot / live_pp.avg_price
|
||||
|
||||
else:
|
||||
slot_size = u_per_slot
|
||||
|
@ -202,12 +244,7 @@ class Allocator(Struct):
|
|||
if order_size < slot_size:
|
||||
# compute a fractional slots size to display
|
||||
slots_used = self.slots_used(
|
||||
Position(
|
||||
symbol=sym,
|
||||
size=order_size,
|
||||
ppu=price,
|
||||
bsuid=sym,
|
||||
)
|
||||
Position(symbol=sym, size=order_size, avg_price=price)
|
||||
)
|
||||
|
||||
return {
|
||||
|
@ -234,8 +271,8 @@ class Allocator(Struct):
|
|||
abs_pp_size = abs(pp.size)
|
||||
|
||||
if self.size_unit == 'currency':
|
||||
# live_currency_size = size or (abs_pp_size * pp.ppu)
|
||||
live_currency_size = abs_pp_size * pp.ppu
|
||||
# live_currency_size = size or (abs_pp_size * pp.avg_price)
|
||||
live_currency_size = abs_pp_size * pp.avg_price
|
||||
prop = live_currency_size / self.currency_limit
|
||||
|
||||
else:
|
||||
|
@ -247,6 +284,14 @@ class Allocator(Struct):
|
|||
return round(prop * self.slots)
|
||||
|
||||
|
||||
_derivs = (
|
||||
'future',
|
||||
'continuous_future',
|
||||
'option',
|
||||
'futures_option',
|
||||
)
|
||||
|
||||
|
||||
def mk_allocator(
|
||||
|
||||
symbol: Symbol,
|
||||
|
@ -255,7 +300,7 @@ def mk_allocator(
|
|||
# default allocation settings
|
||||
defaults: dict[str, float] = {
|
||||
'account': None, # select paper by default
|
||||
# 'size_unit': 'currency',
|
||||
'size_unit': 'currency',
|
||||
'units_limit': 400,
|
||||
'currency_limit': 5e3,
|
||||
'slots': 4,
|
||||
|
@ -273,9 +318,42 @@ def mk_allocator(
|
|||
'currency_limit': 6e3,
|
||||
'slots': 6,
|
||||
}
|
||||
|
||||
defaults.update(user_def)
|
||||
|
||||
return Allocator(
|
||||
alloc = Allocator(
|
||||
symbol=symbol,
|
||||
**defaults,
|
||||
)
|
||||
|
||||
asset_type = symbol.type_key
|
||||
|
||||
# specific configs by asset class / type
|
||||
|
||||
if asset_type in _derivs:
|
||||
# since it's harder to know how currency "applies" in this case
|
||||
# given leverage properties
|
||||
alloc.size_unit = '# units'
|
||||
|
||||
# set units limit to slots size thus making the next
|
||||
# entry step 1.0
|
||||
alloc.units_limit = alloc.slots
|
||||
|
||||
# if the current position is already greater then the limit
|
||||
# settings, increase the limit to the current position
|
||||
if alloc.size_unit == 'currency':
|
||||
startup_size = startup_pp.size * startup_pp.avg_price
|
||||
|
||||
if startup_size > alloc.currency_limit:
|
||||
alloc.currency_limit = round(startup_size, ndigits=2)
|
||||
|
||||
else:
|
||||
startup_size = abs(startup_pp.size)
|
||||
|
||||
if startup_size > alloc.units_limit:
|
||||
alloc.units_limit = startup_size
|
||||
|
||||
if asset_type in _derivs:
|
||||
alloc.slots = alloc.units_limit
|
||||
|
||||
return alloc
|
||||
|
|
|
@ -18,32 +18,26 @@
|
|||
Orders and execution client API.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from typing import Dict
|
||||
from pprint import pformat
|
||||
from typing import TYPE_CHECKING
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
import trio
|
||||
import tractor
|
||||
from tractor.trionics import broadcast_receiver
|
||||
|
||||
from ..log import get_logger
|
||||
from ..data.types import Struct
|
||||
from ._ems import _emsd_main
|
||||
from .._daemon import maybe_open_emsd
|
||||
from ._messages import Order, Cancel
|
||||
from ..brokers import get_brokermod
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._messages import (
|
||||
BrokerdPosition,
|
||||
Status,
|
||||
)
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
class OrderBook(Struct):
|
||||
@dataclass
|
||||
class OrderBook:
|
||||
'''EMS-client-side order book ctl and tracking.
|
||||
|
||||
A style similar to "model-view" is used here where this api is
|
||||
|
@ -58,18 +52,20 @@ class OrderBook(Struct):
|
|||
# mem channels used to relay order requests to the EMS daemon
|
||||
_to_ems: trio.abc.SendChannel
|
||||
_from_order_book: trio.abc.ReceiveChannel
|
||||
_sent_orders: dict[str, Order] = {}
|
||||
|
||||
_sent_orders: Dict[str, Order] = field(default_factory=dict)
|
||||
_ready_to_receive: trio.Event = trio.Event()
|
||||
|
||||
def send(
|
||||
self,
|
||||
msg: Order | dict,
|
||||
msg: Order,
|
||||
|
||||
) -> dict:
|
||||
self._sent_orders[msg.oid] = msg
|
||||
self._to_ems.send_nowait(msg)
|
||||
self._to_ems.send_nowait(msg.dict())
|
||||
return msg
|
||||
|
||||
def send_update(
|
||||
def update(
|
||||
self,
|
||||
|
||||
uuid: str,
|
||||
|
@ -77,8 +73,9 @@ class OrderBook(Struct):
|
|||
|
||||
) -> dict:
|
||||
cmd = self._sent_orders[uuid]
|
||||
msg = cmd.copy(update=data)
|
||||
self._sent_orders[uuid] = msg
|
||||
msg = cmd.dict()
|
||||
msg.update(data)
|
||||
self._sent_orders[uuid] = Order(**msg)
|
||||
self._to_ems.send_nowait(msg)
|
||||
return cmd
|
||||
|
||||
|
@ -86,18 +83,12 @@ class OrderBook(Struct):
|
|||
"""Cancel an order (or alert) in the EMS.
|
||||
|
||||
"""
|
||||
cmd = self._sent_orders.get(uuid)
|
||||
if not cmd:
|
||||
log.error(
|
||||
f'Unknown order {uuid}!?\n'
|
||||
f'Maybe there is a stale entry or line?\n'
|
||||
f'You should report this as a bug!'
|
||||
)
|
||||
cmd = self._sent_orders[uuid]
|
||||
msg = Cancel(
|
||||
oid=uuid,
|
||||
symbol=cmd.symbol,
|
||||
)
|
||||
self._to_ems.send_nowait(msg)
|
||||
self._to_ems.send_nowait(msg.dict())
|
||||
|
||||
|
||||
_orders: OrderBook = None
|
||||
|
@ -158,36 +149,21 @@ async def relay_order_cmds_from_sync_code(
|
|||
book = get_orders()
|
||||
async with book._from_order_book.subscribe() as orders_stream:
|
||||
async for cmd in orders_stream:
|
||||
sym = cmd.symbol
|
||||
msg = pformat(cmd)
|
||||
if sym == symbol_key:
|
||||
log.info(f'Send order cmd:\n{msg}')
|
||||
if cmd['symbol'] == symbol_key:
|
||||
log.info(f'Send order cmd:\n{pformat(cmd)}')
|
||||
# send msg over IPC / wire
|
||||
await to_ems_stream.send(cmd)
|
||||
else:
|
||||
log.warning(
|
||||
f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
|
||||
f'\n{msg}'
|
||||
)
|
||||
|
||||
|
||||
@acm
|
||||
async def open_ems(
|
||||
fqsn: str,
|
||||
mode: str = 'live',
|
||||
loglevel: str = 'error',
|
||||
|
||||
) -> tuple[
|
||||
) -> (
|
||||
OrderBook,
|
||||
tractor.MsgStream,
|
||||
dict[
|
||||
# brokername, acctid
|
||||
tuple[str, str],
|
||||
list[BrokerdPosition],
|
||||
],
|
||||
list[str],
|
||||
dict[str, Status],
|
||||
]:
|
||||
dict,
|
||||
):
|
||||
'''
|
||||
Spawn an EMS daemon and begin sending orders and receiving
|
||||
alerts.
|
||||
|
@ -230,36 +206,18 @@ async def open_ems(
|
|||
|
||||
async with maybe_open_emsd(broker) as portal:
|
||||
|
||||
mod = get_brokermod(broker)
|
||||
if (
|
||||
not getattr(mod, 'trades_dialogue', None)
|
||||
or mode == 'paper'
|
||||
):
|
||||
mode = 'paper'
|
||||
|
||||
from ._ems import _emsd_main
|
||||
async with (
|
||||
# connect to emsd
|
||||
portal.open_context(
|
||||
|
||||
_emsd_main,
|
||||
fqsn=fqsn,
|
||||
exec_mode=mode,
|
||||
loglevel=loglevel,
|
||||
|
||||
) as (
|
||||
ctx,
|
||||
(
|
||||
positions,
|
||||
accounts,
|
||||
dialogs,
|
||||
)
|
||||
),
|
||||
) as (ctx, (positions, accounts)),
|
||||
|
||||
# open 2-way trade command stream
|
||||
ctx.open_stream() as trades_stream,
|
||||
):
|
||||
# start sync code order msg delivery task
|
||||
async with trio.open_nursery() as n:
|
||||
n.start_soon(
|
||||
relay_order_cmds_from_sync_code,
|
||||
|
@ -267,10 +225,4 @@ async def open_ems(
|
|||
trades_stream
|
||||
)
|
||||
|
||||
yield (
|
||||
book,
|
||||
trades_stream,
|
||||
positions,
|
||||
accounts,
|
||||
dialogs,
|
||||
)
|
||||
yield book, trades_stream, positions, accounts
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
@ -15,162 +15,108 @@
|
|||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Clearing sub-system message and protocols.
|
||||
Clearing system messagingn types and protocols.
|
||||
|
||||
"""
|
||||
# from collections import (
|
||||
# ChainMap,
|
||||
# deque,
|
||||
# )
|
||||
from typing import (
|
||||
Optional,
|
||||
Literal,
|
||||
)
|
||||
from typing import Optional, Union
|
||||
|
||||
from msgspec import field
|
||||
# TODO: try out just encoding/send direction for now?
|
||||
# import msgspec
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..data._source import Symbol
|
||||
from ..data.types import Struct
|
||||
|
||||
|
||||
# TODO: a composite for tracking msg flow on 2-legged
|
||||
# dialogs.
|
||||
# class Dialog(ChainMap):
|
||||
# '''
|
||||
# Msg collection abstraction to easily track the state changes of
|
||||
# a msg flow in one high level, query-able and immutable construct.
|
||||
|
||||
# The main use case is to query data from a (long-running)
|
||||
# msg-transaction-sequence
|
||||
|
||||
|
||||
# '''
|
||||
# def update(
|
||||
# self,
|
||||
# msg,
|
||||
# ) -> None:
|
||||
# self.maps.insert(0, msg.to_dict())
|
||||
|
||||
# def flatten(self) -> dict:
|
||||
# return dict(self)
|
||||
|
||||
|
||||
# TODO: ``msgspec`` stuff worth paying attention to:
|
||||
# - schema evolution:
|
||||
# https://jcristharif.com/msgspec/usage.html#schema-evolution
|
||||
# - for eg. ``BrokerdStatus``, instead just have separate messages?
|
||||
# - use literals for a common msg determined by diff keys?
|
||||
# - https://jcristharif.com/msgspec/usage.html#literal
|
||||
|
||||
# --------------
|
||||
# Client -> emsd
|
||||
# --------------
|
||||
|
||||
class Order(Struct):
|
||||
|
||||
# TODO: ideally we can combine these 2 fields into
|
||||
# 1 and just use the size polarity to determine a buy/sell.
|
||||
# i would like to see this become more like
|
||||
# https://jcristharif.com/msgspec/usage.html#literal
|
||||
# action: Literal[
|
||||
# 'live',
|
||||
# 'dark',
|
||||
# 'alert',
|
||||
# ]
|
||||
|
||||
action: Literal[
|
||||
'buy',
|
||||
'sell',
|
||||
'alert',
|
||||
]
|
||||
# determines whether the create execution
|
||||
# will be submitted to the ems or directly to
|
||||
# the backend broker
|
||||
exec_mode: Literal[
|
||||
'dark',
|
||||
'live',
|
||||
# 'paper', no right?
|
||||
]
|
||||
|
||||
# internal ``emdsd`` unique "order id"
|
||||
oid: str # uuid4
|
||||
symbol: str | Symbol
|
||||
account: str # should we set a default as '' ?
|
||||
|
||||
price: float
|
||||
size: float # -ve is "sell", +ve is "buy"
|
||||
|
||||
brokers: list[str] = []
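

# Hedged construction sketch (not part of the original diff): building a
# client-side ``Order`` msg from the fields above; every value is a
# made-up placeholder.
def _example_order() -> Order:
    from uuid import uuid4  # stdlib, only needed for this sketch
    return Order(
        action='buy',
        exec_mode='dark',
        oid=str(uuid4()),
        symbol='xmreur.kraken',  # fqsn-style key, illustrative only
        account='kraken.spot',   # illustrative account name
        price=103.97,
        size=4.8,
    )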


class Cancel(Struct):
    '''
    Cancel msg for removing a dark (ems triggered) or
class Cancel(BaseModel):
    '''Cancel msg for removing a dark (ems triggered) or
    broker-submitted (live) trigger/order.

    '''
    action: str = 'cancel'
    oid: str  # uuid4
    symbol: str
    action: str = 'cancel'


# --------------
class Order(BaseModel):

    action: str  # {'buy', 'sell', 'alert'}
    # internal ``emsd`` unique "order id"
    oid: str  # uuid4
    symbol: Union[str, Symbol]
    account: str  # should we set a default as '' ?

    price: float
    size: float
    brokers: list[str]

    # Assigned once initial ack is received
    # ack_time_ns: Optional[int] = None

    # determines whether the create execution
    # will be submitted to the ems or directly to
    # the backend broker
    exec_mode: str  # {'dark', 'live', 'paper'}

    class Config:
        # just for pre-loading a ``Symbol`` when used
        # in the order mode staging process
        arbitrary_types_allowed = True
        # don't copy this model instance when used in
        # a recursive model
        copy_on_model_validation = False


# Client <- emsd
# --------------
# update msgs from ems which relay state change info
# from the active clearing engine.

class Status(Struct):

    time_ns: int
    oid: str  # uuid4 ems-order dialog id

    resp: Literal[
        'pending',  # acked by broker but not yet open
        'open',
        'dark_open',  # dark/algo triggered order is open in ems clearing loop
        'triggered',  # above triggered order sent to brokerd, or an alert closed
        'closed',  # fully cleared all size/units
        'fill',  # partial execution
        'canceled',
        'error',
    ]
class Status(BaseModel):

    name: str = 'status'
    oid: str  # uuid4
    time_ns: int

    # {
    # 'dark_submitted',
    # 'dark_cancelled',
    # 'dark_triggered',

    # 'broker_submitted',
    # 'broker_cancelled',
    # 'broker_executed',
    # 'broker_filled',
    # 'broker_errored',

    # 'alert_submitted',
    # 'alert_triggered',

    # }
    resp: str  # "response", see above

    # symbol: str

    # trigger info
    trigger_price: Optional[float] = None
    # price: float

    # broker: Optional[str] = None

    # this maps normally to the ``BrokerdOrder.reqid`` below, an id
    # normally allocated internally by the backend broker routing system
    reqid: Optional[int | str] = None
    broker_reqid: Optional[Union[int, str]] = None

    # the (last) source order/request msg if provided
    # (eg. the Order/Cancel which causes this msg) and
    # acts as a back-reference to the corresponding
    # request message which was the source of this msg.
    req: Order | None = None

    # XXX: better design/name here?
    # flag that can be set to indicate a message for an order
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    src: Optional[str] = None

    # set when a cancel request msg was set for this order flow dialog
    # but the brokerd dialog isn't yet in a cancelled state.
    cancel_called: bool = False

    # for relaying a boxed brokerd-dialog-side msg data "through" the
    # ems layer to clients.
    # for relaying backend msg data "through" the ems layer
    brokerd_msg: dict = {}


# ---------------
# emsd -> brokerd
# ---------------
# requests *sent* from ems to respective backend broker daemon

class BrokerdCancel(Struct):
class BrokerdCancel(BaseModel):

    action: str = 'cancel'
    oid: str  # piker emsd order id
    time_ns: int


@@ -181,39 +127,34 @@ class BrokerdCancel(Struct):
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[int | str] = None
    action: str = 'cancel'
    reqid: Optional[Union[int, str]] = None


class BrokerdOrder(Struct):
class BrokerdOrder(BaseModel):

    action: str  # {buy, sell}
    oid: str
    account: str
    time_ns: int

    symbol: str  # fqsn
    price: float
    size: float

    # TODO: if we instead rely on a +ve/-ve size to determine
    # the action we more or less don't need this field right?
    action: str = ''  # {buy, sell}

    # "broker request id": broker specific/internal order id if this is
    # None, creates a new order otherwise if the id is valid the backend
    # api must modify the existing matching order. If the broker allows
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[int | str] = None
    reqid: Optional[Union[int, str]] = None

    symbol: str  # symbol.<providername> ?
    price: float
    size: float


# ---------------
# emsd <- brokerd
# ---------------
# requests *received* to ems from broker backend

class BrokerdOrderAck(Struct):

class BrokerdOrderAck(BaseModel):
    '''
    Immediate response to a brokerd order request providing the broker
    specific unique order id so that the EMS can associate this

@@ -221,93 +162,102 @@ class BrokerdOrderAck(Struct):
    ``.oid`` (which is a uuid4).

    '''
    name: str = 'ack'

    # defined and provided by backend
    reqid: int | str
    reqid: Union[int, str]

    # emsd id originally sent in matching request msg
    oid: str
    account: str = ''
    name: str = 'ack'


class BrokerdStatus(Struct):
class BrokerdStatus(BaseModel):

    reqid: int | str
    time_ns: int
    status: Literal[
        'open',
        'canceled',
        'fill',
        'pending',
        'error',
    ]

    account: str
    name: str = 'status'
    reqid: Union[int, str]
    time_ns: int

    # XXX: should be best effort set for every update
    account: str = ''

    # {
    # 'submitted',
    # 'cancelled',
    # 'filled',
    # }
    status: str

    filled: float = 0.0
    reason: str = ''
    remaining: float = 0.0

    # external: bool = False
    # XXX: better design/name here?
    # flag that can be set to indicate a message for an order
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    external: bool = False

    # XXX: not required schema as of yet
    broker_details: dict = field(default_factory=lambda: {
    broker_details: dict = {
        'name': '',
    })
    }


class BrokerdFill(Struct):
class BrokerdFill(BaseModel):
    '''
    A single message indicating a "fill-details" event from the broker
    if available.

    '''
    name: str = 'fill'
    reqid: Union[int, str]
    time_ns: int

    # order execution related
    action: str
    size: float
    price: float

    broker_details: dict = {}  # meta-data (eg. commissions etc.)

    # brokerd timestamp required for order mode arrow placement on x-axis

    # TODO: maybe int if we force ns?
    # we need to normalize this somehow since backends will use their
    # own format and likely across many disparate epoch clocks...
    broker_time: float
    reqid: int | str
    time_ns: int

    # order execution related
    size: float
    price: float

    name: str = 'fill'
    action: Optional[str] = None
    broker_details: dict = {}  # meta-data (eg. commissions etc.)


class BrokerdError(Struct):
class BrokerdError(BaseModel):
    '''
    Optional error type that can be relayed to emsd for error handling.

    This is still a TODO thing since we're not sure how to employ it yet.

    '''
    name: str = 'error'
    oid: str
    symbol: str
    reason: str

    # if no brokerd order request was actually submitted (eg. we errored
    # at the ``pikerd`` layer) then there will be no ``reqid`` allocated.
    reqid: Optional[int | str] = None
    reqid: Optional[Union[int, str]] = None

    name: str = 'error'
    symbol: str
    reason: str
    broker_details: dict = {}


class BrokerdPosition(Struct):
class BrokerdPosition(BaseModel):
    '''Position update event from brokerd.

    '''
    name: str = 'position'

    broker: str
    account: str
    symbol: str
    currency: str
    size: float
    avg_price: float
    currency: str = ''
    name: str = 'position'

@@ -18,75 +18,54 @@
Fake trading for forward testing.

"""
from collections import defaultdict
from contextlib import asynccontextmanager
from datetime import datetime
from operator import itemgetter
import itertools
import time
from typing import (
    Any,
    Optional,
    Callable,
)
from typing import Tuple, Optional, Callable
import uuid

from bidict import bidict
import pendulum
import trio
import tractor
from dataclasses import dataclass

from .. import data
from ..data.types import Struct
from ..data._source import Symbol
from ..pp import (
    Position,
    Transaction,
    open_trade_ledger,
    open_pps,
)
from ..data._normalize import iterticks
from ..data._source import unpack_fqsn
from ..log import get_logger
from ._messages import (
    BrokerdCancel,
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdFill,
    BrokerdPosition,
    BrokerdError,
    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
    BrokerdFill, BrokerdPosition, BrokerdError
)

from ..config import load

log = get_logger(__name__)


class PaperBoi(Struct):
    '''
    Emulates a broker order client providing approximately the same API
    and delivering an order-event response stream but with methods for
@dataclass
class PaperBoi:
    """
    Emulates a broker order client providing the same API and
    delivering an order-event response stream but with methods for
    triggering desired events based on forward testing engine
    requirements (eg open, closed, fill msgs).
    requirements.

    '''
    """
    broker: str

    ems_trades_stream: tractor.MsgStream

    # map of paper "live" orders which can be used
    # to simulate fills based on paper engine settings
    _buys: defaultdict[str, bidict]
    _sells: defaultdict[str, bidict]
    _buys: bidict
    _sells: bidict
    _reqids: bidict
    _positions: dict[str, Position]
    _trade_ledger: dict[str, Any]
    _syms: dict[str, Symbol] = {}
    _positions: dict[str, BrokerdPosition]

    # init edge case L1 spread
    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
    last_bid: tuple[float, float] = (0, 0)
    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
    last_bid: Tuple[float, float] = (0, 0)

    async def submit_limit(
        self,

@@ -96,24 +75,27 @@ class PaperBoi(Struct):
        action: str,
        size: float,
        reqid: Optional[str],

    ) -> int:
        '''
        Place an order and return integer request id provided by client.
        """Place an order and return integer request id provided by client.

        """
        is_modify: bool = False
        if reqid is None:
            reqid = str(uuid.uuid4())

        else:
            # order is already existing, this is a modify
            (oid, symbol, action, old_price) = self._reqids[reqid]
            assert old_price != price
            is_modify = True

        # register order internally
        self._reqids[reqid] = (oid, symbol, action, price)

        '''
        if action == 'alert':
            # bypass all fill simulation
            return reqid

        entry = self._reqids.get(reqid)
        if entry:
            # order is already existing, this is a modify
            (oid, symbol, action, old_price) = entry
        else:
            # register order internally
            self._reqids[reqid] = (oid, symbol, action, price)

        # TODO: net latency model
        # we checkpoint here quickly particularly
        # for dark orders since we want the dark_executed

@@ -125,18 +107,15 @@ class PaperBoi(Struct):
            size = -size

        msg = BrokerdStatus(
            status='open',
            # account=f'paper_{self.broker}',
            account='paper',
            status='submitted',
            reqid=reqid,
            broker=self.broker,
            time_ns=time.time_ns(),
            filled=0.0,
            reason='paper_trigger',
            remaining=size,

            broker_details={'name': 'paperboi'},
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

        # if we're already a clearing price simulate an immediate fill
        if (

@@ -144,28 +123,28 @@ class PaperBoi(Struct):
        ) or (
            action == 'sell' and (clear_price := self.last_bid[0]) >= price
        ):
            await self.fake_fill(
                symbol,
                clear_price,
                size,
                action,
                reqid,
                oid,
            )
            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)

        # register this submission as a paper live order
        else:
            # set the simulated order in the respective table for lookup
            # and trigger by the simulated clearing task normally
            # running ``simulate_fills()``.
            # register this submission as a paper live order

            # submit order to book simulation fill loop
            if action == 'buy':
                orders = self._buys

            elif action == 'sell':
                orders = self._sells

            # {symbol -> bidict[oid, (<price data>)]}
            orders[symbol][oid] = (price, size, reqid, action)
            # set the simulated order in the respective table for lookup
            # and trigger by the simulated clearing task normally
            # running ``simulate_fills()``.

            if is_modify:
                # remove any existing order for the old price
                orders[symbol].pop((oid, old_price))

            # buys/sells: (symbol -> (price -> order))
            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)

        return reqid


@@ -178,26 +157,26 @@ class PaperBoi(Struct):
        oid, symbol, action, price = self._reqids[reqid]

        if action == 'buy':
            self._buys[symbol].pop(oid, None)
            self._buys[symbol].pop((oid, price))
        elif action == 'sell':
            self._sells[symbol].pop(oid, None)
            self._sells[symbol].pop((oid, price))

        # TODO: net latency model
        await trio.sleep(0.05)

        msg = BrokerdStatus(
            status='canceled',
            account='paper',
            status='cancelled',
            oid=oid,
            reqid=reqid,
            broker=self.broker,
            time_ns=time.time_ns(),
            broker_details={'name': 'paperboi'},
        )
        await self.ems_trades_stream.send(msg)
        await self.ems_trades_stream.send(msg.dict())

    async def fake_fill(
        self,

        fqsn: str,
        symbol: str,
        price: float,
        size: float,
        action: str,  # one of {'buy', 'sell'}

@@ -211,21 +190,21 @@ class PaperBoi(Struct):
        remaining: float = 0,

    ) -> None:
        '''
        Pretend to fill a broker order @ price and size.
        """Pretend to fill a broker order @ price and size.

        '''
        """
        # TODO: net latency model
        await trio.sleep(0.05)
        fill_time_ns = time.time_ns()
        fill_time_s = time.time()

        fill_msg = BrokerdFill(
        msg = BrokerdFill(

            reqid=reqid,
            time_ns=fill_time_ns,
            time_ns=time.time_ns(),

            action=action,
            size=size,
            price=price,

            broker_time=datetime.now().timestamp(),
            broker_details={
                'paper_info': {

@@ -235,64 +214,79 @@ class PaperBoi(Struct):
                'name': self.broker + '_paper',
            },
        )
        log.info(f'Fake filling order:\n{fill_msg}')
        await self.ems_trades_stream.send(fill_msg)
        await self.ems_trades_stream.send(msg.dict())

        if order_complete:

            msg = BrokerdStatus(

                reqid=reqid,
                time_ns=time.time_ns(),
                # account=f'paper_{self.broker}',
                account='paper',
                status='closed',

                status='filled',
                filled=size,
                remaining=0 if order_complete else remaining,
            )
            await self.ems_trades_stream.send(msg)

        # lookup any existing position
        key = fqsn.rstrip(f'.{self.broker}')
        t = Transaction(
            fqsn=fqsn,
            sym=self._syms[fqsn],
            tid=oid,
            action=action,
            size=size,
            price=price,
            cost=0,  # TODO: cost model
            dt=pendulum.from_timestamp(fill_time_s),
            bsuid=key,

                broker_details={
                    'paper_info': {
                        'oid': oid,
                    },
                    'name': self.broker,
                },
        )
            await self.ems_trades_stream.send(msg.dict())

        with (
            open_trade_ledger(self.broker, 'paper') as ledger,
            open_pps(self.broker, 'paper', write_on_exit=True) as table
        ):
            tx = t.to_dict()
            tx.pop('sym')
            ledger.update({oid: tx})
            # Write to pps toml right now
            table.update_from_trans({oid: t})

            pp = table.pps[key]
            pp_msg = BrokerdPosition(
        # lookup any existing position
        token = f'{symbol}.{self.broker}'
        pp_msg = self._positions.setdefault(
            token,
            BrokerdPosition(
                broker=self.broker,
                account='paper',
                symbol=fqsn,
                symbol=symbol,
                # TODO: we need to look up the asset currency from
                # broker info. i guess for crypto this can be
                # inferred from the pair?
                currency=key,
                size=pp.size,
                avg_price=pp.ppu,
                currency='',
                size=0.0,
                avg_price=0,
            )
        )

            await self.ems_trades_stream.send(pp_msg)
        # "avg position price" calcs
        # TODO: eventually it'd be nice to have a small set of routines
        # to do this stuff from a sequence of cleared orders to enable
        # so called "contextual positions".
        new_size = size + pp_msg.size

        # old size minus the new size gives us size differential with
        # +ve -> increase in pp size
        # -ve -> decrease in pp size
        size_diff = abs(new_size) - abs(pp_msg.size)

        if new_size == 0:
            pp_msg.avg_price = 0

        elif size_diff > 0:
            # only update the "average position price" when the position
            # size increases not when it decreases (i.e. the position is
            # being made smaller)
            pp_msg.avg_price = (
                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
            ) / abs(new_size)

        pp_msg.size = new_size

        await self.ems_trades_stream.send(pp_msg.dict())
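
A quick worked example of the weighted average-price update above, with made-up numbers (illustrative only, not part of this changeset):

# illustrative only: long 10 units @ 100.0, then fill 5 more @ 110.0
old_size, old_avg = 10.0, 100.0
fill_size, fill_price = 5.0, 110.0

new_size = fill_size + old_size
if new_size != 0 and abs(new_size) - abs(old_size) > 0:
    # size grew, so blend the new fill into the running average
    old_avg = (abs(fill_size) * fill_price + old_avg * abs(old_size)) / abs(new_size)
# old_avg is now ~103.33; a size reduction would leave it untouched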


async def simulate_fills(
    quote_stream: tractor.MsgStream,  # noqa
    quote_stream: 'tractor.ReceiveStream',  # noqa
    client: PaperBoi,

) -> None:

    # TODO: more machinery to better simulate real-world market things:

@@ -312,116 +306,61 @@ async def simulate_fills(

    # this stream may eventually contain multiple symbols
    async for quotes in quote_stream:

        for sym, quote in quotes.items():

            for tick in iterticks(
                quote,
                # dark order price filter(s)
                types=('ask', 'bid', 'trade', 'last')
            ):
                tick_price = tick['price']
                # print(tick)
                tick_price = tick.get('price')
                ttype = tick['type']

                buys: bidict[str, tuple] = client._buys[sym]
                iter_buys = reversed(sorted(
                    buys.values(),
                    key=itemgetter(0),
                ))
                if ttype in ('ask',):

                def buy_on_ask(our_price):
                    return tick_price <= our_price

                sells: bidict[str, tuple] = client._sells[sym]
                iter_sells = sorted(
                    sells.values(),
                    key=itemgetter(0)
                )

                def sell_on_bid(our_price):
                    return tick_price >= our_price

                match tick:

                    # on an ask queue tick, only clear buy entries
                    case {
                        'price': tick_price,
                        'type': 'ask',
                    }:
                        client.last_ask = (
                            tick_price,
                            tick.get('size', client.last_ask[1]),
                        )

                        iter_entries = zip(
                            iter_buys,
                            itertools.repeat(buy_on_ask)
                        )
                    orders = client._buys.get(sym, {})

                    book_sequence = reversed(
                        sorted(orders.keys(), key=itemgetter(1)))

                    def pred(our_price):
                        return tick_price < our_price

                elif ttype in ('bid',):

                    # on a bid queue tick, only clear sell entries
                    case {
                        'price': tick_price,
                        'type': 'bid',
                    }:
                        client.last_bid = (
                            tick_price,
                            tick.get('size', client.last_bid[1]),
                        )

                        iter_entries = zip(
                            iter_sells,
                            itertools.repeat(sell_on_bid)
                        )
                    orders = client._sells.get(sym, {})
                    book_sequence = sorted(orders.keys(), key=itemgetter(1))

                    # TODO: fix this block, though it definitely
                    # costs a lot more CPU-wise
                    # - doesn't seem like clears are happening still on
                    #   "resting" limit orders?
                    case {
                        'price': tick_price,
                        'type': ('trade' | 'last'),
                    }:
                        # in the clearing price / last price case we
                        # want to iterate both sides of our book for
                        # clears since we don't know which direction the
                        # price is going to move (especially with HFT)
                        # and thus we simply interleave both sides (buys
                        # and sells) until one side clears and then
                        # break until the next tick?
                        def interleave():
                            for pair in zip(
                                iter_buys,
                                iter_sells,
                            ):
                                for order_info, pred in zip(
                                    pair,
                                    itertools.cycle([buy_on_ask, sell_on_bid]),
                                ):
                                    yield order_info, pred
                    def pred(our_price):
                        return tick_price > our_price

                        iter_entries = interleave()

                    # NOTE: all other (non-clearable) tick event types
                    # - we don't want to spin the simulated clear loop
                    #   below unnecessarily and further don't want to pop
                    #   simulated live orders prematurely.
                    case _:
                elif ttype in ('trade', 'last'):
                    # TODO: simulate actual book queues and our orders
                    # place in it, might require full L2 data?
                        continue

                # iterate all potentially clearable book prices
                # in FIFO order per side.
                for order_info, pred in iter_entries:
                    (our_price, size, reqid, action) = order_info
                # iterate book prices descending
                for oid, our_price in book_sequence:
                    if pred(our_price):

                    # print(order_info)
                    clearable = pred(our_price)
                    if clearable:
                        # pop and retrieve order info
                        oid = {
                            'buy': buys,
                            'sell': sells
                        }[action].inverse.pop(order_info)
                        # retrieve order info
                        (size, reqid, action) = orders.pop((oid, our_price))

                        # clearing price would have filled entirely
                        await client.fake_fill(
                            fqsn=sym,
                            symbol=sym,
                            # todo slippage to determine fill price
                            price=tick_price,
                            size=size,

@@ -429,6 +368,9 @@ async def simulate_fills(
                            reqid=reqid,
                            oid=oid,
                        )
                    else:
                        # prices are iterated in sorted order so we're done
                        break


async def handle_order_requests(

@@ -438,83 +380,68 @@ async def handle_order_requests(

) -> None:

    request_msg: dict
    # order_request: dict
    async for request_msg in ems_order_stream:
        match request_msg:
            case {'action': ('buy' | 'sell')}:
                order = BrokerdOrder(**request_msg)
                account = order.account

                # error on bad inputs
                reason = None
        action = request_msg['action']

        if action in {'buy', 'sell'}:

            account = request_msg['account']
                if account != 'paper':
                    reason = f'No account found:`{account}` (paper only)?'

                elif order.size == 0:
                    reason = 'Invalid size: 0'

                if reason:
                    log.error(reason)
                log.error(
                    'This is a paper account, only a `paper` selection is valid'
                )
                    await ems_order_stream.send(BrokerdError(
                        oid=order.oid,
                        symbol=order.symbol,
                        reason=reason,
                    ))
                    oid=request_msg['oid'],
                    symbol=request_msg['symbol'],
                    reason=f'Paper only. No account found: `{account}` ?',
                ).dict())
                    continue

                reqid = order.reqid or str(uuid.uuid4())

                # deliver ack that order has been submitted to broker routing
                await ems_order_stream.send(
                    BrokerdOrderAck(
                        oid=order.oid,
                        reqid=reqid,
                    )
                )
            # validate
            order = BrokerdOrder(**request_msg)

                # call our client api to submit the order
                reqid = await client.submit_limit(

                    oid=order.oid,
                    symbol=f'{order.symbol}.{client.broker}',
                symbol=order.symbol,
                    price=order.price,
                    action=order.action,
                    size=order.size,

                    # XXX: by default 0 tells ``ib_insync`` methods that
                    # there is no existing order so ask the client to create
                    # a new one (which it seems to do by allocating an int
                    # counter - collision prone..)
                    reqid=reqid,
                reqid=order.reqid,
                )
                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')

            case {'action': 'cancel'}:
            # deliver ack that order has been submitted to broker routing
            await ems_order_stream.send(
                BrokerdOrderAck(

                    # ems order request id
                    oid=order.oid,

                    # broker specific request id
                    reqid=reqid,

                ).dict()
            )

        elif action == 'cancel':
                msg = BrokerdCancel(**request_msg)

                await client.submit_cancel(
                    reqid=msg.reqid
                )

            case _:
        else:
                log.error(f'Unknown order command: {request_msg}')


_reqids: bidict[str, tuple] = {}
_buys: defaultdict[
    str,  # symbol
    bidict[
        str,  # oid
        tuple[float, float, str, str],  # order info
    ]
] = defaultdict(bidict)
_sells: defaultdict[
    str,  # symbol
    bidict[
        str,  # oid
        tuple[float, float, str, str],  # order info
    ]
] = defaultdict(bidict)
_positions: dict[str, Position] = {}


@tractor.context
async def trades_dialogue(

@@ -524,68 +451,42 @@ async def trades_dialogue(
    loglevel: str = None,

) -> None:

    tractor.log.get_console_log(loglevel)

    async with (

        data.open_feed(
            [fqsn],
            loglevel=loglevel,
        ) as feed,

    ):

        with open_pps(broker, 'paper') as table:
            # save pps in local state
            _positions.update(table.pps)

        pp_msgs: list[BrokerdPosition] = []
        pos: Position
        token: str  # f'{symbol}.{self.broker}'
        for token, pos in _positions.items():
            pp_msgs.append(BrokerdPosition(
                broker=broker,
                account='paper',
                symbol=pos.symbol.front_fqsn(),
                size=pos.size,
                avg_price=pos.ppu,
            ))

        await ctx.started((
            pp_msgs,
            ['paper'],
        ))
        # TODO: load paper positions per broker from .toml config file
        # and pass as symbol to position data mapping: ``dict[str, dict]``
        # await ctx.started(all_positions)
        await ctx.started(({}, {'paper',}))

        async with (
            ctx.open_stream() as ems_stream,
            trio.open_nursery() as n,
        ):

            client = PaperBoi(
                broker,
                ems_stream,
                _buys=_buys,
                _sells=_sells,
                _buys={},
                _sells={},

                _reqids=_reqids,
                _reqids={},

                _positions=_positions,

                # TODO: load positions from ledger file
                _trade_ledger={},
                _syms={
                    fqsn: flume.symbol
                    for fqsn, flume in feed.flumes.items()
                }
                # TODO: load paper positions from ``positions.toml``
                _positions={},
            )

            n.start_soon(
                handle_order_requests,
                client,
                ems_stream,
            )
            n.start_soon(handle_order_requests, client, ems_stream)

            # paper engine simulator clearing task
            await simulate_fills(feed.streams[broker], client)
            await simulate_fills(feed.stream, client)


@asynccontextmanager

@@ -610,7 +511,6 @@ async def open_paperboi(
    # (we likely don't need more than one proc for basic
    # simulated order clearing)
    if portal is None:
        log.info('Starting new paper-engine actor')
        portal = await tn.start_actor(
            service_name,
            enable_modules=[__name__]

@@ -623,4 +523,5 @@ async def open_paperboi(
        loglevel=loglevel,

    ) as (ctx, first):

        yield ctx, first
@@ -20,7 +20,6 @@ CLI commons.
'''
import os
from pprint import pformat
from functools import partial

import click
import trio

@@ -28,46 +27,29 @@ import tractor

from ..log import get_console_log, get_logger, colorize_json
from ..brokers import get_brokermod
from .._daemon import (
    _default_registry_host,
    _default_registry_port,
)
from .._daemon import _tractor_kwargs
from .. import config


log = get_logger('cli')
DEFAULT_BROKER = 'questrade'


@click.command()
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
@click.option('--host', '-h', default=None, help='Host addr to bind')
@click.option('--port', '-p', default=None, help='Port number to bind')
@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
@click.option(
    '--tsdb',
    is_flag=True,
    help='Enable local ``marketstore`` instance'
)
@click.option(
    '--es',
    is_flag=True,
    help='Enable local ``elasticsearch`` instance'
)
def pikerd(
    loglevel: str,
    host: str,
    port: int,
    tl: bool,
    pdb: bool,
    tsdb: bool,
    es: bool,
):
def pikerd(loglevel, host, tl, pdb, tsdb):
    '''
    Spawn the piker broker-daemon.

    '''

    from .._daemon import open_pikerd
    log = get_console_log(loglevel)

@@ -80,25 +62,32 @@ def pikerd(
        "\n"
    ))

    reg_addr: None | tuple[str, int] = None
    if host or port:
        reg_addr = (
            host or _default_registry_host,
            int(port) or _default_registry_port,
        )

    async def main():

        async with (
            open_pikerd(
                tsdb=tsdb,
                es=es,
                loglevel=loglevel,
                debug_mode=pdb,
                registry_addr=reg_addr,

            ),  # normally delivers a ``Services`` handle
            trio.open_nursery() as n,
        ):
            if tsdb:
                from piker.data._ahab import start_ahab
                from piker.data.marketstore import start_marketstore

                log.info('Spawning `marketstore` supervisor')
                ctn_ready, config, (cid, pid) = await n.start(
                    start_ahab,
                    'marketstored',
                    start_marketstore,

                )
                log.info(
                    f'`marketstore` up!\n'
                    f'`marketstored` pid: {pid}\n'
                    f'docker container id: {cid}\n'
                    f'config: {pformat(config)}'
                )

            await trio.sleep_forever()

@@ -108,46 +97,25 @@ def pikerd(
@click.group(context_settings=config._context_defaults)
@click.option(
    '--brokers', '-b',
    default=None,
    default=[DEFAULT_BROKER],
    multiple=True,
    help='Broker backend to use'
)
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--configdir', '-c', help='Configuration directory')
@click.option('--host', '-h', default=None, help='Host addr to bind')
@click.option('--port', '-p', default=None, help='Port number to bind')
@click.pass_context
def cli(
    ctx: click.Context,
    brokers: list[str],
    loglevel: str,
    tl: bool,
    configdir: str,
    host: str,
    port: int,

) -> None:
def cli(ctx, brokers, loglevel, tl, configdir):
    if configdir is not None:
        assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
        config._override_config_dir(configdir)

    ctx.ensure_object(dict)

    if not brokers:
        # (try to) load all (supposedly) supported data/broker backends
        from piker.brokers import __brokers__
        brokers = __brokers__

    if len(brokers) == 1:
        brokermods = [get_brokermod(brokers[0])]
    else:
        brokermods = [get_brokermod(broker) for broker in brokers]
    assert brokermods

    reg_addr: None | tuple[str, int] = None
    if host or port:
        reg_addr = (
            host or _default_registry_host,
            int(port) or _default_registry_port,
        )

    ctx.obj.update({
        'brokers': brokers,

@@ -157,7 +125,6 @@ def cli(
        'log': get_console_log(loglevel),
        'confdir': config._config_dir,
        'wl_path': config._watchlists_data_path,
        'registry_addr': reg_addr,
    })

    # allow enabling same loglevel in ``tractor`` machinery

@@ -167,45 +134,33 @@ def cli(

@cli.command()
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.argument('ports', nargs=-1, required=False)
@click.argument('names', nargs=-1, required=False)
@click.pass_obj
def services(config, tl, ports):

    from .._daemon import (
        open_piker_runtime,
        _default_registry_port,
        _default_registry_host,
    )

    host = _default_registry_host
    if not ports:
        ports = [_default_registry_port]
def services(config, tl, names):

    async def list_services():
        nonlocal host
        async with (
            open_piker_runtime(
                name='service_query',
                loglevel=config['loglevel'] if tl else None,
            ),
            tractor.get_arbiter(
                host=host,
                port=ports[0]
            ) as portal
        ):

        async with tractor.get_arbiter(
            *_tractor_kwargs['arbiter_addr']
        ) as portal:
            registry = await portal.run_from_ns('self', 'get_registry')
            json_d = {}
            for key, socket in registry.items():
                # name, uuid = uid
                host, port = socket
                json_d[key] = f'{host}:{port}'
            click.echo(f"{colorize_json(json_d)}")

    trio.run(list_services)
    tractor.run(
        list_services,
        name='service_query',
        loglevel=config['loglevel'] if tl else None,
        arbiter_addr=_tractor_kwargs['arbiter_addr'],
    )


def _load_clis() -> None:
    from ..data import marketstore  # noqa
    from ..data import elastic
    from ..data import cli  # noqa
    from ..brokers import cli  # noqa
    from ..ui import cli  # noqa
@@ -21,14 +21,13 @@ Broker configuration mgmt.
import platform
import sys
import os
from os import path
from os.path import dirname
import shutil
from typing import Optional
from pathlib import Path

from bidict import bidict
import toml
from piker.testing import TEST_CONFIG_DIR_PATH

from .log import get_logger

log = get_logger('broker-config')

@@ -75,13 +74,6 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
    def _posixify(name):
        return "-".join(name.split()).lower()

    # TODO: This is a hacky way to a) determine we're testing
    # and b) creating a test dir. We should aim to set a variable
    # within the tractor runtimes and store testing config data
    # outside of the users filesystem
    if "pytest" in sys.modules:
        app_name = os.path.join(app_name, TEST_CONFIG_DIR_PATH)

    # if WIN:
    if platform.system() == 'Windows':
        key = "APPDATA" if roaming else "LOCALAPPDATA"

@@ -119,10 +111,8 @@ if _parent_user:

_conf_names: set[str] = {
    'brokers',
    'pps',
    'trades',
    'watchlists',
    'paper_trades'
}

_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')

@@ -157,21 +147,19 @@ def get_conf_path(
    conf_name: str = 'brokers',

) -> str:
    '''
    Return the top-level default config path normally under
    ``~/.config/piker`` on linux for a given ``conf_name``, the config
    name.
    """Return the default config path normally under
    ``~/.config/piker`` on linux.

    Contains files such as:
    - brokers.toml
    - pp.toml
    - watchlists.toml
    - trades.toml

    # maybe coming soon ;)
    - signals.toml
    - strats.toml

    '''
    """
    assert conf_name in _conf_names
    fn = _conf_fn_w_ext(conf_name)
    return os.path.join(

@@ -185,7 +173,7 @@ def repodir():
    Return the abspath to the repo directory.

    '''
    dirpath = path.abspath(
    dirpath = os.path.abspath(
        # we're 3 levels down in **this** module file
        dirname(dirname(os.path.realpath(__file__)))
    )

@@ -194,9 +182,7 @@ def repodir():

def load(
    conf_name: str = 'brokers',
    path: str = None,

    **tomlkws,
    path: str = None

) -> (dict, str):
    '''

@@ -204,10 +190,6 @@ def load(

    '''
    path = path or get_conf_path(conf_name)

    if not os.path.isdir(_config_dir):
        Path(_config_dir).mkdir(parents=True, exist_ok=True)

    if not os.path.isfile(path):
        fn = _conf_fn_w_ext(conf_name)

@@ -220,15 +202,8 @@ def load(
        # if one exists.
        if os.path.isfile(template):
            shutil.copyfile(template, path)
        else:
            # create an empty file
            with open(path, 'x'):
                pass
    else:
        with open(path, 'r'):
            pass  # touch it

    config = toml.load(path, **tomlkws)
    config = toml.load(path)
    log.debug(f"Read config file {path}")
    return config, path

@@ -237,8 +212,6 @@ def write(
    config: dict,  # toml config as dict
    name: str = 'brokers',
    path: str = None,
    fail_empty: bool = True,
    **toml_kwargs,

) -> None:
    ''''

@@ -253,7 +226,7 @@ def write(
        log.debug(f"Creating config dir {_config_dir}")
        os.makedirs(dirname)

    if not config and fail_empty:
    if not config:
        raise ValueError(
            "Watch out you're trying to write a blank config!")

@@ -262,14 +235,11 @@ def write(
        f"{path}"
    )
    with open(path, 'w') as cf:
        return toml.dump(
            config,
            cf,
            **toml_kwargs,
        )
        return toml.dump(config, cf)
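
A minimal usage sketch of the ``load()``/``write()`` pair shown above (the ``starting_balance`` key is hypothetical, not from this changeset):

from piker import config

conf, path = config.load('brokers')            # read (or create) brokers.toml
conf.setdefault('paper', {})['starting_balance'] = 100_000  # hypothetical key
config.write(conf, name='brokers', path=path)  # round-trip it back to disk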


def load_accounts(

    providers: Optional[list[str]] = None

) -> bidict[str, Optional[str]]:

@@ -22,12 +22,6 @@ and storing data from your brokers as well as
sharing live streams over a network.

"""
import tractor
import trio

from ..log import (
    get_console_log,
)
from ._normalize import iterticks
from ._sharedmem import (
    maybe_open_shm_array,

@@ -38,6 +32,7 @@ from ._sharedmem import (
)
from .feed import (
    open_feed,
    _setup_persistent_brokerd,
)


@@ -49,40 +44,5 @@ __all__ = [
    'attach_shm_array',
    'open_shm_array',
    'get_shm_token',
    '_setup_persistent_brokerd',
]


@tractor.context
async def _setup_persistent_brokerd(
    ctx: tractor.Context,
    brokername: str,

) -> None:
    '''
    Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    '''
    get_console_log(tractor.current_actor().loglevel)

    from .feed import (
        _bus,
        get_feed_bus,
    )
    global _bus
    assert not _bus

    async with trio.open_nursery() as service_nursery:
        # assign a nursery to the feeds bus for spawning
        # background tasks from clients
        get_feed_bus(brokername, service_nursery)

        # unblock caller
        await ctx.started()

        # we pin this task to keep the feeds manager active until the
        # parent actor decides to tear it down
        await trio.sleep_forever()
@@ -37,13 +37,8 @@ from docker.models.containers import Container as DockerContainer
from docker.errors import (
    DockerException,
    APIError,
    # ContainerError,
)
import requests
from requests.exceptions import (
    ConnectionError,
    ReadTimeout,
)
from requests.exceptions import ConnectionError, ReadTimeout

from ..log import get_logger, get_console_log
from .. import config

@@ -55,8 +50,8 @@ class DockerNotStarted(Exception):
    'Prolly you dint start da daemon bruh'


class ApplicationLogError(Exception):
    'App in container reported an error in logs'
class ContainerError(RuntimeError):
    'Error reported via app-container logging level'


@acm

@@ -101,9 +96,9 @@ async def open_docker(
        # not perms?
        raise

    # finally:
    #     if client:
    #         client.close()
    finally:
        if client:
            client.close()


class Container:

@@ -124,9 +119,7 @@ class Container:

    async def process_logs_until(
        self,
        # this is a predicate func for matching log msgs emitted by the
        # underlying containerized app
        patt_matcher: Callable[[str], bool],
        patt: str,
        bp_on_msg: bool = False,
    ) -> bool:
        '''

@@ -137,14 +130,7 @@ class Container:
        seen_so_far = self.seen_so_far

        while True:
            try:
                logs = self.cntr.logs()
            except (
                docker.errors.NotFound,
                docker.errors.APIError
            ):
                return False

            entries = logs.decode().split('\n')
            for entry in entries:


@@ -152,38 +138,31 @@ class Container:
                if not entry:
                    continue

                entry = entry.strip()
                try:
                    record = json.loads(entry)

                    if 'msg' in record:
                        msg = record['msg']
                    elif 'message' in record:
                        msg = record['message']
                    else:
                        raise KeyError(f'Unexpected log format\n{record}')

                    level = record['level']

                    record = json.loads(entry.strip())
                except json.JSONDecodeError:
                    msg = entry
                    level = 'error'
                    if 'Error' in entry:
                        raise RuntimeError(entry)
                    raise

                msg = record['msg']
                level = record['level']
                if msg and entry not in seen_so_far:
                    seen_so_far.add(entry)
                    if bp_on_msg:
                        await tractor.breakpoint()

                    getattr(log, level.lower(), log.error)(f'{msg}')
                    getattr(log, level, log.error)(f'{msg}')

                    if level == 'fatal':
                        raise ApplicationLogError(msg)
                # print(f'level: {level}')
                if level in ('error', 'fatal'):
                    raise ContainerError(msg)

                if await patt_matcher(msg):
                if patt in msg:
                    return True

            # do a checkpoint so we don't block if cancelled B)
            await trio.sleep(0.1)
            await trio.sleep(0.01)

        return False

@@ -206,29 +185,12 @@ class Container:
            if 'is not running' in err.explanation:
                return False

    def hard_kill(self, start: float) -> None:
        delay = time.time() - start
        # get out the big guns, bc apparently marketstore
        # doesn't actually know how to terminate gracefully
        # :eyeroll:...
        log.error(
            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
        )
        self.try_signal('SIGKILL')
        self.cntr.wait(
            timeout=3,
            condition='not-running',
        )

    async def cancel(
        self,
        stop_msg: str,
        hard_kill: bool = False,

    ) -> None:

        cid = self.cntr.id

        # first try a graceful cancel
        log.cancel(
            f'SIGINT cancelling container: {cid}\n'

@@ -237,25 +199,15 @@ class Container:
        self.try_signal('SIGINT')

        start = time.time()
        for _ in range(6):
        for _ in range(30):

            with trio.move_on_after(0.5) as cs:
                log.cancel('polling for CNTR logs...')

                try:
                    cs.shield = True
                    await self.process_logs_until(stop_msg)
                except ApplicationLogError:
                    hard_kill = True
                else:
                    # if we aren't cancelled on above checkpoint then we
                    # assume we read the expected stop msg and
                    # terminated.
                    break

            if cs.cancelled_caught:
                # on timeout just try a hard kill after
                # a quick container sync-wait.
                hard_kill = True
                # if we aren't cancelled on above checkpoint then we
                # assume we read the expected stop msg and terminated.
                break

            try:
                log.info(f'Polling for container shutdown:\n{cid}')

@@ -266,7 +218,6 @@ class Container:
                    condition='not-running',
                )

                # graceful exit if we didn't time out
                break

            except (

@@ -278,22 +229,24 @@ class Container:
            except (
                docker.errors.APIError,
                ConnectionError,
                requests.exceptions.ConnectionError,
                trio.Cancelled,
            ):
                log.exception('Docker connection failure')
                self.hard_kill(start)
                raise

            except trio.Cancelled:
                log.exception('trio cancelled...')
                self.hard_kill(start)
                break
        else:
            hard_kill = True
            delay = time.time() - start
            log.error(
                f'Failed to kill container {cid} after {delay}s\n'
                'sending SIGKILL..'
            )
            # get out the big guns, bc apparently marketstore
            # doesn't actually know how to terminate gracefully
            # :eyeroll:...
            self.try_signal('SIGKILL')
            self.cntr.wait(
                timeout=3,
                condition='not-running',
            )

        if hard_kill:
            self.hard_kill(start)
        else:
            log.cancel(f'Container stopped: {cid}')


@@ -301,7 +254,6 @@ class Container:
async def open_ahabd(
    ctx: tractor.Context,
    endpoint: str,  # ns-pointer str-msg-type
    start_timeout: float = 1.0,

    **kwargs,


@@ -317,20 +269,17 @@ async def open_ahabd(
        (
            dcntr,
            cntr_config,
            start_lambda,
            stop_lambda,
            start_msg,
            stop_msg,
        ) = ep_func(client)
        cntr = Container(dcntr)

        with trio.move_on_after(start_timeout):
            found = await cntr.process_logs_until(start_lambda)

            if not found and dcntr not in client.containers.list():
                for entry in cntr.seen_so_far:
                    log.info(entry)
        with trio.move_on_after(1):
            found = await cntr.process_logs_until(start_msg)

        if not found and cntr not in client.containers.list():
            raise RuntimeError(
                f'Failed to start {dcntr.id} check logs deats'
                'Failed to start `marketstore` check logs deats'
            )

        await ctx.started((

@@ -340,19 +289,20 @@ async def open_ahabd(
        ))

        try:

            # TODO: we might eventually want a proxy-style msg-prot here
            # to allow remote control of containers without needing
            # callers to have root perms?
            await trio.sleep_forever()

        finally:
            await cntr.cancel(stop_lambda)
            with trio.CancelScope(shield=True):
                await cntr.cancel(stop_msg)


async def start_ahab(
    service_name: str,
    endpoint: Callable[docker.DockerClient, DockerContainer],
    start_timeout: float = 1.0,
    task_status: TaskStatus[
        tuple[
            trio.Event,

@@ -400,7 +350,6 @@ async def start_ahab(
        async with portal.open_context(
            open_ahabd,
            endpoint=str(NamespacePath.from_ref(endpoint)),
            start_timeout=start_timeout
        ) as (ctx, first):

            cid, pid, cntr_config = first
@@ -1,827 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Pre-(path)-graphics formatted x/y nd/1d rendering subsystem.

"""
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import msgspec
from msgspec import field
import numpy as np
from numpy.lib import recfunctions as rfn

from ._sharedmem import (
    ShmArray,
)
from ._pathops import (
    path_arrays_from_ohlc,
)

if TYPE_CHECKING:
    from ._dataviz import (
        Viz,
    )
    from .._profile import Profiler


class IncrementalFormatter(msgspec.Struct):
    '''
    Incrementally updating, pre-path-graphics tracking, formatter.

    Allows tracking source data state in an updateable pre-graphics
    ``np.ndarray`` format (in local process memory) as well as
    incrementally rendering from that format **to** 1d x/y for path
    generation using ``pg.functions.arrayToQPath()``.

    '''
    shm: ShmArray
    viz: Viz

    # the value to be multiplied by any index into the x/y_1d arrays
    # given the input index is based on the original source data array.
    flat_index_ratio: float = 1

    @property
    def index_field(self) -> 'str':
        '''
        Value (``str``) used to look up the "index series" from the
        underlying source ``numpy`` struct-array; delegate directly to
        the managing ``Viz``.

        '''
        return self.viz.index_field

    # Incrementally updated xy ndarray formatted data, a pre-1d
    # format which is updated and cached independently of the final
    # pre-graphics-path 1d format.
    x_nd: Optional[np.ndarray] = None
    y_nd: Optional[np.ndarray] = None

    @property
    def xy_nd(self) -> tuple[np.ndarray, np.ndarray]:
        return (
            self.x_nd[self.xy_slice],
            self.y_nd[self.xy_slice],
        )

    @property
    def xy_slice(self) -> slice:
        return slice(
            self.xy_nd_start,
            self.xy_nd_stop,
        )

    # indexes which slice into the above arrays (which are allocated
    # based on source data shm input size) and allow retrieving
    # incrementally updated data.
    xy_nd_start: int | None = None
    xy_nd_stop: int | None = None

    # TODO: eventually incrementally update 1d-pre-graphics path data?
    x_1d: np.ndarray | None = None
    y_1d: np.ndarray | None = None

    # incremental view-change state(s) tracking
    _last_vr: tuple[float, float] | None = None
    _last_ivdr: tuple[float, float] | None = None

    @property
    def index_step_size(self) -> float:
        '''
        Readonly value computed on first ``.diff()`` call.

        '''
        return self.viz.index_step()

    def diff(
        self,
        new_read: tuple[np.ndarray],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        # TODO:
        # - can the renderer just call ``Viz.read()`` directly? unpack
        #   latest source data read
        # - eventually maybe we can implement some kind of
        #   transform on the ``QPainterPath`` that will more or less
        #   detect the diff in "elements" terms? update diff state since
        #   we've now rendered paths.
        (
            xfirst,
            xlast,
            array,
            ivl,
            ivr,
            in_view,
        ) = new_read

        index = array['index']

        # if the first index in the read array is 0 then
        # it means the source buffer has been completely backfilled to
        # available space.
        src_start = index[0]
        src_stop = index[-1] + 1

        # these are the "formatted output data" indices
        # for the pre-graphics arrays.
        nd_start = self.xy_nd_start
        nd_stop = self.xy_nd_stop

        if (
            nd_start is None
        ):
            assert nd_stop is None

            # setup to do a prepend of all existing src history
            nd_start = self.xy_nd_start = src_stop
            # set us in a zero-to-append state
            nd_stop = self.xy_nd_stop = src_stop

        # compute the length diffs between the first/last index entry in
        # the input data and the last indexes we have on record from the
        # last time we updated the curve index.
        prepend_length = int(nd_start - src_start)
        append_length = int(src_stop - nd_stop)

        # blah blah blah
        # do diffing for prepend, append and last entry
        return (
            slice(src_start, nd_start),
            prepend_length,
            append_length,
            slice(nd_stop, src_stop),
        )
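
    # NOTE: illustrative walk-through of the diffing arithmetic above,
    # using made-up values (not from this changeset): if the formatter
    # last tracked nd_start=100, nd_stop=200 and a new read spans source
    # indices 90..210 then:
    #   prepend_length = int(100 - 90) = 10  -> pre_slice  = slice(90, 100)
    #   append_length  = int(210 - 200) = 10 -> post_slice = slice(200, 210)
    # i.e. the slices pick out exactly the new head and tail rows to format.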
|
||||
|
||||
def _track_inview_range(
|
||||
self,
|
||||
view_range: tuple[int, int],
|
||||
|
||||
) -> bool:
|
||||
# if a view range is passed, plan to draw the
|
||||
# source ouput that's "in view" of the chart.
|
||||
vl, vr = view_range
|
||||
zoom_or_append = False
|
||||
last_vr = self._last_vr
|
||||
|
||||
# incremental in-view data update.
|
||||
if last_vr:
|
||||
lvl, lvr = last_vr # relative slice indices
|
||||
|
||||
# TODO: detecting more specifically the interaction changes
|
||||
# last_ivr = self._last_ivdr or (vl, vr)
|
||||
# al, ar = last_ivr # abs slice indices
|
||||
# left_change = abs(x_iv[0] - al) >= 1
|
||||
# right_change = abs(x_iv[-1] - ar) >= 1
|
||||
|
||||
# likely a zoom/pan view change or data append update
|
||||
if (
|
||||
(vr - lvr) > 2
|
||||
or vl < lvl
|
||||
|
||||
# append / prepend update
|
||||
# we had an append update where the view range
|
||||
# didn't change but the data-viewed (shifted)
|
||||
# underneath, so we need to redraw.
|
||||
# or left_change and right_change and last_vr == view_range
|
||||
|
||||
# not (left_change and right_change) and ivr
|
||||
# (
|
||||
# or abs(x_iv[ivr] - livr) > 1
|
||||
):
|
||||
zoom_or_append = True
|
||||
|
||||
self._last_vr = view_range
|
||||
|
||||
return zoom_or_append
|
||||
|
||||
def format_to_1d(
|
||||
self,
|
||||
new_read: tuple,
|
||||
array_key: str,
|
||||
profiler: Profiler,
|
||||
|
||||
slice_to_inview: bool = True,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
shm = self.shm
|
||||
|
||||
(
|
||||
_,
|
||||
_,
|
||||
array,
|
||||
ivl,
|
||||
ivr,
|
||||
in_view,
|
||||
|
||||
) = new_read
|
||||
|
||||
(
|
||||
pre_slice,
|
||||
prepend_len,
|
||||
append_len,
|
||||
post_slice,
|
||||
) = self.diff(new_read)
|
||||
|
||||
# we first need to allocate xy data arrays
|
||||
# from the source data.
|
||||
if self.y_nd is None:
|
||||
self.xy_nd_start = shm._first.value
|
||||
self.xy_nd_stop = shm._last.value
|
||||
self.x_nd, self.y_nd = self.allocate_xy_nd(
|
||||
shm,
|
||||
array_key,
|
||||
)
|
||||
profiler('allocated xy history')
|
||||
|
||||
# once allocated we do incremental pre/append
|
||||
# updates from the diff with the source buffer.
|
||||
else:
|
||||
if prepend_len:
|
||||
|
||||
self.incr_update_xy_nd(
|
||||
shm,
|
||||
array_key,
|
||||
|
||||
# this is the pre-sliced, "normally expected"
|
||||
# new data that an updater would normally be
|
||||
# expected to process, however in some cases (like
|
||||
# step curves) the updater routine may want to do
|
||||
# the source history-data reading itself, so we pass
|
||||
# both here.
|
||||
shm._array[pre_slice],
|
||||
pre_slice,
|
||||
prepend_len,
|
||||
|
||||
self.xy_nd_start,
|
||||
self.xy_nd_stop,
|
||||
is_append=False,
|
||||
)
|
||||
|
||||
self.xy_nd_start -= prepend_len
|
||||
profiler(f'prepended xy history: {prepend_len}')
|
||||
|
||||
if append_len:
|
||||
self.incr_update_xy_nd(
|
||||
shm,
|
||||
array_key,
|
||||
|
||||
shm._array[post_slice],
|
||||
post_slice,
|
||||
append_len,
|
||||
|
||||
self.xy_nd_start,
|
||||
self.xy_nd_stop,
|
||||
is_append=True,
|
||||
)
|
||||
self.xy_nd_stop += append_len
|
||||
profiler(f'appended xy history: {append_len}')
|
||||
# sanity
|
||||
# slice_ln = post_slice.stop - post_slice.start
|
||||
# assert append_len == slice_ln
|
||||
|
||||
view_changed: bool = False
|
||||
view_range: tuple[int, int] = (ivl, ivr)
|
||||
if slice_to_inview:
|
||||
view_changed = self._track_inview_range(view_range)
|
||||
array = in_view
|
||||
profiler(f'{self.viz.name} view range slice {view_range}')
|
||||
|
||||
# TODO: we need to check if the last-datum-in-view is true and
# if so only slice to the 2nd last datum.
|
||||
# hist = array[:slice_to_head]
|
||||
|
||||
# XXX: WOA WTF TRACTOR DEBUGGING BUGGG
|
||||
# assert 0
|
||||
|
||||
# xy-path data transform: convert source data to a format
|
||||
# able to be passed to a `QPainterPath` rendering routine.
|
||||
if not len(array):
|
||||
# XXX: this might be why the profiler only has exits?
|
||||
return
|
||||
|
||||
# TODO: hist here should be the pre-sliced
|
||||
# x/y_data in the case where allocate_xy is
|
||||
# defined?
|
||||
x_1d, y_1d, connect = self.format_xy_nd_to_1d(
|
||||
array,
|
||||
array_key,
|
||||
view_range,
|
||||
)
|
||||
# cache/save last 1d outputs for use by other
|
||||
# readers (eg. `Viz.draw_last_datum()` in the
|
||||
# only-draw-last-uppx case).
|
||||
self.x_1d = x_1d
|
||||
self.y_1d = y_1d
|
||||
|
||||
# app_tres = None
|
||||
# if append_len:
|
||||
# appended = array[-append_len-1:slice_to_head]
|
||||
# app_tres = self.format_xy_nd_to_1d(
|
||||
# appended,
|
||||
# array_key,
|
||||
# (
|
||||
# view_range[1] - append_len + slice_to_head,
|
||||
# view_range[1]
|
||||
# ),
|
||||
# )
|
||||
# # assert (len(appended) - 1) == append_len
|
||||
# # assert len(appended) == append_len
|
||||
# print(
|
||||
# f'{self.viz.name} APPEND LEN: {append_len}\n'
|
||||
# f'{self.viz.name} APPENDED: {appended}\n'
|
||||
# f'{self.viz.name} app_tres: {app_tres}\n'
|
||||
# )
|
||||
|
||||
# update the last "in view data range"
|
||||
if len(x_1d):
|
||||
self._last_ivdr = x_1d[0], x_1d[-1]
|
||||
|
||||
profiler('.format_to_1d()')
|
||||
|
||||
return (
|
||||
x_1d,
|
||||
y_1d,
|
||||
connect,
|
||||
prepend_len,
|
||||
append_len,
|
||||
view_changed,
|
||||
# app_tres,
|
||||
)
|
||||
|
||||
###############################
|
||||
# Sub-type override interface #
|
||||
###############################
|
||||
|
||||
x_offset: np.ndarray = np.array([0])
|
||||
|
||||
# optional pre-graphics xy formatted data which
|
||||
# is incrementally updated in sync with the source data.
|
||||
# XXX: was ``.allocate_xy()``
|
||||
def allocate_xy_nd(
|
||||
self,
|
||||
src_shm: ShmArray,
|
||||
data_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray, # x
|
||||
np.ndarray  # y
|
||||
]:
|
||||
'''
|
||||
Convert the structured-array ``src_shm`` format to
|
||||
an equivalently shaped (and field-less) ``np.ndarray``.
|
||||
|
||||
Eg. a 4 field x N struct-array => (N, 4)
|
||||
|
||||
'''
|
||||
y_nd = src_shm._array[data_field].copy()
|
||||
x_nd = (
|
||||
src_shm._array[self.index_field].copy()
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
return x_nd, y_nd
|
||||
|
||||
# XXX: was ``.update_xy()``
|
||||
def incr_update_xy_nd(
|
||||
self,
|
||||
|
||||
src_shm: ShmArray,
|
||||
data_field: str,
|
||||
|
||||
new_from_src: np.ndarray, # portion of source that was updated
|
||||
|
||||
read_slc: slice,
|
||||
ln: int, # len of updated
|
||||
|
||||
nd_start: int,
|
||||
nd_stop: int,
|
||||
|
||||
is_append: bool,
|
||||
|
||||
) -> None:
|
||||
# write pushed data to flattened copy
|
||||
y_nd_new = new_from_src[data_field]
|
||||
self.y_nd[read_slc] = y_nd_new
|
||||
|
||||
x_nd_new = self.x_nd[read_slc]
|
||||
x_nd_new[:] = (
|
||||
new_from_src[self.index_field]
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
|
||||
# x_nd = self.x_nd[self.xy_slice]
|
||||
# y_nd = self.y_nd[self.xy_slice]
|
||||
# name = self.viz.name
|
||||
# if 'trade_rate' == name:
|
||||
# s = 4
|
||||
# print(
|
||||
# f'{name.upper()}:\n'
|
||||
# 'NEW_FROM_SRC:\n'
|
||||
# f'new_from_src: {new_from_src}\n\n'
|
||||
|
||||
# f'PRE self.x_nd:'
|
||||
# f'\n{list(x_nd[-s:])}\n'
|
||||
|
||||
# f'PRE self.y_nd:\n'
|
||||
# f'{list(y_nd[-s:])}\n\n'
|
||||
|
||||
# f'TO WRITE:\n'
|
||||
|
||||
# f'x_nd_new:\n'
|
||||
# f'{x_nd_new[0]}\n'
|
||||
|
||||
# f'y_nd_new:\n'
|
||||
# f'{y_nd_new}\n'
|
||||
# )
|
||||
|
||||
# XXX: was ``.format_xy()``
|
||||
def format_xy_nd_to_1d(
|
||||
self,
|
||||
|
||||
array: np.ndarray,
|
||||
array_key: str,
|
||||
vr: tuple[int, int],
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray, # 1d x
|
||||
np.ndarray, # 1d y
|
||||
np.ndarray | str, # connection array/style
|
||||
]:
|
||||
'''
|
||||
Default xy-nd array to 1d pre-graphics-path render routine.
|
||||
|
||||
Return single field column data verbatim
|
||||
|
||||
'''
|
||||
# NOTE: we don't include the very last datum which is filled in
|
||||
# normally by another graphics object.
|
||||
x_1d = array[self.index_field][:-1]
|
||||
y_1d = array[array_key][:-1]
|
||||
|
||||
# name = self.viz.name
|
||||
# if 'trade_rate' == name:
|
||||
# s = 4
|
||||
# x_nd = list(self.x_nd[self.xy_slice][-s:-1])
|
||||
# y_nd = list(self.y_nd[self.xy_slice][-s:-1])
|
||||
# print(
|
||||
# f'{name}:\n'
|
||||
# f'XY data:\n'
|
||||
# f'x: {x_nd}\n'
|
||||
# f'y: {y_nd}\n\n'
|
||||
# f'x_1d: {list(x_1d[-s:])}\n'
|
||||
# f'y_1d: {list(y_1d[-s:])}\n\n'
|
||||
|
||||
# )
|
||||
return (
|
||||
x_1d,
|
||||
y_1d,
|
||||
|
||||
# 1d connection array or style-key to
|
||||
# ``pg.functions.arrayToQPath()``
|
||||
'all',
|
||||
)
|
||||
|
||||
|
||||
class OHLCBarsFmtr(IncrementalFormatter):
|
||||
x_offset: np.ndarray = np.array([
|
||||
-0.5,
|
||||
0,
|
||||
0,
|
||||
0.5,
|
||||
])
|
||||
|
||||
fields: list[str] = field(
|
||||
default_factory=lambda: ['open', 'high', 'low', 'close']
|
||||
)
|
||||
flat_index_ratio: float = 4
|
||||
|
||||
def allocate_xy_nd(
|
||||
self,
|
||||
|
||||
ohlc_shm: ShmArray,
|
||||
data_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray, # x
|
||||
np.ndarray  # y
|
||||
]:
|
||||
'''
|
||||
Convert an input struct-array holding OHLC samples into a pair of
|
||||
flattened x, y arrays with the same size (datums wise) as the source
|
||||
data.
|
||||
|
||||
'''
|
||||
y_nd = ohlc_shm.ustruct(self.fields)
|
||||
|
||||
# generate a flat-interpolated x-domain
|
||||
x_nd = (
|
||||
np.broadcast_to(
|
||||
ohlc_shm._array[self.index_field][:, None],
|
||||
(
|
||||
ohlc_shm._array.size,
|
||||
# 4, # only ohlc
|
||||
y_nd.shape[1],
|
||||
),
|
||||
)
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
assert y_nd.any()
|
||||
|
||||
# write pushed data to flattened copy
|
||||
return (
|
||||
x_nd,
|
||||
y_nd,
|
||||
)
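# Standalone numpy sketch (illustrative only) of the (N, 4) x-support
# allocation above: each bar's scalar index is broadcast across the 4
# OHLC columns and shifted by the per-vertex x offsets.
import numpy as np

index = np.arange(3)                    # eg. 3 bars
x_offset = np.array([-0.5, 0, 0, 0.5])  # same values as OHLCBarsFmtr.x_offset

x_nd = np.broadcast_to(index[:, None], (index.size, 4)) + x_offset
# x_nd ->
# [[-0.5, 0. , 0. , 0.5],
#  [ 0.5, 1. , 1. , 1.5],
#  [ 1.5, 2. , 2. , 2.5]]
assert x_nd.shape == (3, 4)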
def incr_update_xy_nd(
|
||||
self,
|
||||
|
||||
src_shm: ShmArray,
|
||||
data_field: str,
|
||||
|
||||
new_from_src: np.ndarray, # portion of source that was updated
|
||||
|
||||
read_slc: slice,
|
||||
ln: int, # len of updated
|
||||
|
||||
nd_start: int,
|
||||
nd_stop: int,
|
||||
|
||||
is_append: bool,
|
||||
|
||||
) -> None:
|
||||
# write newly pushed data to flattened copy
|
||||
# a struct-arr is always passed in.
|
||||
new_y_nd = rfn.structured_to_unstructured(
|
||||
new_from_src[self.fields]
|
||||
)
|
||||
self.y_nd[read_slc] = new_y_nd
|
||||
|
||||
# generate same-valued-per-row x support based on y shape
|
||||
x_nd_new = self.x_nd[read_slc]
|
||||
x_nd_new[:] = np.broadcast_to(
|
||||
new_from_src[self.index_field][:, None],
|
||||
new_y_nd.shape,
|
||||
) + self.x_offset
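# Standalone sketch (illustrative only) of the struct-array -> (n, 4) write
# done above using ``numpy.lib.recfunctions``: new OHLC rows are flattened
# and written into a slice of a pre-allocated 2d buffer.
import numpy as np
from numpy.lib import recfunctions as rfn

dtype = [('open', float), ('high', float), ('low', float), ('close', float)]
new_rows = np.array([(1., 2., 0.5, 1.5), (1.5, 2.5, 1., 2.)], dtype=dtype)

y_nd = np.zeros((10, 4))
read_slc = slice(4, 6)  # where the new rows land in the larger buffer
y_nd[read_slc] = rfn.structured_to_unstructured(new_rows)
assert (y_nd[4] == [1., 2., 0.5, 1.5]).all()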
# TODO: can we drop this frame and just use the above?
|
||||
def format_xy_nd_to_1d(
|
||||
self,
|
||||
|
||||
array: np.ndarray,
|
||||
array_key: str,
|
||||
vr: tuple[int, int],
|
||||
|
||||
start: int = 0, # XXX: do we need this?
|
||||
# 0.5 is no overlap between arms, 1.0 is full overlap
|
||||
w: float = 0.16,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
'''
|
||||
More or less direct proxy to the ``numba``-fied
|
||||
``path_arrays_from_ohlc()`` (above) but with closed in kwargs
|
||||
for line spacing.
|
||||
|
||||
'''
|
||||
x, y, c = path_arrays_from_ohlc(
|
||||
array[:-1],
|
||||
start,
|
||||
bar_w=self.index_step_size,
|
||||
bar_gap=w * self.index_step_size,
|
||||
|
||||
# XXX: don't ask, due to a ``numba`` bug..
|
||||
use_time_index=(self.index_field == 'time'),
|
||||
)
|
||||
return x, y, c
|
||||
|
||||
|
||||
class OHLCBarsAsCurveFmtr(OHLCBarsFmtr):
|
||||
|
||||
def format_xy_nd_to_1d(
|
||||
self,
|
||||
|
||||
array: np.ndarray,
|
||||
array_key: str,
|
||||
vr: tuple[int, int],
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
str,
|
||||
]:
|
||||
# TODO: in the case of an existing ``.update_xy()``
|
||||
# should we be passing in array as an xy arrays tuple?
|
||||
|
||||
# 2 more datum-indexes to capture zero at end
|
||||
x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop-1]
|
||||
y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop-1]
|
||||
|
||||
# slice to view
|
||||
ivl, ivr = vr
|
||||
x_iv_flat = x_flat[ivl:ivr]
|
||||
y_iv_flat = y_flat[ivl:ivr]
|
||||
|
||||
# reshape to 1d for graphics rendering
|
||||
y_iv = y_iv_flat.reshape(-1)
|
||||
x_iv = x_iv_flat.reshape(-1)
|
||||
|
||||
return x_iv, y_iv, 'all'
|
||||
|
||||
|
||||
class StepCurveFmtr(IncrementalFormatter):
|
||||
|
||||
x_offset: np.ndarray = np.array([
|
||||
0,
|
||||
1,
|
||||
])
|
||||
|
||||
def allocate_xy_nd(
|
||||
self,
|
||||
|
||||
shm: ShmArray,
|
||||
data_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray, # x
|
||||
np.ndarray  # y
|
||||
]:
|
||||
'''
|
||||
Convert an input 1d shm array to a "step array" format
|
||||
for use by path graphics generation.
|
||||
|
||||
'''
|
||||
i = shm._array[self.index_field].copy()
|
||||
out = shm._array[data_field].copy()
|
||||
|
||||
x_out = (
|
||||
np.broadcast_to(
|
||||
i[:, None],
|
||||
(i.size, 2),
|
||||
)
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
|
||||
# fill out Nx2 array to hold each step's left + right vertices.
|
||||
y_out = np.empty(
|
||||
x_out.shape,
|
||||
dtype=out.dtype,
|
||||
)
|
||||
# fill in (current) values from source shm buffer
|
||||
y_out[:] = out[:, np.newaxis]
|
||||
|
||||
# TODO: pretty sure we can drop this?
|
||||
# start y at origin level
|
||||
# y_out[0, 0] = 0
|
||||
# y_out[self.xy_nd_start] = 0
|
||||
return x_out, y_out
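# Standalone sketch (illustrative only) of the Nx2 "step" expansion above:
# every datum index i maps to the two x-vertices (i, i + 1) and its value
# is duplicated across that pair to form the flat top of the step.
import numpy as np

i = np.array([10, 11, 12])
vals = np.array([1., 3., 2.])

x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([0, 1])
y_out = np.empty(x_out.shape, dtype=vals.dtype)
y_out[:] = vals[:, np.newaxis]

# x_out -> [[10, 11], [11, 12], [12, 13]]
# y_out -> [[1., 1.], [3., 3.], [2., 2.]]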
def incr_update_xy_nd(
|
||||
self,
|
||||
|
||||
src_shm: ShmArray,
|
||||
array_key: str,
|
||||
|
||||
new_from_src: np.ndarray, # portion of source that was updated
|
||||
read_slc: slice,
|
||||
ln: int, # len of updated
|
||||
|
||||
nd_start: int,
|
||||
nd_stop: int,
|
||||
|
||||
is_append: bool,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
slice,
|
||||
]:
|
||||
# NOTE: for a step curve we slice from one datum prior
|
||||
# to the current "update slice" to get the previous
|
||||
# "level".
|
||||
#
|
||||
# why this is needed,
|
||||
# - the current new append slice will often have a zero
|
||||
# value in the latest datum-step (at least for zero-on-new
|
||||
# cases like vlm) as per configuration of the FSP
|
||||
# engine.
|
||||
# - we need to look back a datum to get the last level which
|
||||
# will be used to terminate/complete the last step x-width
|
||||
# which will be set to pair with the last x-index THIS MEANS
|
||||
#
|
||||
# XXX: this means WE CAN'T USE the append slice since we need to
|
||||
# "look backward" one step to get the needed back-to-zero level
|
||||
# and the update data in ``new_from_src`` will only contain the
|
||||
# latest new data.
|
||||
back_1 = slice(
|
||||
read_slc.start - 1,
|
||||
read_slc.stop,
|
||||
)
|
||||
|
||||
to_write = src_shm._array[back_1]
|
||||
y_nd_new = self.y_nd[back_1]
|
||||
y_nd_new[:] = to_write[array_key][:, None]
|
||||
|
||||
x_nd_new = self.x_nd[read_slc]
|
||||
x_nd_new[:] = (
|
||||
new_from_src[self.index_field][:, None]
|
||||
+
|
||||
self.x_offset
|
||||
)
|
||||
|
||||
# XXX: uncomment for debugging
|
||||
# x_nd = self.x_nd[self.xy_slice]
|
||||
# y_nd = self.y_nd[self.xy_slice]
|
||||
# name = self.viz.name
|
||||
# if 'dolla_vlm' in name:
|
||||
# s = 4
|
||||
# print(
|
||||
# f'{name}:\n'
|
||||
# 'NEW_FROM_SRC:\n'
|
||||
# f'new_from_src: {new_from_src}\n\n'
|
||||
|
||||
# f'PRE self.x_nd:'
|
||||
# f'\n{x_nd[-s:]}\n'
|
||||
# f'PRE self.y_nd:\n'
|
||||
# f'{y_nd[-s:]}\n\n'
|
||||
|
||||
# f'TO WRITE:\n'
|
||||
# f'x_nd_new:\n'
|
||||
# f'{x_nd_new}\n'
|
||||
# f'y_nd_new:\n'
|
||||
# f'{y_nd_new}\n'
|
||||
# )
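# Tiny sketch (illustrative only) of the look-one-datum-back slice used
# above: the write region is extended left by one index so the previous
# step's level is re-written alongside the newly pushed data.
read_slc = slice(100, 103)  # the "normal" update region
back_1 = slice(read_slc.start - 1, read_slc.stop)
assert (back_1.start, back_1.stop) == (99, 103)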
def format_xy_nd_to_1d(
|
||||
self,
|
||||
|
||||
array: np.ndarray,
|
||||
array_key: str,
|
||||
vr: tuple[int, int],
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
str,
|
||||
]:
|
||||
last_t, last = array[-1][[self.index_field, array_key]]
|
||||
|
||||
start = self.xy_nd_start
|
||||
stop = self.xy_nd_stop
|
||||
|
||||
x_step = self.x_nd[start:stop]
|
||||
y_step = self.y_nd[start:stop]
|
||||
|
||||
# slice out in-view data
|
||||
ivl, ivr = vr
|
||||
|
||||
# NOTE: add an extra step to get the vertical-line-down-to-zero
|
||||
# adjacent to the last-datum graphic (filled rect).
|
||||
x_step_iv = x_step[ivl:ivr+1]
|
||||
y_step_iv = y_step[ivl:ivr+1]
|
||||
|
||||
# flatten to 1d
|
||||
x_1d = x_step_iv.reshape(x_step_iv.size)
|
||||
y_1d = y_step_iv.reshape(y_step_iv.size)
|
||||
|
||||
# debugging
|
||||
# if y_1d.any():
|
||||
# s = 6
|
||||
# print(
|
||||
# f'x_step_iv:\n{x_step_iv[-s:]}\n'
|
||||
# f'y_step_iv:\n{y_step_iv[-s:]}\n\n'
|
||||
# f'x_1d:\n{x_1d[-s:]}\n'
|
||||
# f'y_1d:\n{y_1d[-s:]}\n'
|
||||
# )
|
||||
|
||||
return x_1d, y_1d, 'all'
|
|
@ -56,7 +56,7 @@ def iterticks(
|
|||
sig = (
|
||||
time,
|
||||
tick['price'],
|
||||
tick.get('size')
|
||||
tick['size']
|
||||
)
|
||||
|
||||
if ttype == 'dark_trade':
|
||||
|
|
|
@ -1,452 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Super fast ``QPainterPath`` generation related operator routines.
|
||||
|
||||
"""
|
||||
from math import (
|
||||
ceil,
|
||||
floor,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
from numba import (
|
||||
# types,
|
||||
njit,
|
||||
float64,
|
||||
int64,
|
||||
# optional,
|
||||
)
|
||||
|
||||
# TODO: for ``numba`` typing..
|
||||
# from ._source import numba_ohlc_dtype
|
||||
from ._m4 import ds_m4
|
||||
from .._profile import (
|
||||
Profiler,
|
||||
pg_profile_enabled,
|
||||
ms_slower_then,
|
||||
)
|
||||
|
||||
|
||||
def xy_downsample(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
|
||||
x_spacer: float = 0.5,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
float,
|
||||
float,
|
||||
]:
|
||||
'''
|
||||
Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
|
||||
``uppx`` (units-per-pixel) and add space between discrete datums.
|
||||
|
||||
'''
|
||||
# downsample whenever more than 1 pixel per datum can be shown.
|
||||
# always refresh data bounds until we get diffing
|
||||
# working properly, see above..
|
||||
m4_out = ds_m4(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
)
|
||||
|
||||
if m4_out is not None:
|
||||
bins, x, y, ymn, ymx = m4_out
|
||||
# flatten output to 1d arrays suitable for path-graphics generation.
|
||||
x = np.broadcast_to(x[:, None], y.shape)
|
||||
x = (x + np.array(
|
||||
[-x_spacer, 0, 0, x_spacer]
|
||||
)).flatten()
|
||||
y = y.flatten()
|
||||
|
||||
return x, y, ymn, ymx
|
||||
|
||||
# XXX: we accept a None output for the case where the input range
|
||||
# to ``ds_m4()`` is bad (-ve) and we want to catch and debug
|
||||
# that (seemingly super rare) circumstance..
|
||||
return None
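# Standalone sketch (illustrative only) of the post-``ds_m4()`` flattening
# above: the per-bin x column is broadcast against the (n_bins, 4) extrema
# matrix, spread by ``x_spacer``, then both are flattened to 1d path inputs.
import numpy as np

x_bins = np.array([0., 1.])       # one x value per M4 bin
y_bins = np.array([               # (first, min, max, last) per bin
    [1.0, 0.5, 2.0, 1.5],
    [1.5, 1.0, 3.0, 2.5],
])
x_spacer = 0.5

x = np.broadcast_to(x_bins[:, None], y_bins.shape)
x = (x + np.array([-x_spacer, 0, 0, x_spacer])).flatten()
y = y_bins.flatten()
assert x.shape == y.shape == (8,)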
@njit(
|
||||
# NOTE: need to construct this manually for readonly
|
||||
# arrays, see https://github.com/numba/numba/issues/4511
|
||||
# (
|
||||
# types.Array(
|
||||
# numba_ohlc_dtype,
|
||||
# 1,
|
||||
# 'C',
|
||||
# readonly=True,
|
||||
# ),
|
||||
# int64,
|
||||
# types.unicode_type,
|
||||
# optional(float64),
|
||||
# ),
|
||||
nogil=True
|
||||
)
|
||||
def path_arrays_from_ohlc(
|
||||
data: np.ndarray,
|
||||
start: int64,
|
||||
bar_w: float64,
|
||||
bar_gap: float64 = 0.16,
|
||||
use_time_index: bool = True,
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index_field: str,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
'''
|
||||
Generate an array of line objects from input ohlc data.
|
||||
|
||||
'''
|
||||
size = int(data.shape[0] * 6)
|
||||
|
||||
# XXX: see this for why the dtype might have to be defined outside
|
||||
# the routine.
|
||||
# https://github.com/numba/numba/issues/4098#issuecomment-493914533
|
||||
x = np.zeros(
|
||||
shape=size,
|
||||
dtype=float64,
|
||||
)
|
||||
y, c = x.copy(), x.copy()
|
||||
|
||||
half_w: float = bar_w/2
|
||||
|
||||
# TODO: report bug for assert @
|
||||
# /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
|
||||
for i, q in enumerate(data[start:], start):
|
||||
|
||||
open = q['open']
|
||||
high = q['high']
|
||||
low = q['low']
|
||||
close = q['close']
|
||||
|
||||
if use_time_index:
|
||||
index = float64(q['time'])
|
||||
else:
|
||||
index = float64(q['index'])
|
||||
|
||||
# XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
|
||||
# index = float64(q[index_field])
|
||||
# AND this (probably)
|
||||
# open, high, low, close, index = q[
|
||||
# ['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
istart = i * 6
|
||||
istop = istart + 6
|
||||
|
||||
# x,y detail the 6 points which connect all vertexes of a ohlc bar
|
||||
mid: float = index + half_w
|
||||
x[istart:istop] = (
|
||||
index + bar_gap,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
mid,
|
||||
index + bar_w - bar_gap,
|
||||
)
|
||||
y[istart:istop] = (
|
||||
open,
|
||||
open,
|
||||
low,
|
||||
high,
|
||||
close,
|
||||
close,
|
||||
)
|
||||
|
||||
# specifies that the first edge is never connected to the
|
||||
# prior bars last edge thus providing a small "gap"/"space"
|
||||
# between bars determined by ``bar_gap``.
|
||||
c[istart:istop] = (1, 1, 1, 1, 1, 0)
|
||||
|
||||
return x, y, c
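# Usage sketch (illustrative only, assuming ``numba`` compiles the struct
# dtype below): each input bar expands to 6 path vertices and the last
# connect flag per bar is 0 so consecutive bars are never joined; the
# resulting connect array is what gets handed to eg.
# ``pg.functions.arrayToQPath()`` downstream.
import numpy as np

ohlc = np.array(
    [(0., 1., 2., 0.5, 1.5), (1., 1.5, 2.5, 1., 2.)],
    dtype=[('time', float), ('open', float), ('high', float),
           ('low', float), ('close', float)],
)
x, y, c = path_arrays_from_ohlc(ohlc, 0, 1.0)
assert x.size == y.size == c.size == 6 * ohlc.size
# the 6th vertex of every bar is left unconnected -> the inter-bar gap
assert (c.reshape(-1, 6)[:, -1] == 0).all()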
def hl2mxmn(
|
||||
ohlc: np.ndarray,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Convert a OHLC struct-array containing 'high'/'low' columns
|
||||
to a "joined" max/min 1-d array.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
hls = ohlc[[
|
||||
'low',
|
||||
'high',
|
||||
]]
|
||||
|
||||
mxmn = np.empty(2*hls.size, dtype=np.float64)
|
||||
x = np.empty(2*hls.size, dtype=np.float64)
|
||||
trace_hl(hls, mxmn, x, index[0])
|
||||
x = x + index[0]
|
||||
|
||||
return mxmn, x
|
||||
|
||||
|
||||
@njit(
|
||||
# TODO: the type annots..
|
||||
# float64[:](float64[:],),
|
||||
)
|
||||
def trace_hl(
|
||||
hl: 'np.ndarray',
|
||||
out: np.ndarray,
|
||||
x: np.ndarray,
|
||||
start: int,
|
||||
|
||||
# the "offset" values in the x-domain which
|
||||
# place the 2 output points around each ``int``
|
||||
# master index.
|
||||
margin: float = 0.43,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
"Trace" the outline of the high-low values of an ohlc sequence
|
||||
as a line such that the maximum deviation (aka dispersion) between
bars is preserved.
|
||||
|
||||
This routine is expected to modify input arrays in-place.
|
||||
|
||||
'''
|
||||
last_l = hl['low'][0]
|
||||
last_h = hl['high'][0]
|
||||
|
||||
for i in range(hl.size):
|
||||
row = hl[i]
|
||||
l, h = row['low'], row['high']
|
||||
|
||||
up_diff = h - last_l
|
||||
down_diff = last_h - l
|
||||
|
||||
if up_diff > down_diff:
|
||||
out[2*i + 1] = h
|
||||
out[2*i] = last_l
|
||||
else:
|
||||
out[2*i + 1] = l
|
||||
out[2*i] = last_h
|
||||
|
||||
last_l = l
|
||||
last_h = h
|
||||
|
||||
x[2*i] = int(i) - margin
|
||||
x[2*i + 1] = int(i) + margin
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def ohlc_flatten(
|
||||
ohlc: np.ndarray,
|
||||
use_mxmn: bool = True,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> tuple[np.ndarray, np.ndarray]:
|
||||
'''
|
||||
Convert an OHLCV struct-array into a flat ready-for-line-plotting
|
||||
1-d array that is 4 times the size with x-domain values distributed
|
||||
evenly (by 0.5 steps) over each index.
|
||||
|
||||
'''
|
||||
index = ohlc[index_field]
|
||||
|
||||
if use_mxmn:
|
||||
# traces a line optimally over highs to lows
|
||||
# using numba. NOTE: pretty sure this is faster
|
||||
# and looks about the same as the below output.
|
||||
flat, x = hl2mxmn(ohlc)
|
||||
|
||||
else:
|
||||
flat = rfn.structured_to_unstructured(
|
||||
ohlc[['open', 'high', 'low', 'close']]
|
||||
).flatten()
|
||||
|
||||
x = np.linspace(
|
||||
start=index[0] - 0.5,
|
||||
stop=index[-1] + 0.5,
|
||||
num=len(flat),
|
||||
)
|
||||
return x, flat
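# Standalone sketch (illustrative only) of the non-``hl2mxmn`` branch above:
# the 4 OHLC columns are flattened row-major and paired with an x-domain
# spread evenly across +/- 0.5 around each index.
import numpy as np
from numpy.lib import recfunctions as rfn

ohlc = np.array(
    [(10, 1., 2., 0.5, 1.5), (11, 1.5, 2.5, 1., 2.)],
    dtype=[('index', int), ('open', float), ('high', float),
           ('low', float), ('close', float)],
)
flat = rfn.structured_to_unstructured(
    ohlc[['open', 'high', 'low', 'close']]
).flatten()
x = np.linspace(
    start=ohlc['index'][0] - 0.5,
    stop=ohlc['index'][-1] + 0.5,
    num=len(flat),
)
assert len(x) == len(flat) == 4 * len(ohlc)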
def slice_from_time(
|
||||
arr: np.ndarray,
|
||||
start_t: float,
|
||||
stop_t: float,
|
||||
step: int | None = None,
|
||||
|
||||
) -> slice:
|
||||
'''
|
||||
Calculate array indices mapped from a time range and return them in
|
||||
a slice.
|
||||
|
||||
Given an input array with an epoch `'time'` series entry, calculate
|
||||
the indices which span the time range and return them in a slice. Presume
each `'time'` step increment is uniform; when the time stamp
series contains gaps (i.e. the uniform presumption is untrue) fall back to
``np.searchsorted()`` binary search to look up the appropriate
|
||||
index.
|
||||
|
||||
'''
|
||||
profiler = Profiler(
|
||||
msg='slice_from_time()',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
)
|
||||
|
||||
times = arr['time']
|
||||
t_first = floor(times[0])
|
||||
t_last = ceil(times[-1])
|
||||
|
||||
# the greatest index we can return which slices to the
|
||||
# end of the input array.
|
||||
read_i_max = arr.shape[0]
|
||||
|
||||
# TODO: require this is always passed in?
|
||||
if step is None:
|
||||
step = round(t_last - times[-2])
|
||||
if step == 0:
|
||||
step = 1
|
||||
|
||||
# compute (presumed) uniform-time-step index offsets
|
||||
i_start_t = floor(start_t)
|
||||
read_i_start = floor(((i_start_t - t_first) // step)) - 1
|
||||
|
||||
i_stop_t = ceil(stop_t)
|
||||
|
||||
# XXX: edge case -> always set stop index to last in array whenever
|
||||
# the input stop time is detected to be greater than the equiv time
|
||||
# stamp at that last entry.
|
||||
if i_stop_t >= t_last:
|
||||
read_i_stop = read_i_max
|
||||
else:
|
||||
read_i_stop = ceil((i_stop_t - t_first) // step) + 1
|
||||
|
||||
# always clip outputs to array support
|
||||
# for read start:
|
||||
# - never allow a start < the 0 index
|
||||
# - never allow an end index > the read array len
|
||||
read_i_start = min(
|
||||
max(0, read_i_start),
|
||||
read_i_max - 1,
|
||||
)
|
||||
read_i_stop = max(
|
||||
0,
|
||||
min(read_i_stop, read_i_max),
|
||||
)
|
||||
|
||||
# check for larger-then-latest calculated index for given start
|
||||
# time, in which case we do a binary search for the correct index.
|
||||
# NOTE: this is usually the result of a time series with time gaps
|
||||
# where it is expected that each index step maps to a uniform step
|
||||
# in the time stamp series.
|
||||
t_iv_start = times[read_i_start]
|
||||
if (
|
||||
t_iv_start > i_start_t
|
||||
):
|
||||
# do a binary search for the best index mapping to ``start_t``
|
||||
# given we measured an overshoot using the uniform-time-step
|
||||
# calculation from above.
|
||||
|
||||
# TODO: once we start caching these per source-array,
|
||||
# we can just overwrite ``read_i_start`` directly.
|
||||
new_read_i_start = np.searchsorted(
|
||||
times,
|
||||
i_start_t,
|
||||
side='left',
|
||||
)
|
||||
|
||||
# TODO: minimize binary search work as much as possible:
|
||||
# - cache these remap values which compensate for gaps in the
|
||||
# uniform time step basis where we calc a later start
|
||||
# index for the given input ``start_t``.
|
||||
# - can we shorten the input search sequence by heuristic?
|
||||
# up_to_arith_start = index[:read_i_start]
|
||||
|
||||
if (
|
||||
new_read_i_start <= read_i_start
|
||||
):
|
||||
# t_diff = t_iv_start - start_t
|
||||
# print(
|
||||
# f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
|
||||
# f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
|
||||
# f'diff: {t_diff}\n'
|
||||
# f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
|
||||
# )
|
||||
read_i_start = new_read_i_start - 1
|
||||
|
||||
t_iv_stop = times[read_i_stop - 1]
|
||||
if (
|
||||
t_iv_stop > i_stop_t
|
||||
):
|
||||
# t_diff = stop_t - t_iv_stop
|
||||
# print(
|
||||
# f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
|
||||
# f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
|
||||
# f'diff: {t_diff}\n'
|
||||
# # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
|
||||
# )
|
||||
new_read_i_stop = np.searchsorted(
|
||||
times[read_i_start:],
|
||||
# times,
|
||||
i_stop_t,
|
||||
side='left',
|
||||
)
|
||||
|
||||
if (
|
||||
new_read_i_stop <= read_i_stop
|
||||
):
|
||||
read_i_stop = read_i_start + new_read_i_stop + 1
|
||||
|
||||
# sanity checks for range size
|
||||
# samples = (i_stop_t - i_start_t) // step
|
||||
# index_diff = read_i_stop - read_i_start + 1
|
||||
# if index_diff > (samples + 3):
|
||||
# breakpoint()
|
||||
|
||||
# read-relative indexes: gives a slice where `shm.array[read_slc]`
|
||||
# will be the data spanning the input time range `start_t` ->
|
||||
# `stop_t`
|
||||
read_slc = slice(
|
||||
int(read_i_start),
|
||||
int(read_i_stop),
|
||||
)
|
||||
|
||||
profiler(
|
||||
'slicing complete'
|
||||
# f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
|
||||
# f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
|
||||
)
|
||||
|
||||
# NOTE: if caller needs absolute buffer indices they can
|
||||
# slice the buffer abs index like so:
|
||||
# index = arr['index']
|
||||
# abs_indx = index[read_slc]
|
||||
# abs_slc = slice(
|
||||
# int(abs_indx[0]),
|
||||
# int(abs_indx[-1]),
|
||||
# )
|
||||
|
||||
return read_slc
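# Usage sketch (illustrative only) assuming a uniform 1s epoch series: the
# returned slice maps the requested time window back onto array indices.
import numpy as np

arr = np.array(
    [(float(t),) for t in range(1_000_000, 1_000_100)],  # 100 x 1s steps
    dtype=[('time', float)],
)
read_slc = slice_from_time(arr, start_t=1_000_010, stop_t=1_000_020, step=1)
window = arr[read_slc]
# the returned window should (at least) cover the requested time span
assert window['time'][0] <= 1_000_010
assert window['time'][-1] >= 1_000_020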
|
|
@ -20,96 +20,53 @@ financial data flows.
|
|||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from collections import (
|
||||
Counter,
|
||||
defaultdict,
|
||||
)
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from collections import Counter
|
||||
import time
|
||||
from typing import (
|
||||
AsyncIterator,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from typing import TYPE_CHECKING, Optional, Union
|
||||
|
||||
import tractor
|
||||
from tractor.trionics import (
|
||||
maybe_open_nursery,
|
||||
)
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
|
||||
from ..log import (
|
||||
get_logger,
|
||||
get_console_log,
|
||||
)
|
||||
from .._daemon import maybe_spawn_daemon
|
||||
from ..log import get_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._sharedmem import (
|
||||
ShmArray,
|
||||
)
|
||||
from ._sharedmem import ShmArray
|
||||
from .feed import _FeedsBus
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
# highest frequency sample step is 1 second by default, though in
|
||||
# the future we may want to support shorter periods or a dynamic style
|
||||
# tick-event stream.
|
||||
_default_delay_s: float = 1.0
|
||||
|
||||
|
||||
class Sampler:
|
||||
class sampler:
|
||||
'''
|
||||
Global sampling engine registry.
|
||||
|
||||
Manages state for sampling events, shm incrementing and
|
||||
sample period logic.
|
||||
|
||||
This non-instantiated type is meant to be a singleton within
|
||||
a `samplerd` actor-service spawned once by the user wishing to
|
||||
time-step sample real-time quote feeds, see
|
||||
``._daemon.maybe_open_samplerd()`` and the below
|
||||
``register_with_sampler()``.
|
||||
|
||||
'''
|
||||
service_nursery: None | trio.Nursery = None
|
||||
|
||||
# TODO: we could stick these in a composed type to avoid
|
||||
# angering the "i hate module scoped variables crowd" (yawn).
|
||||
ohlcv_shms: dict[float, list[ShmArray]] = {}
|
||||
ohlcv_shms: dict[int, list[ShmArray]] = {}
|
||||
|
||||
# holds one-task-per-sample-period tasks which are spawned as-needed by
|
||||
# data feed requests with a given detected time step usually from
|
||||
# history loading.
|
||||
incr_task_cs: trio.CancelScope | None = None
|
||||
incrementers: dict[int, trio.CancelScope] = {}
|
||||
|
||||
# holds all the ``tractor.Context`` remote subscriptions for
|
||||
# a particular sample period increment event: all subscribers are
|
||||
# notified on a step.
|
||||
# subscribers: dict[int, list[tractor.MsgStream]] = {}
|
||||
subscribers: defaultdict[
|
||||
float,
|
||||
list[
|
||||
float,
|
||||
set[tractor.MsgStream]
|
||||
],
|
||||
] = defaultdict(
|
||||
lambda: [
|
||||
round(time.time()),
|
||||
set(),
|
||||
]
|
||||
)
|
||||
subscribers: dict[int, tractor.Context] = {}
|
||||
|
||||
@classmethod
|
||||
async def increment_ohlc_buffer(
|
||||
self,
|
||||
period_s: float,
|
||||
|
||||
async def increment_ohlc_buffer(
|
||||
delay_s: int,
|
||||
task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
|
||||
):
|
||||
):
|
||||
'''
|
||||
Task which inserts new bars into the provided shared memory array
|
||||
every ``period_s`` seconds.
|
||||
every ``delay_s`` seconds.
|
||||
|
||||
This task fulfills 2 purposes:
|
||||
- it takes the subscribed set of shm arrays and increments them
|
||||
|
@ -121,143 +78,100 @@ class Sampler:
|
|||
the underlying buffers will actually be incremented.
|
||||
|
||||
'''
|
||||
# # wait for brokerd to signal we should start sampling
|
||||
# await shm_incrementing(shm_token['shm_name']).wait()
|
||||
|
||||
# TODO: right now we'll spin printing bars if the last time stamp is
|
||||
# before a large period of no market activity. Likely the best way
|
||||
# to solve this is to make this task aware of the instrument's
|
||||
# tradable hours?
|
||||
|
||||
total_s: float = 0 # total seconds counted
|
||||
ad = period_s - 0.001 # compensate for trio processing time
|
||||
# adjust delay to compensate for trio processing time
|
||||
ad = min(sampler.ohlcv_shms.keys()) - 0.001
|
||||
|
||||
total_s = 0 # total seconds counted
|
||||
lowest = min(sampler.ohlcv_shms.keys())
|
||||
lowest_shm = sampler.ohlcv_shms[lowest][0]
|
||||
ad = lowest - 0.001
|
||||
|
||||
with trio.CancelScope() as cs:
|
||||
|
||||
# register this time period step as active
|
||||
sampler.incrementers[delay_s] = cs
|
||||
task_status.started(cs)
|
||||
|
||||
# sample step loop:
|
||||
# includes broadcasting to all connected consumers on every
|
||||
# new sample step as well incrementing any registered
|
||||
# buffers by registered sample period.
|
||||
while True:
|
||||
# TODO: do we want to support dynamically
|
||||
# adding a "lower" lowest increment period?
|
||||
await trio.sleep(ad)
|
||||
total_s += period_s
|
||||
total_s += lowest
|
||||
|
||||
# increment all subscribed shm arrays
|
||||
# TODO:
|
||||
# - this in ``numba``
|
||||
# - just lookup shms for this step instead of iterating?
|
||||
|
||||
i_epoch = round(time.time())
|
||||
broadcasted: set[float] = set()
|
||||
|
||||
# print(f'epoch: {i_epoch} -> REGISTRY {self.ohlcv_shms}')
|
||||
for shm_period_s, shms in self.ohlcv_shms.items():
|
||||
|
||||
# short-circuit on any not-ready because slower sample
|
||||
# rate consuming shm buffers.
|
||||
if total_s % shm_period_s != 0:
|
||||
# print(f'skipping `{shm_period_s}s` sample update')
|
||||
for delay_s, shms in sampler.ohlcv_shms.items():
|
||||
if total_s % delay_s != 0:
|
||||
continue
|
||||
|
||||
# update last epoch stamp for this period group
|
||||
if shm_period_s not in broadcasted:
|
||||
sub_pair = self.subscribers[shm_period_s]
|
||||
sub_pair[0] = i_epoch
|
||||
broadcasted.add(shm_period_s)
|
||||
|
||||
# TODO: ``numba`` this!
|
||||
for shm in shms:
|
||||
# print(f'UPDATE {shm_period_s}s STEP for {shm.token}')
|
||||
# TODO: in theory we could make this faster by copying the
|
||||
# "last" readable value into the underlying larger buffer's
|
||||
# next value and then incrementing the counter instead of
|
||||
# using ``.push()``?
|
||||
|
||||
# append new entry to buffer thus "incrementing"
|
||||
# the bar
|
||||
# append new entry to buffer thus "incrementing" the bar
|
||||
array = shm.array
|
||||
last = array[-1:][shm._write_fields].copy()
|
||||
# (index, t, close) = last[0][['index', 'time', 'close']]
|
||||
(t, close) = last[0][['time', 'close']]
|
||||
|
||||
# guard against startup backfilling races where
|
||||
# the buffer has not yet been filled.
|
||||
if not last.size:
|
||||
continue
|
||||
|
||||
(t, close) = last[0][[
|
||||
'time',
|
||||
'close',
|
||||
]]
|
||||
|
||||
next_t = t + shm_period_s
|
||||
|
||||
if shm_period_s <= 1:
|
||||
next_t = i_epoch
|
||||
|
||||
# this copies non-std fields (eg. vwap) from the
|
||||
# last datum
|
||||
last[[
|
||||
'time',
|
||||
|
||||
'open',
|
||||
'high',
|
||||
'low',
|
||||
'close',
|
||||
|
||||
'volume',
|
||||
]][0] = (
|
||||
# epoch timestamp
|
||||
next_t,
|
||||
|
||||
# OHLC
|
||||
close,
|
||||
close,
|
||||
close,
|
||||
close,
|
||||
|
||||
0, # vlm
|
||||
)
|
||||
|
||||
# TODO: in theory we could make this faster by
|
||||
# copying the "last" readable value into the
|
||||
# underlying larger buffer's next value and then
|
||||
# incrementing the counter instead of using
|
||||
# ``.push()``?
|
||||
# this copies non-std fields (eg. vwap) from the last datum
|
||||
last[
|
||||
['time', 'volume', 'open', 'high', 'low', 'close']
|
||||
][0] = (t + delay_s, 0, close, close, close, close)
|
||||
|
||||
# write to the buffer
|
||||
shm.push(last)
|
||||
|
||||
# broadcast increment msg to all updated subs per period
|
||||
for shm_period_s in broadcasted:
|
||||
await self.broadcast(
|
||||
period_s=shm_period_s,
|
||||
time_stamp=i_epoch,
|
||||
)
|
||||
await broadcast(delay_s, shm=lowest_shm)
|
||||
|
||||
@classmethod
|
||||
async def broadcast(
|
||||
self,
|
||||
period_s: float,
|
||||
time_stamp: float | None = None,
|
||||
|
||||
) -> None:
|
||||
async def broadcast(
|
||||
delay_s: int,
|
||||
shm: Optional[ShmArray] = None,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Broadcast the period size and last index step value to all
|
||||
Broadcast the given ``shm: ShmArray``'s buffer index step to any
|
||||
subscribers for a given sample period.
|
||||
|
||||
The sent msg will include the first and last index which slice into
|
||||
the buffer's non-empty data.
|
||||
|
||||
'''
|
||||
pair = self.subscribers[period_s]
|
||||
subs = sampler.subscribers.get(delay_s, ())
|
||||
|
||||
last_ts, subs = pair
|
||||
first = last = -1
|
||||
|
||||
if shm is None:
|
||||
periods = sampler.ohlcv_shms.keys()
|
||||
# if this is an update triggered by a history update there
|
||||
# might not actually be any sampling bus setup since there's
|
||||
# no "live feed" active yet.
|
||||
if periods:
|
||||
lowest = min(periods)
|
||||
shm = sampler.ohlcv_shms[lowest][0]
|
||||
first = shm._first.value
|
||||
last = shm._last.value
|
||||
|
||||
task = trio.lowlevel.current_task()
|
||||
log.debug(
|
||||
f'SUBS {self.subscribers}\n'
|
||||
f'PAIR {pair}\n'
|
||||
f'TASK: {task}: {id(task)}\n'
|
||||
f'broadcasting {period_s} -> {last_ts}\n'
|
||||
# f'consumers: {subs}'
|
||||
)
|
||||
borked: set[tractor.MsgStream] = set()
|
||||
for stream in subs:
|
||||
try:
|
||||
await stream.send({
|
||||
'index': time_stamp or last_ts,
|
||||
'period': period_s,
|
||||
'first': first,
|
||||
'last': last,
|
||||
'index': last,
|
||||
})
|
||||
except (
|
||||
trio.BrokenResourceError,
|
||||
|
@ -266,9 +180,6 @@ class Sampler:
|
|||
log.error(
|
||||
f'{stream._ctx.chan.uid} dropped connection'
|
||||
)
|
||||
borked.add(stream)
|
||||
|
||||
for stream in borked:
|
||||
try:
|
||||
subs.remove(stream)
|
||||
except ValueError:
|
||||
|
@ -276,255 +187,53 @@ class Sampler:
|
|||
f'{stream._ctx.chan.uid} sub already removed!?'
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def broadcast_all(self) -> None:
|
||||
for period_s in self.subscribers:
|
||||
await self.broadcast(period_s)
|
||||
|
||||
|
||||
@tractor.context
|
||||
async def register_with_sampler(
|
||||
async def iter_ohlc_periods(
|
||||
ctx: tractor.Context,
|
||||
period_s: float,
|
||||
shms_by_period: dict[float, dict] | None = None,
|
||||
|
||||
open_index_stream: bool = True, # open a 2way stream for sample step msgs?
|
||||
sub_for_broadcasts: bool = True, # sampler side to send step updates?
|
||||
delay_s: int,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Subscribe to OHLC sampling "step" events: when the time
|
||||
aggregation period increments, this event stream emits an index
|
||||
event.
|
||||
|
||||
get_console_log(tractor.current_actor().loglevel)
|
||||
incr_was_started: bool = False
|
||||
|
||||
try:
|
||||
async with maybe_open_nursery(
|
||||
Sampler.service_nursery
|
||||
) as service_nursery:
|
||||
|
||||
# init startup, create (actor-)local service nursery and start
|
||||
# increment task
|
||||
Sampler.service_nursery = service_nursery
|
||||
|
||||
# always ensure a period subs entry exists
|
||||
last_ts, subs = Sampler.subscribers[float(period_s)]
|
||||
|
||||
async with trio.Lock():
|
||||
if Sampler.incr_task_cs is None:
|
||||
Sampler.incr_task_cs = await service_nursery.start(
|
||||
Sampler.increment_ohlc_buffer,
|
||||
1.,
|
||||
)
|
||||
incr_was_started = True
|
||||
|
||||
# insert the base 1s period (for OHLC style sampling) into
|
||||
# the increment buffer set to update and shift every second.
|
||||
if shms_by_period is not None:
|
||||
from ._sharedmem import (
|
||||
attach_shm_array,
|
||||
_Token,
|
||||
)
|
||||
for period in shms_by_period:
|
||||
|
||||
# load and register shm handles
|
||||
shm_token_msg = shms_by_period[period]
|
||||
shm = attach_shm_array(
|
||||
_Token.from_msg(shm_token_msg),
|
||||
readonly=False,
|
||||
)
|
||||
shms_by_period[period] = shm
|
||||
Sampler.ohlcv_shms.setdefault(period, []).append(shm)
|
||||
|
||||
assert Sampler.ohlcv_shms
|
||||
|
||||
# unblock caller
|
||||
await ctx.started(set(Sampler.ohlcv_shms.keys()))
|
||||
|
||||
if open_index_stream:
|
||||
try:
|
||||
'''
|
||||
# add our subscription
|
||||
subs = sampler.subscribers.setdefault(delay_s, [])
|
||||
await ctx.started()
|
||||
async with ctx.open_stream() as stream:
|
||||
if sub_for_broadcasts:
|
||||
subs.add(stream)
|
||||
subs.append(stream)
|
||||
|
||||
# except broadcast requests from the subscriber
|
||||
async for msg in stream:
|
||||
if msg == 'broadcast_all':
|
||||
await Sampler.broadcast_all()
|
||||
finally:
|
||||
if sub_for_broadcasts:
|
||||
subs.remove(stream)
|
||||
else:
|
||||
# if no shms are passed in we just wait until cancelled
|
||||
# by caller.
|
||||
try:
|
||||
# stream and block until cancelled
|
||||
await trio.sleep_forever()
|
||||
|
||||
finally:
|
||||
# TODO: why tf isn't this working?
|
||||
if shms_by_period is not None:
|
||||
for period, shm in shms_by_period.items():
|
||||
Sampler.ohlcv_shms[period].remove(shm)
|
||||
|
||||
if incr_was_started:
|
||||
Sampler.incr_task_cs.cancel()
|
||||
Sampler.incr_task_cs = None
|
||||
|
||||
|
||||
async def spawn_samplerd(
|
||||
|
||||
loglevel: str | None = None,
|
||||
**extra_tractor_kwargs
|
||||
|
||||
) -> bool:
|
||||
'''
|
||||
Daemon-side service task: start a sampling daemon for common step
|
||||
update and increment count write and stream broadcasting.
|
||||
|
||||
'''
|
||||
from piker._daemon import Services
|
||||
|
||||
dname = 'samplerd'
|
||||
log.info(f'Spawning `{dname}`')
|
||||
|
||||
# singleton lock creation of ``samplerd`` since we only ever want
|
||||
# one daemon per ``pikerd`` proc tree.
|
||||
# TODO: make this built-into the service api?
|
||||
async with Services.locks[dname + '_singleton']:
|
||||
|
||||
if dname not in Services.service_tasks:
|
||||
|
||||
portal = await Services.actor_n.start_actor(
|
||||
dname,
|
||||
enable_modules=[
|
||||
'piker.data._sampling',
|
||||
],
|
||||
loglevel=loglevel,
|
||||
debug_mode=Services.debug_mode, # set by pikerd flag
|
||||
**extra_tractor_kwargs
|
||||
try:
|
||||
subs.remove(stream)
|
||||
except ValueError:
|
||||
log.error(
|
||||
f'iOHLC step stream was already dropped {ctx.chan.uid}?'
|
||||
)
|
||||
|
||||
await Services.start_service_task(
|
||||
dname,
|
||||
portal,
|
||||
register_with_sampler,
|
||||
period_s=1,
|
||||
sub_for_broadcasts=False,
|
||||
)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_samplerd(
|
||||
|
||||
loglevel: str | None = None,
|
||||
**kwargs,
|
||||
|
||||
) -> tractor._portal.Portal: # noqa
|
||||
'''
|
||||
Client-side helper to maybe startup the ``samplerd`` service
|
||||
under the ``pikerd`` tree.
|
||||
|
||||
'''
|
||||
dname = 'samplerd'
|
||||
|
||||
async with maybe_spawn_daemon(
|
||||
dname,
|
||||
service_task_target=spawn_samplerd,
|
||||
spawn_args={'loglevel': loglevel},
|
||||
loglevel=loglevel,
|
||||
**kwargs,
|
||||
|
||||
) as portal:
|
||||
yield portal
|
||||
|
||||
|
||||
@acm
|
||||
async def open_sample_stream(
|
||||
period_s: float,
|
||||
shms_by_period: dict[float, dict] | None = None,
|
||||
open_index_stream: bool = True,
|
||||
sub_for_broadcasts: bool = True,
|
||||
|
||||
cache_key: str | None = None,
|
||||
allow_new_sampler: bool = True,
|
||||
|
||||
) -> AsyncIterator[dict[str, float]]:
|
||||
'''
|
||||
Subscribe to OHLC sampling "step" events: when the time aggregation
|
||||
period increments, this event stream emits an index event.
|
||||
|
||||
This is a client-side endpoint that does all the work of ensuring
|
||||
the `samplerd` actor is up and that multi-consumer-tasks are given
|
||||
a broadcast stream when possible.
|
||||
|
||||
'''
|
||||
# TODO: wrap this manager with the following to make it cached
|
||||
# per client-multitasks entry.
|
||||
# maybe_open_context(
|
||||
# acm_func=partial(
|
||||
# portal.open_context,
|
||||
# register_with_sampler,
|
||||
# ),
|
||||
# key=cache_key or period_s,
|
||||
# )
|
||||
# if cache_hit:
|
||||
# # add a new broadcast subscription for the quote stream
|
||||
# # if this feed is likely already in use
|
||||
# async with istream.subscribe() as bistream:
|
||||
# yield bistream
|
||||
# else:
|
||||
|
||||
async with (
|
||||
# XXX: this should be singleton on a host,
|
||||
# a lone broker-daemon per provider should be
|
||||
# created for all practical purposes
|
||||
maybe_open_samplerd() as portal,
|
||||
|
||||
portal.open_context(
|
||||
register_with_sampler,
|
||||
**{
|
||||
'period_s': period_s,
|
||||
'shms_by_period': shms_by_period,
|
||||
'open_index_stream': open_index_stream,
|
||||
'sub_for_broadcasts': sub_for_broadcasts,
|
||||
},
|
||||
) as (ctx, first)
|
||||
):
|
||||
async with (
|
||||
ctx.open_stream() as istream,
|
||||
|
||||
# TODO: we don't need this task-bcasting right?
|
||||
# istream.subscribe() as istream,
|
||||
):
|
||||
yield istream
|
||||
|
||||
|
||||
async def sample_and_broadcast(
|
||||
|
||||
bus: _FeedsBus, # noqa
|
||||
rt_shm: ShmArray,
|
||||
hist_shm: ShmArray,
|
||||
shm: ShmArray,
|
||||
quote_stream: trio.abc.ReceiveChannel,
|
||||
brokername: str,
|
||||
sum_tick_vlm: bool = True,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
`brokerd`-side task which writes latest datum sampled data.
|
||||
|
||||
This task is meant to run in the same actor (mem space) as the
|
||||
`brokerd` real-time quote feed which is being sampled to
|
||||
a ``ShmArray`` buffer.
|
||||
|
||||
'''
|
||||
log.info("Started shared mem bar writer")
|
||||
|
||||
overruns = Counter()
|
||||
|
||||
# iterate stream delivered by broker
|
||||
async for quotes in quote_stream:
|
||||
# print(quotes)
|
||||
|
||||
# TODO: ``numba`` this!
|
||||
for broker_symbol, quote in quotes.items():
|
||||
# TODO: in theory you can send the IPC msg *before* writing
|
||||
|
@ -548,9 +257,6 @@ async def sample_and_broadcast(
|
|||
|
||||
last = tick['price']
|
||||
|
||||
# more compact inline-way to do this assignment
|
||||
# to both buffers?
|
||||
for shm in [rt_shm, hist_shm]:
|
||||
# update last entry
|
||||
# benchmarked in the 4-5 us range
|
||||
o, high, low, v = shm.array[-1][
|
||||
|
@ -587,29 +293,29 @@ async def sample_and_broadcast(
|
|||
volume,
|
||||
)
|
||||
|
||||
# TODO: PUT THIS IN A ``_FeedsBus.broadcast()`` method!
|
||||
# XXX: we need to be very cautious here that no
|
||||
# context-channel is left lingering which doesn't have
|
||||
# a far end receiver actor-task. In such a case you can
|
||||
# end up triggering backpressure which will
|
||||
# eventually block this producer end of the feed and
|
||||
# thus other consumers still attached.
|
||||
sub_key: str = broker_symbol.lower()
|
||||
subs: list[
|
||||
tuple[
|
||||
tractor.MsgStream | trio.MemorySendChannel,
|
||||
float | None, # tick throttle in Hz
|
||||
Union[tractor.MsgStream, trio.MemorySendChannel],
|
||||
tractor.Context,
|
||||
Optional[float], # tick throttle in Hz
|
||||
]
|
||||
] = bus.get_subs(sub_key)
|
||||
] = bus._subscribers[broker_symbol.lower()]
|
||||
|
||||
# NOTE: by default the broker backend doesn't append
|
||||
# it's own "name" into the fqsn schema (but maybe it
|
||||
# should?) so we have to manually generate the correct
|
||||
# key here.
|
||||
fqsn = f'{broker_symbol}.{brokername}'
|
||||
bsym = f'{broker_symbol}.{brokername}'
|
||||
lags: int = 0
|
||||
|
||||
for (stream, tick_throttle) in subs.copy():
|
||||
for (stream, ctx, tick_throttle) in subs:
|
||||
|
||||
try:
|
||||
with trio.move_on_after(0.2) as cs:
|
||||
if tick_throttle:
|
||||
|
@ -617,39 +323,47 @@ async def sample_and_broadcast(
|
|||
# pushes to the ``uniform_rate_send()`` below.
|
||||
try:
|
||||
stream.send_nowait(
|
||||
(fqsn, quote)
|
||||
(bsym, quote)
|
||||
)
|
||||
except trio.WouldBlock:
|
||||
overruns[sub_key] += 1
|
||||
ctx = stream._ctx
|
||||
chan = ctx.chan
|
||||
|
||||
if ctx:
|
||||
log.warning(
|
||||
f'Feed OVERRUN {sub_key}'
|
||||
'@{bus.brokername} -> \n'
|
||||
f'feed @ {chan.uid}\n'
|
||||
f'throttle = {tick_throttle} Hz'
|
||||
f'Feed overrun {bus.brokername} ->'
|
||||
f'{chan.uid} !!!'
|
||||
)
|
||||
|
||||
if overruns[sub_key] > 6:
|
||||
else:
|
||||
key = id(stream)
|
||||
overruns[key] += 1
|
||||
log.warning(
|
||||
f'Feed overrun {broker_symbol}'
|
||||
'@{bus.brokername} -> '
|
||||
f'feed @ {tick_throttle} Hz'
|
||||
)
|
||||
if overruns[key] > 6:
|
||||
# TODO: should we check for the
|
||||
# context being cancelled? this
|
||||
# could happen but the
|
||||
# channel-ipc-pipe is still up.
|
||||
if (
|
||||
not chan.connected()
|
||||
or ctx._cancel_called
|
||||
):
|
||||
if not chan.connected():
|
||||
log.warning(
|
||||
'Dropping broken consumer:\n'
|
||||
f'{sub_key}:'
|
||||
f'{broker_symbol}:'
|
||||
f'{ctx.cid}@{chan.uid}'
|
||||
)
|
||||
await stream.aclose()
|
||||
raise trio.BrokenResourceError
|
||||
else:
|
||||
log.warning(
|
||||
'Feed getting overrun bro!\n'
|
||||
f'{broker_symbol}:'
|
||||
f'{ctx.cid}@{chan.uid}'
|
||||
)
|
||||
continue
|
||||
|
||||
else:
|
||||
await stream.send(
|
||||
{fqsn: quote}
|
||||
{bsym: quote}
|
||||
)
|
||||
|
||||
if cs.cancelled_caught:
|
||||
|
@ -662,7 +376,6 @@ async def sample_and_broadcast(
|
|||
trio.ClosedResourceError,
|
||||
trio.EndOfChannel,
|
||||
):
|
||||
ctx = stream._ctx
|
||||
chan = ctx.chan
|
||||
if ctx:
|
||||
log.warning(
|
||||
|
@ -678,69 +391,20 @@ async def sample_and_broadcast(
|
|||
# so far seems like no since this should all
|
||||
# be single-threaded. Doing it anyway though
|
||||
# since there seems to be some kinda race..
|
||||
bus.remove_subs(
|
||||
sub_key,
|
||||
{(stream, tick_throttle)},
|
||||
try:
|
||||
subs.remove((stream, tick_throttle))
|
||||
except ValueError:
|
||||
log.error(
|
||||
f'Stream was already removed from subs!?\n'
|
||||
f'{broker_symbol}:'
|
||||
f'{ctx.cid}@{chan.uid}'
|
||||
)
|
||||
|
||||
|
||||
# a working tick-type-classes template
|
||||
_tick_groups = {
|
||||
'clears': {'trade', 'dark_trade', 'last'},
|
||||
'bids': {'bid', 'bsize'},
|
||||
'asks': {'ask', 'asize'},
|
||||
}
|
||||
|
||||
|
||||
def frame_ticks(
|
||||
first_quote: dict,
|
||||
last_quote: dict,
|
||||
ticks_by_type: dict,
|
||||
) -> None:
|
||||
# append quotes since last iteration into the last quote's
|
||||
# tick array/buffer.
|
||||
ticks = last_quote.get('ticks')
|
||||
|
||||
# TODO: once we decide to get fancy really we should
|
||||
# have a shared mem tick buffer that is just
|
||||
# continually filled and the UI just reads from it
# at its display rate.
|
||||
if ticks:
|
||||
# TODO: do we need this any more or can we just
|
||||
# expect the receiver to unwind the below
|
||||
# `ticks_by_type: dict`?
|
||||
# => undwinding would potentially require a
|
||||
# `dict[str, set | list]` instead with an
|
||||
# included `'types' field which is an (ordered)
|
||||
# set of tick type fields in the order which
|
||||
# types arrived?
|
||||
first_quote['ticks'].extend(ticks)
|
||||
|
||||
# XXX: build a tick-by-type table of lists
|
||||
# of tick messages. This allows for less
|
||||
# iteration on the receiver side by allowing for
|
||||
# a single "latest tick event" look up by
|
||||
# indexing the last entry in each sub-list.
|
||||
# tbt = {
|
||||
# 'types': ['bid', 'asize', 'last', .. '<type_n>'],
|
||||
|
||||
# 'bid': [tick0, tick1, tick2, .., tickn],
|
||||
# 'asize': [tick0, tick1, tick2, .., tickn],
|
||||
# 'last': [tick0, tick1, tick2, .., tickn],
|
||||
# ...
|
||||
# '<type_n>': [tick0, tick1, tick2, .., tickn],
|
||||
# }
|
||||
|
||||
# append in reverse FIFO order for in-order iteration on
|
||||
# receiver side.
|
||||
for tick in ticks:
|
||||
ttype = tick['type']
|
||||
ticks_by_type[ttype].append(tick)
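# Usage sketch (illustrative only): accumulating a second quote msg into one
# outgoing frame plus the per-tick-type table consumed on the receiver side.
from collections import defaultdict

first_quote = {'ticks': [{'type': 'bid', 'price': 99.0, 'size': 10}]}
last_quote = {'ticks': [
    {'type': 'trade', 'price': 100.0, 'size': 1},
    {'type': 'ask', 'price': 100.5, 'size': 5},
]}
ticks_by_type: defaultdict[str, list[dict]] = defaultdict(list)

frame_ticks(first_quote, last_quote, ticks_by_type)

assert len(first_quote['ticks']) == 3   # last quote's ticks were appended
assert [t['price'] for t in ticks_by_type['trade']] == [100.0]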
# TODO: a less naive throttler, here's some snippets:
|
||||
# token bucket by njs:
|
||||
# https://gist.github.com/njsmith/7ea44ec07e901cb78ebe1dd8dd846cb9
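# One possible shape (illustrative only, not piker's implementation) of the
# "less naive" token-bucket throttler referenced above: permits accrue at
# ``rate`` per second up to ``burst`` and each send consumes one.
import time
import trio


class TokenBucket:
    def __init__(self, rate: float, burst: float = 1.0):
        self.rate = rate
        self.burst = burst
        self._tokens = burst
        self._last = time.monotonic()

    async def acquire(self) -> None:
        while True:
            now = time.monotonic()
            # accrue tokens for the elapsed time, capped at the burst size
            self._tokens = min(
                self.burst,
                self._tokens + (now - self._last) * self.rate,
            )
            self._last = now
            if self._tokens >= 1:
                self._tokens -= 1
                return
            # sleep just long enough for one token to accrue
            await trio.sleep((1 - self._tokens) / self.rate)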
async def uniform_rate_send(
|
||||
|
||||
rate: float,
|
||||
|
@ -751,9 +415,6 @@ async def uniform_rate_send(
|
|||
|
||||
) -> None:
|
||||
|
||||
# try not to error-out on overruns of the subscribed (chart) client
|
||||
stream._ctx._backpressure = True
|
||||
|
||||
# TODO: compute the approx overhead latency per cycle
|
||||
left_to_sleep = throttle_period = 1/rate - 0.000616
|
||||
|
||||
|
@ -763,12 +424,6 @@ async def uniform_rate_send(
|
|||
diff = 0
|
||||
|
||||
task_status.started()
|
||||
ticks_by_type: defaultdict[
|
||||
str,
|
||||
list[dict],
|
||||
] = defaultdict(list)
|
||||
|
||||
clear_types = _tick_groups['clears']
|
||||
|
||||
while True:
|
||||
|
||||
|
@ -787,17 +442,34 @@ async def uniform_rate_send(
|
|||
|
||||
if not first_quote:
|
||||
first_quote = last_quote
|
||||
# first_quote['tbt'] = ticks_by_type
|
||||
|
||||
if (throttle_period - diff) > 0:
|
||||
# received a quote but the send cycle period hasn't yet
|
||||
# expired we aren't supposed to send yet so append
|
||||
# to the tick frame.
|
||||
frame_ticks(
|
||||
first_quote,
|
||||
last_quote,
|
||||
ticks_by_type,
|
||||
)
|
||||
|
||||
# append quotes since last iteration into the last quote's
|
||||
# tick array/buffer.
|
||||
ticks = last_quote.get('ticks')
|
||||
|
||||
# XXX: idea for frame type data structure we could
|
||||
# use on the wire instead of a simple list?
|
||||
# frames = {
|
||||
# 'index': ['type_a', 'type_c', 'type_n', 'type_n'],
|
||||
|
||||
# 'type_a': [tick0, tick1, tick2, .., tickn],
|
||||
# 'type_b': [tick0, tick1, tick2, .., tickn],
|
||||
# 'type_c': [tick0, tick1, tick2, .., tickn],
|
||||
# ...
|
||||
# 'type_n': [tick0, tick1, tick2, .., tickn],
|
||||
# }
|
||||
|
||||
# TODO: once we decide to get fancy really we should
|
||||
# have a shared mem tick buffer that is just
|
||||
# continually filled and the UI just reads from it
# at its display rate.
|
||||
if ticks:
|
||||
first_quote['ticks'].extend(ticks)
|
||||
|
||||
# send cycle isn't due yet so continue waiting
|
||||
continue
|
||||
|
@ -814,35 +486,12 @@ async def uniform_rate_send(
|
|||
# received quote ASAP.
|
||||
sym, first_quote = await quote_stream.receive()
|
||||
|
||||
frame_ticks(
|
||||
first_quote,
|
||||
first_quote,
|
||||
ticks_by_type,
|
||||
)
|
||||
|
||||
# we have a quote already so send it now.
|
||||
|
||||
with trio.move_on_after(throttle_period) as cs:
|
||||
while (
|
||||
not set(ticks_by_type).intersection(clear_types)
|
||||
):
|
||||
try:
|
||||
sym, last_quote = await quote_stream.receive()
|
||||
except trio.EndOfChannel:
|
||||
log.exception(f"feed for {stream} ended?")
|
||||
break
|
||||
|
||||
frame_ticks(
|
||||
first_quote,
|
||||
last_quote,
|
||||
ticks_by_type,
|
||||
)
|
||||
|
||||
# measured_rate = 1 / (time.time() - last_send)
|
||||
# log.info(
|
||||
# f'`{sym}` throttled send hz: {round(measured_rate, ndigits=1)}'
|
||||
# )
|
||||
first_quote['tbt'] = ticks_by_type
|
||||
|
||||
# TODO: now if only we could sync this to the display
|
||||
# rate timing exactly lul
|
||||
|
@ -868,4 +517,3 @@ async def uniform_rate_send(
|
|||
first_quote = last_quote = None
|
||||
diff = 0
|
||||
last_send = time.time()
|
||||
ticks_by_type.clear()
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
@ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
|
|||
if _USE_POSIX:
|
||||
from _posixshmem import shm_unlink
|
||||
|
||||
# import msgspec
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
import tractor
|
||||
import numpy as np
|
||||
from pydantic import BaseModel
|
||||
from numpy.lib import recfunctions as rfn
|
||||
|
||||
from ..log import get_logger
|
||||
from ._source import base_iohlc_dtype
|
||||
from .types import Struct
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
@ -50,11 +49,7 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
|
|||
|
||||
|
||||
def cuckoff_mantracker():
|
||||
'''
|
||||
Disable all ``multiprocessing``` "resource tracking" machinery since
|
||||
it's an absolute multi-threaded mess of non-SC madness.
|
||||
|
||||
'''
|
||||
from multiprocessing import resource_tracker as mantracker
|
||||
|
||||
# Tell the "resource tracker" thing to fuck off.
|
||||
|
@ -112,39 +107,36 @@ class SharedInt:
|
|||
log.warning(f'Shm for {name} already unlinked?')
|
||||
|
||||
|
||||
class _Token(Struct, frozen=True):
|
||||
class _Token(BaseModel):
|
||||
'''
|
||||
Internal representation of a shared memory "token"
|
||||
which can be used to key a system wide post shm entry.
|
||||
|
||||
'''
|
||||
class Config:
|
||||
frozen = True
|
||||
|
||||
shm_name: str  # this serves as a "key" value
|
||||
shm_first_index_name: str
|
||||
shm_last_index_name: str
|
||||
dtype_descr: tuple
|
||||
size: int # in struct-array index / row terms
|
||||
|
||||
@property
|
||||
def dtype(self) -> np.dtype:
|
||||
return np.dtype(list(map(tuple, self.dtype_descr))).descr
|
||||
|
||||
def as_msg(self):
|
||||
return self.to_dict()
|
||||
return self.dict()
|
||||
|
||||
@classmethod
|
||||
def from_msg(cls, msg: dict) -> _Token:
|
||||
if isinstance(msg, _Token):
|
||||
return msg
|
||||
|
||||
# TODO: native struct decoding
|
||||
# return _token_dec.decode(msg)
|
||||
|
||||
msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
|
||||
return _Token(**msg)
|
||||
|
||||
|
||||
# _token_dec = msgspec.msgpack.Decoder(_Token)
|
||||
|
||||
# TODO: this api?
|
||||
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
|
||||
# _known_tokens = tractor.ContextStack('_known_tokens', )
|
||||
|
@ -163,7 +155,6 @@ def get_shm_token(key: str) -> _Token:
|
|||
|
||||
def _make_token(
|
||||
key: str,
|
||||
size: int,
|
||||
dtype: Optional[np.dtype] = None,
|
||||
) -> _Token:
|
||||
'''
|
||||
|
@ -176,8 +167,7 @@ def _make_token(
|
|||
shm_name=key,
|
||||
shm_first_index_name=key + "_first",
|
||||
shm_last_index_name=key + "_last",
|
||||
dtype_descr=tuple(np.dtype(dtype).descr),
|
||||
size=size,
|
||||
dtype_descr=np.dtype(dtype).descr
|
||||
)
|
||||
|
||||
|
||||
|
@ -229,7 +219,6 @@ class ShmArray:
|
|||
shm_first_index_name=self._first._shm.name,
|
||||
shm_last_index_name=self._last._shm.name,
|
||||
dtype_descr=tuple(self._array.dtype.descr),
|
||||
size=self._len,
|
||||
)
|
||||
|
||||
@property
|
||||
|
@ -444,7 +433,7 @@ class ShmArray:
|
|||
def open_shm_array(
|
||||
|
||||
key: Optional[str] = None,
|
||||
size: int = _default_size, # see above
|
||||
size: int = _default_size,
|
||||
dtype: Optional[np.dtype] = None,
|
||||
readonly: bool = False,
|
||||
|
||||
|
@ -475,8 +464,7 @@ def open_shm_array(
|
|||
|
||||
token = _make_token(
|
||||
key=key,
|
||||
size=size,
|
||||
dtype=dtype,
|
||||
dtype=dtype
|
||||
)
|
||||
|
||||
# create single entry arrays for storing an first and last indices
|
||||
|
@ -528,15 +516,15 @@ def open_shm_array(
|
|||
# "unlink" created shm on process teardown by
|
||||
# pushing teardown calls onto actor context stack
|
||||
|
||||
stack = tractor.current_actor().lifetime_stack
|
||||
stack.callback(shmarr.close)
|
||||
stack.callback(shmarr.destroy)
|
||||
tractor._actor._lifetime_stack.callback(shmarr.close)
|
||||
tractor._actor._lifetime_stack.callback(shmarr.destroy)
|
||||
|
||||
return shmarr
|
||||
|
||||
|
||||
def attach_shm_array(
|
||||
token: tuple[str, str, tuple[str, str]],
|
||||
size: int = _default_size,
|
||||
readonly: bool = True,
|
||||
|
||||
) -> ShmArray:
|
||||
|
@ -575,7 +563,7 @@ def attach_shm_array(
|
|||
raise _err
|
||||
|
||||
shmarr = np.ndarray(
|
||||
(token.size,),
|
||||
(size,),
|
||||
dtype=token.dtype,
|
||||
buffer=shm.buf
|
||||
)
|
||||
|
@ -614,8 +602,8 @@ def attach_shm_array(
|
|||
if key not in _known_tokens:
|
||||
_known_tokens[key] = token
|
||||
|
||||
# "close" attached shm on actor teardown
|
||||
tractor.current_actor().lifetime_stack.callback(sha.close)
|
||||
# "close" attached shm on process teardown
|
||||
tractor._actor._lifetime_stack.callback(sha.close)
|
||||
|
||||
return sha
|
||||
|
||||
|
@ -643,7 +631,6 @@ def maybe_open_shm_array(
|
|||
use ``attach_shm_array``.
|
||||
|
||||
'''
|
||||
size = kwargs.pop('size', _default_size)
|
||||
try:
|
||||
# see if we already know this key
|
||||
token = _known_tokens[key]
|
||||
|
@ -651,11 +638,7 @@ def maybe_open_shm_array(
|
|||
except KeyError:
|
||||
log.warning(f"Could not find {key} in shms cache")
|
||||
if dtype:
|
||||
token = _make_token(
|
||||
key,
|
||||
size=size,
|
||||
dtype=dtype,
|
||||
)
|
||||
token = _make_token(key, dtype)
|
||||
try:
|
||||
return attach_shm_array(token=token, **kwargs), False
|
||||
except FileNotFoundError:
|
||||
|
|
|
@ -18,16 +18,12 @@
|
|||
numpy data source conversion helpers.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from decimal import (
|
||||
Decimal,
|
||||
ROUND_HALF_EVEN,
|
||||
)
|
||||
from typing import Any
|
||||
import decimal
|
||||
|
||||
from bidict import bidict
|
||||
import numpy as np
|
||||
|
||||
from .types import Struct
|
||||
from pydantic import BaseModel
|
||||
# from numba import from_dtype
|
||||
|
||||
|
||||
|
@ -80,14 +76,10 @@ def mk_fqsn(
|
|||
def float_digits(
|
||||
value: float,
|
||||
) -> int:
|
||||
'''
|
||||
Return the number of precision digits read from a float value.
|
||||
|
||||
'''
|
||||
if value == 0:
|
||||
return 0
|
||||
|
||||
return int(-Decimal(str(value)).as_tuple().exponent)
|
||||
return int(-decimal.Decimal(str(value)).as_tuple().exponent)
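
For reference, the decimal-exponent trick used by ``float_digits()`` above can be checked in isolation; this standalone sketch only demonstrates the idea:

from decimal import Decimal

def float_digits(value: float) -> int:
    # number of digits after the decimal point, via the exact
    # string repr -> Decimal exponent (0.001 -> exponent -3 -> 3)
    if value == 0:
        return 0
    return int(-Decimal(str(value)).as_tuple().exponent)

assert float_digits(0.001) == 3
assert float_digits(1.0) == 1
assert float_digits(0) == 0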
|
||||
|
||||
|
||||
def ohlc_zeros(length: int) -> np.ndarray:
|
||||
|
@ -134,57 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
|
|||
)
|
||||
|
||||
|
||||
class MktPair(Struct, frozen=True):
|
||||
|
||||
src: str # source asset name being used to buy
|
||||
src_type: str # source asset's financial type/classification name
|
||||
# ^ specifies a "class" of financial instrument
|
||||
# egs. stock, future, option, bond etc.
|
||||
|
||||
dst: str # destination asset name being bought
|
||||
dst_type: str # destination asset's financial type/classification name
|
||||
|
||||
price_tick: float # minimum price increment value
|
||||
price_tick_digits: int # required decimal digits for above
|
||||
|
||||
size_tick: float # minimum size (aka vlm) increment value
|
||||
size_tick_digits: int # required decimal digits for above
|
||||
|
||||
venue: str | None = None # market venue provider name
|
||||
expiry: str | None = None # for derivs, expiry datetime parseable str
|
||||
|
||||
# for derivs, info describing contract, egs.
|
||||
# strike price, call or put, swap type, exercise model, etc.
|
||||
contract_info: str | None = None
|
||||
|
||||
@classmethod
|
||||
def from_msg(
|
||||
self,
|
||||
msg: dict[str, Any],
|
||||
|
||||
) -> MktPair:
|
||||
'''
|
||||
Constructor for a received msg-dict normally received over IPC.
|
||||
|
||||
'''
|
||||
...
|
||||
|
||||
# fqa, fqma, .. etc. see issue:
|
||||
# https://github.com/pikers/piker/issues/467
|
||||
@property
|
||||
def fqsn(self) -> str:
|
||||
'''
|
||||
Return the fully qualified market (endpoint) name for the
|
||||
pair of transacting assets.
|
||||
|
||||
'''
|
||||
...
|
||||
|
||||
|
||||
# TODO: rework the below `Symbol` (which was originally inspired and
|
||||
# derived from stuff in quantdom) into a simpler, ipc msg ready, market
|
||||
# endpoint meta-data container type as per the drafted interface above.
|
||||
class Symbol(Struct):
|
||||
class Symbol(BaseModel):
|
||||
'''
|
||||
I guess this is some kinda container thing for dealing with
|
||||
all the different meta-data formats from brokers?
|
||||
|
@ -198,6 +140,10 @@ class Symbol(Struct):
|
|||
suffix: str = ''
|
||||
broker_info: dict[str, dict[str, Any]] = {}
|
||||
|
||||
# specifies a "class" of financial instrument
|
||||
# ex. stock, future, option, bond etc.
|
||||
|
||||
# @validate_arguments
|
||||
@classmethod
|
||||
def from_broker_info(
|
||||
cls,
|
||||
|
@ -206,17 +152,19 @@ class Symbol(Struct):
|
|||
info: dict[str, Any],
|
||||
suffix: str = '',
|
||||
|
||||
) -> Symbol:
|
||||
# XXX: like wtf..
|
||||
# ) -> 'Symbol':
|
||||
) -> None:
|
||||
|
||||
tick_size = info.get('price_tick_size', 0.01)
|
||||
lot_size = info.get('lot_tick_size', 0.0)
|
||||
lot_tick_size = info.get('lot_tick_size', 0.0)
|
||||
|
||||
return Symbol(
|
||||
key=symbol,
|
||||
tick_size=tick_size,
|
||||
lot_tick_size=lot_size,
|
||||
lot_tick_size=lot_tick_size,
|
||||
tick_size_digits=float_digits(tick_size),
|
||||
lot_size_digits=float_digits(lot_size),
|
||||
lot_size_digits=float_digits(lot_tick_size),
|
||||
suffix=suffix,
|
||||
broker_info={broker: info},
|
||||
)
|
||||
|
@ -227,7 +175,9 @@ class Symbol(Struct):
|
|||
fqsn: str,
|
||||
info: dict[str, Any],
|
||||
|
||||
) -> Symbol:
|
||||
# XXX: like wtf..
|
||||
# ) -> 'Symbol':
|
||||
) -> None:
|
||||
broker, key, suffix = unpack_fqsn(fqsn)
|
||||
return cls.from_broker_info(
|
||||
broker,
|
||||
|
@ -271,10 +221,6 @@ class Symbol(Struct):
|
|||
else:
|
||||
return (key, broker)
|
||||
|
||||
@property
|
||||
def fqsn(self) -> str:
|
||||
return '.'.join(self.tokens()).lower()
|
||||
|
||||
def front_fqsn(self) -> str:
|
||||
'''
|
||||
fqsn = "fully qualified symbol name"
|
||||
|
@ -294,24 +240,18 @@ class Symbol(Struct):
|
|||
|
||||
'''
|
||||
tokens = self.tokens()
|
||||
fqsn = '.'.join(map(str.lower, tokens))
|
||||
fqsn = '.'.join(tokens)
|
||||
return fqsn
|
||||
|
||||
def quantize_size(
|
||||
self,
|
||||
size: float,
|
||||
def iterfqsns(self) -> list[str]:
|
||||
keys = []
|
||||
for broker in self.broker_info.keys():
|
||||
fqsn = mk_fqsn(self.key, broker)
|
||||
if self.suffix:
|
||||
fqsn += f'.{self.suffix}'
|
||||
keys.append(fqsn)
|
||||
|
||||
) -> Decimal:
|
||||
'''
|
||||
Truncate input ``size: float`` using ``Decimal``
|
||||
and ``.lot_size_digits``.
|
||||
|
||||
'''
|
||||
digits = self.lot_size_digits
|
||||
return Decimal(size).quantize(
|
||||
Decimal(f'1.{"0".ljust(digits, "0")}'),
|
||||
rounding=ROUND_HALF_EVEN
|
||||
)
|
||||
return keys
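
A standalone sketch of the quantization idiom used by ``quantize_size()`` above: build a ``Decimal`` quantum from the instrument's lot-digit count and round half-even. The tick values here are made up.

from decimal import Decimal, ROUND_HALF_EVEN

def quantize(size: float, digits: int) -> Decimal:
    # eg. digits=3 -> quantum Decimal('1.000')
    quantum = Decimal(f'1.{"0" * digits}') if digits else Decimal('1')
    return Decimal(str(size)).quantize(quantum, rounding=ROUND_HALF_EVEN)

assert quantize(0.12345, 3) == Decimal('0.123')
assert quantize(2.5, 0) == Decimal('2')   # half-even: rounds to nearest even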
|
||||
|
||||
|
||||
def _nan_to_closest_num(array: np.ndarray):
|
||||
|
|
|
@ -18,24 +18,13 @@
|
|||
ToOlS fOr CoPInG wITh "tHE wEB" protocols.
|
||||
|
||||
"""
|
||||
from contextlib import (
|
||||
asynccontextmanager,
|
||||
AsyncExitStack,
|
||||
)
|
||||
from itertools import count
|
||||
from contextlib import asynccontextmanager, AsyncExitStack
|
||||
from types import ModuleType
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
Callable,
|
||||
AsyncGenerator,
|
||||
Iterable,
|
||||
)
|
||||
from typing import Any, Callable, AsyncGenerator
|
||||
import json
|
||||
|
||||
import trio
|
||||
import trio_websocket
|
||||
from wsproto.utilities import LocalProtocolError
|
||||
from trio_websocket._impl import (
|
||||
ConnectionClosed,
|
||||
DisconnectionTimeout,
|
||||
|
@ -46,53 +35,43 @@ from trio_websocket._impl import (
|
|||
|
||||
from ..log import get_logger
|
||||
|
||||
from .types import Struct
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
class NoBsWs:
|
||||
'''
|
||||
Make ``trio_websocket`` sockets stay up no matter the bs.
|
||||
"""Make ``trio_websocket`` sockets stay up no matter the bs.
|
||||
|
||||
You can provide a ``fixture`` async-context-manager which will be
|
||||
entered/exited around each reconnect operation.
|
||||
'''
|
||||
"""
|
||||
recon_errors = (
|
||||
ConnectionClosed,
|
||||
DisconnectionTimeout,
|
||||
ConnectionRejected,
|
||||
HandshakeError,
|
||||
ConnectionTimeout,
|
||||
LocalProtocolError,
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url: str,
|
||||
token: str,
|
||||
stack: AsyncExitStack,
|
||||
fixture: Optional[Callable] = None,
|
||||
serializer: ModuleType = json
|
||||
fixture: Callable,
|
||||
serializer: ModuleType = json,
|
||||
):
|
||||
self.url = url
|
||||
self.token = token
|
||||
self.fixture = fixture
|
||||
self._stack = stack
|
||||
self._ws: 'WebSocketConnection' = None # noqa
|
||||
|
||||
# TODO: is there some method we can call
|
||||
# on the underlying `._ws` to get this?
|
||||
self._connected: bool = False
|
||||
|
||||
async def _connect(
|
||||
self,
|
||||
tries: int = 1000,
|
||||
) -> None:
|
||||
|
||||
self._connected = False
|
||||
while True:
|
||||
try:
|
||||
await self._stack.aclose()
|
||||
except self.recon_errors:
|
||||
except (DisconnectionTimeout, RuntimeError):
|
||||
await trio.sleep(0.5)
|
||||
else:
|
||||
break
|
||||
|
@ -103,18 +82,19 @@ class NoBsWs:
|
|||
self._ws = await self._stack.enter_async_context(
|
||||
trio_websocket.open_websocket_url(self.url)
|
||||
)
|
||||
|
||||
if self.fixture is not None:
|
||||
# rerun user code fixture
|
||||
if self.token == '':
|
||||
ret = await self._stack.enter_async_context(
|
||||
self.fixture(self)
|
||||
)
|
||||
else:
|
||||
ret = await self._stack.enter_async_context(
|
||||
self.fixture(self, self.token)
|
||||
)
|
||||
|
||||
assert ret is None
|
||||
|
||||
log.info(f'Connection success: {self.url}')
|
||||
|
||||
self._connected = True
|
||||
return self._ws
|
||||
|
||||
except self.recon_errors as err:
|
||||
|
@ -124,15 +104,11 @@ class NoBsWs:
|
|||
f'{type(err)}...retry attempt {i}'
|
||||
)
|
||||
await trio.sleep(0.5)
|
||||
self._connected = False
|
||||
continue
|
||||
else:
|
||||
log.exception('ws connection fail...')
|
||||
raise last_err
|
||||
|
||||
def connected(self) -> bool:
|
||||
return self._connected
|
||||
|
||||
async def send_msg(
|
||||
self,
|
||||
data: Any,
|
||||
|
@ -152,26 +128,21 @@ class NoBsWs:
|
|||
except self.recon_errors:
|
||||
await self._connect()
|
||||
|
||||
def __aiter__(self):
|
||||
return self
|
||||
|
||||
async def __anext__(self):
|
||||
return await self.recv_msg()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def open_autorecon_ws(
|
||||
url: str,
|
||||
|
||||
# TODO: proper type cannot smh
|
||||
fixture: Optional[Callable] = None,
|
||||
|
||||
# TODO: proper type annot smh
|
||||
fixture: Callable,
|
||||
# used for authenticated websockets
|
||||
token: str = '',
|
||||
) -> AsyncGenerator[tuple[...], NoBsWs]:
|
||||
"""Apparently we can QoS for all sorts of reasons..so catch em.
|
||||
|
||||
"""
|
||||
async with AsyncExitStack() as stack:
|
||||
ws = NoBsWs(url, stack, fixture=fixture)
|
||||
ws = NoBsWs(url, token, stack, fixture=fixture)
|
||||
await ws._connect()
|
||||
|
||||
try:
|
||||
|
@ -179,114 +150,3 @@ async def open_autorecon_ws(
|
|||
|
||||
finally:
|
||||
await stack.aclose()
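
A hedged usage sketch of the reconnecting wrapper above (API per the ``decimalization_take_2`` side: no ``token`` argument, optional ``fixture``); the endpoint URL and subscribe message are placeholders:

from contextlib import asynccontextmanager

@asynccontextmanager
async def resubscribe(ws):
    # re-run on every (re)connect so server-side subscription state is restored
    await ws.send_msg({'method': 'subscribe', 'params': ['trades.XBT-USD']})
    yield

async def consume_feed():
    async with open_autorecon_ws(
        'wss://example.invalid/ws',   # placeholder endpoint
        fixture=resubscribe,
    ) as ws:
        async for msg in ws:          # reconnects transparently on ws errors
            print(msg)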
|
||||
|
||||
|
||||
'''
|
||||
JSONRPC response-request style machinery for transparent multiplexing of msgs
|
||||
over a NoBsWs.
|
||||
|
||||
'''
|
||||
|
||||
|
||||
class JSONRPCResult(Struct):
|
||||
id: int
|
||||
jsonrpc: str = '2.0'
|
||||
result: Optional[dict] = None
|
||||
error: Optional[dict] = None
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def open_jsonrpc_session(
|
||||
url: str,
|
||||
start_id: int = 0,
|
||||
response_type: type = JSONRPCResult,
|
||||
request_type: Optional[type] = None,
|
||||
request_hook: Optional[Callable] = None,
|
||||
error_hook: Optional[Callable] = None,
|
||||
) -> Callable[[str, dict], dict]:
|
||||
|
||||
async with (
|
||||
trio.open_nursery() as n,
|
||||
open_autorecon_ws(url) as ws
|
||||
):
|
||||
rpc_id: Iterable = count(start_id)
|
||||
rpc_results: dict[int, dict] = {}
|
||||
|
||||
async def json_rpc(method: str, params: dict) -> dict:
|
||||
'''
|
||||
perform a json rpc call and wait for the result; raise an exception
|
||||
if an error field is present on the response
|
||||
'''
|
||||
msg = {
|
||||
'jsonrpc': '2.0',
|
||||
'id': next(rpc_id),
|
||||
'method': method,
|
||||
'params': params
|
||||
}
|
||||
_id = msg['id']
|
||||
|
||||
rpc_results[_id] = {
|
||||
'result': None,
|
||||
'event': trio.Event()
|
||||
}
|
||||
|
||||
await ws.send_msg(msg)
|
||||
|
||||
await rpc_results[_id]['event'].wait()
|
||||
|
||||
ret = rpc_results[_id]['result']
|
||||
|
||||
del rpc_results[_id]
|
||||
|
||||
if ret.error is not None:
|
||||
raise Exception(json.dumps(ret.error, indent=4))
|
||||
|
||||
return ret
|
||||
|
||||
async def recv_task():
|
||||
'''
|
||||
receives every ws message and stores it in its corresponding
|
||||
result field, then sets the event to wake up the original sender
|
||||
tasks. also receives responses to requests originating from
|
||||
the server side.
|
||||
|
||||
'''
|
||||
async for msg in ws:
|
||||
match msg:
|
||||
case {
|
||||
'result': _,
|
||||
'id': mid,
|
||||
} if res_entry := rpc_results.get(mid):
|
||||
|
||||
res_entry['result'] = response_type(**msg)
|
||||
res_entry['event'].set()
|
||||
|
||||
case {
|
||||
'result': _,
|
||||
'id': mid,
|
||||
} if not rpc_results.get(mid):
|
||||
log.warning(
|
||||
f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
|
||||
)
|
||||
|
||||
case {
|
||||
'method': _,
|
||||
'params': _,
|
||||
}:
|
||||
log.debug(f'Received\n{msg}')
|
||||
if request_hook:
|
||||
await request_hook(request_type(**msg))
|
||||
|
||||
case {
|
||||
'error': error
|
||||
}:
|
||||
log.warning(f'Received\n{error}')
|
||||
if error_hook:
|
||||
await error_hook(response_type(**msg))
|
||||
|
||||
case _:
|
||||
log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
|
||||
|
||||
n.start_soon(recv_task)
|
||||
yield json_rpc
|
||||
n.cancel_scope.cancel()
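
A usage sketch of the JSON-RPC helper above; the URL and method name are placeholders and error handling is elided:

async def get_server_time():
    async with open_jsonrpc_session(
        'wss://example.invalid/api/v2',   # placeholder endpoint
    ) as json_rpc:
        # blocks until the response with the matching id arrives on the ws
        resp = await json_rpc('public/get_time', {})
        return resp.result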
|
||||
|
|
|
@ -1,109 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import annotations
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from pprint import pformat
|
||||
from typing import (
|
||||
Any,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import pyqtgraph as pg
|
||||
import numpy as np
|
||||
import tractor
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import docker
|
||||
from ._ahab import DockerContainer
|
||||
|
||||
from piker.log import (
|
||||
get_logger,
|
||||
get_console_log
|
||||
)
|
||||
|
||||
import asks
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
# container level config
|
||||
_config = {
|
||||
'port': 19200,
|
||||
'log_level': 'debug',
|
||||
}
|
||||
|
||||
|
||||
def start_elasticsearch(
|
||||
client: docker.DockerClient,
|
||||
|
||||
**kwargs,
|
||||
|
||||
) -> tuple[DockerContainer, dict[str, Any]]:
|
||||
'''
|
||||
Start and supervise an elasticsearch instance with its config bind-mounted
|
||||
in from the piker config directory on the system.
|
||||
|
||||
The equivalent cli cmd to this code is:
|
||||
|
||||
sudo docker run \
|
||||
-itd \
|
||||
--rm \
|
||||
--network=host \
|
||||
--mount type=bind,source="$(pwd)"/elastic,target=/usr/share/elasticsearch/data \
|
||||
--env "elastic_username=elastic" \
|
||||
--env "elastic_password=password" \
|
||||
--env "xpack.security.enabled=false" \
|
||||
elastic
|
||||
|
||||
'''
|
||||
import docker
|
||||
get_console_log('info', name=__name__)
|
||||
|
||||
dcntr: DockerContainer = client.containers.run(
|
||||
'piker:elastic',
|
||||
name='piker-elastic',
|
||||
network='host',
|
||||
detach=True,
|
||||
remove=True
|
||||
)
|
||||
|
||||
async def start_matcher(msg: str):
|
||||
try:
|
||||
health = (await asks.get(
|
||||
f'http://localhost:19200/_cat/health',
|
||||
params={'format': 'json'}
|
||||
)).json()
|
||||
|
||||
except OSError:
|
||||
log.error("couldn't reach elastic container")
|
||||
return False
|
||||
|
||||
log.info(health)
|
||||
return health[0]['status'] == 'green'
|
||||
|
||||
async def stop_matcher(msg: str):
|
||||
return msg == 'closed'
|
||||
|
||||
return (
|
||||
dcntr,
|
||||
{},
|
||||
# expected startup and stop msgs
|
||||
start_matcher,
|
||||
stop_matcher,
|
||||
)
|
1700
piker/data/feed.py
File diff suppressed because it is too large
|
@ -1,210 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
abstractions for organizing, managing and generally operating-on
|
||||
real-time data processing data-structures.
|
||||
|
||||
"Streams, flumes, cascades and flows.."
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import tractor
|
||||
import pendulum
|
||||
import numpy as np
|
||||
|
||||
from .types import Struct
|
||||
from ._source import (
|
||||
Symbol,
|
||||
)
|
||||
from ._sharedmem import (
|
||||
attach_shm_array,
|
||||
ShmArray,
|
||||
_Token,
|
||||
)
|
||||
# from .._profile import (
|
||||
# Profiler,
|
||||
# pg_profile_enabled,
|
||||
# )
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# from pyqtgraph import PlotItem
|
||||
from .feed import Feed
|
||||
|
||||
|
||||
# TODO: ideas for further abstractions as per
|
||||
# https://github.com/pikers/piker/issues/216 and
|
||||
# https://github.com/pikers/piker/issues/270:
|
||||
# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
|
||||
# as per circuit parlance:
|
||||
# https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
|
||||
# - could cover the combination of our `FspAdmin` and the
|
||||
# backend `.fsp._engine` related machinery to "connect" one flume
|
||||
# to another?
|
||||
# - a (financial signal) ``Flow`` would be the a "collection" of such
|
||||
# minimal cascades. Some engineering-based jargon concepts:
|
||||
# - https://en.wikipedia.org/wiki/Signal_chain
|
||||
# - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
|
||||
# - https://en.wikipedia.org/wiki/Audio_signal_flow
|
||||
# - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
|
||||
# - https://en.wikipedia.org/wiki/Dataflow_programming
|
||||
# - https://en.wikipedia.org/wiki/Signal_programming
|
||||
# - https://en.wikipedia.org/wiki/Incremental_computing
|
||||
|
||||
|
||||
class Flume(Struct):
|
||||
'''
|
||||
Composite reference type which points to all the addressing handles
|
||||
and other meta-data necessary for the read, measure and management
|
||||
of a set of real-time updated data flows.
|
||||
|
||||
Can be thought of as a "flow descriptor" or "flow frame" which
|
||||
describes the high level properties of a set of data flows that can
|
||||
be used seamlessly across process-memory boundaries.
|
||||
|
||||
Each instance's sub-components normally includes:
|
||||
- a msg oriented quote stream provided via an IPC transport
|
||||
- history and real-time shm buffers which are both real-time
|
||||
updated and backfilled.
|
||||
- associated startup indexing information related to both buffer
|
||||
real-time-append and historical prepend addresses.
|
||||
- low level APIs to read and measure the updated data and manage
|
||||
queuing properties.
|
||||
|
||||
'''
|
||||
symbol: Symbol
|
||||
first_quote: dict
|
||||
_rt_shm_token: _Token
|
||||
|
||||
# optional since some data flows won't have a "downsampled" history
|
||||
# buffer/stream (eg. FSPs).
|
||||
_hist_shm_token: _Token | None = None
|
||||
|
||||
# private shm refs loaded dynamically from tokens
|
||||
_hist_shm: ShmArray | None = None
|
||||
_rt_shm: ShmArray | None = None
|
||||
|
||||
stream: tractor.MsgStream | None = None
|
||||
izero_hist: int = 0
|
||||
izero_rt: int = 0
|
||||
throttle_rate: int | None = None
|
||||
|
||||
# TODO: do we need this really if we can pull the `Portal` from
|
||||
# ``tractor``'s internals?
|
||||
feed: Feed | None = None
|
||||
|
||||
@property
|
||||
def rt_shm(self) -> ShmArray:
|
||||
|
||||
if self._rt_shm is None:
|
||||
self._rt_shm = attach_shm_array(
|
||||
token=self._rt_shm_token,
|
||||
readonly=True,
|
||||
)
|
||||
|
||||
return self._rt_shm
|
||||
|
||||
@property
|
||||
def hist_shm(self) -> ShmArray:
|
||||
|
||||
if self._hist_shm_token is None:
|
||||
raise RuntimeError(
|
||||
'No shm token has been set for the history buffer?'
|
||||
)
|
||||
|
||||
if (
|
||||
self._hist_shm is None
|
||||
):
|
||||
self._hist_shm = attach_shm_array(
|
||||
token=self._hist_shm_token,
|
||||
readonly=True,
|
||||
)
|
||||
|
||||
return self._hist_shm
|
||||
|
||||
async def receive(self) -> dict:
|
||||
return await self.stream.receive()
|
||||
|
||||
def get_ds_info(
|
||||
self,
|
||||
) -> tuple[float, float, float]:
|
||||
'''
|
||||
Compute the "downsampling" ratio info between the historical shm
|
||||
buffer and the real-time (HFT) one.
|
||||
|
||||
Return a tuple of the fast sample period, historical sample
|
||||
period and ratio between them.
|
||||
|
||||
'''
|
||||
times = self.hist_shm.array['time']
|
||||
end = pendulum.from_timestamp(times[-1])
|
||||
start = pendulum.from_timestamp(times[times != times[-1]][-1])
|
||||
hist_step_size_s = (end - start).seconds
|
||||
|
||||
times = self.rt_shm.array['time']
|
||||
end = pendulum.from_timestamp(times[-1])
|
||||
start = pendulum.from_timestamp(times[times != times[-1]][-1])
|
||||
rt_step_size_s = (end - start).seconds
|
||||
|
||||
ratio = hist_step_size_s / rt_step_size_s
|
||||
return (
|
||||
rt_step_size_s,
|
||||
hist_step_size_s,
|
||||
ratio,
|
||||
)
|
||||
|
||||
# TODO: get native msgspec decoding for these workinn
|
||||
def to_msg(self) -> dict:
|
||||
msg = self.to_dict()
|
||||
msg['symbol'] = msg['symbol'].to_dict()
|
||||
|
||||
# can't serialize the stream or feed objects, it's expected
|
||||
# you'll have a ref to it since this msg should be rxed on
|
||||
# a stream on whatever far end IPC..
|
||||
msg.pop('stream')
|
||||
msg.pop('feed')
|
||||
return msg
|
||||
|
||||
@classmethod
|
||||
def from_msg(cls, msg: dict) -> dict:
|
||||
symbol = Symbol(**msg.pop('symbol'))
|
||||
return cls(
|
||||
symbol=symbol,
|
||||
**msg,
|
||||
)
|
||||
|
||||
def get_index(
|
||||
self,
|
||||
time_s: float,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> int | float:
|
||||
'''
|
||||
Return the array shm-buffer index for an epoch time.
|
||||
|
||||
'''
|
||||
times = array['time']
|
||||
first = np.searchsorted(
|
||||
times,
|
||||
time_s,
|
||||
side='left',
|
||||
)
|
||||
imx = times.shape[0] - 1
|
||||
return min(first, imx)
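
The epoch-time to buffer-index lookup in ``get_index()`` is just a clamped ``np.searchsorted()``; a self-contained sketch:

import numpy as np

times = np.array([10.0, 11.0, 12.0, 13.0])   # the shm 'time' column

def get_index(time_s: float) -> int:
    first = np.searchsorted(times, time_s, side='left')
    # clamp to the last valid row index
    return min(first, times.shape[0] - 1)

assert get_index(12.0) == 2
assert get_index(99.0) == 3   # beyond the buffer -> last index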
|
|
@ -35,11 +35,10 @@ from typing import (
|
|||
)
|
||||
import time
|
||||
from math import isnan
|
||||
from pathlib import Path
|
||||
|
||||
from bidict import bidict
|
||||
from msgspec.msgpack import encode, decode
|
||||
# import pyqtgraph as pg
|
||||
import msgpack
|
||||
import pyqtgraph as pg
|
||||
import numpy as np
|
||||
import tractor
|
||||
from trio_websocket import open_websocket_url
|
||||
|
@ -57,7 +56,6 @@ if TYPE_CHECKING:
|
|||
|
||||
from .feed import maybe_open_feed
|
||||
from ..log import get_logger, get_console_log
|
||||
from .._profile import Profiler
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
@ -133,10 +131,7 @@ def start_marketstore(
|
|||
|
||||
mktsdir = os.path.join(config._config_dir, 'marketstore')
|
||||
|
||||
# create dirs when dne
|
||||
if not os.path.isdir(config._config_dir):
|
||||
Path(config._config_dir).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# create when dne
|
||||
if not os.path.isdir(mktsdir):
|
||||
os.mkdir(mktsdir)
|
||||
|
||||
|
@ -190,20 +185,13 @@ def start_marketstore(
|
|||
init=True,
|
||||
# remove=True,
|
||||
)
|
||||
|
||||
async def start_matcher(msg: str):
|
||||
return "launching tcp listener for all services..." in msg
|
||||
|
||||
async def stop_matcher(msg: str):
|
||||
return "exiting..." in msg
|
||||
|
||||
return (
|
||||
dcntr,
|
||||
_config,
|
||||
|
||||
# expected startup and stop msgs
|
||||
start_matcher,
|
||||
stop_matcher,
|
||||
"launching tcp listener for all services...",
|
||||
"exiting...",
|
||||
)
|
||||
|
||||
|
||||
|
@ -399,54 +387,50 @@ class Storage:
|
|||
async def load(
|
||||
self,
|
||||
fqsn: str,
|
||||
timeframe: int,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray, # timeframe sampled array-series
|
||||
dict[int, np.ndarray], # timeframe (in secs) to series
|
||||
Optional[datetime], # first dt
|
||||
Optional[datetime], # last dt
|
||||
]:
|
||||
|
||||
first_tsdb_dt, last_tsdb_dt = None, None
|
||||
hist = await self.read_ohlcv(
|
||||
tsdb_arrays = await self.read_ohlcv(
|
||||
fqsn,
|
||||
# on first load we don't need to pull the max
|
||||
# history per request size worth.
|
||||
limit=3000,
|
||||
timeframe=timeframe,
|
||||
)
|
||||
log.info(f'Loaded tsdb history {hist}')
|
||||
log.info(f'Loaded tsdb history {tsdb_arrays}')
|
||||
|
||||
if len(hist):
|
||||
times = hist['Epoch']
|
||||
if tsdb_arrays:
|
||||
fastest = list(tsdb_arrays.values())[0]
|
||||
times = fastest['Epoch']
|
||||
first, last = times[0], times[-1]
|
||||
first_tsdb_dt, last_tsdb_dt = map(
|
||||
pendulum.from_timestamp, [first, last]
|
||||
)
|
||||
|
||||
return (
|
||||
hist, # array-data
|
||||
first_tsdb_dt, # start of query-frame
|
||||
last_tsdb_dt, # most recent
|
||||
)
|
||||
return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
|
||||
|
||||
async def read_ohlcv(
|
||||
self,
|
||||
fqsn: str,
|
||||
timeframe: int | str,
|
||||
timeframe: Optional[Union[int, str]] = None,
|
||||
end: Optional[int] = None,
|
||||
limit: int = int(800e3),
|
||||
|
||||
) -> np.ndarray:
|
||||
|
||||
) -> tuple[
|
||||
MarketstoreClient,
|
||||
Union[dict, np.ndarray]
|
||||
]:
|
||||
client = self.client
|
||||
syms = await client.list_symbols()
|
||||
|
||||
if fqsn not in syms:
|
||||
return {}
|
||||
|
||||
# use the provided timeframe or 1s by default
|
||||
tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])
|
||||
tfstr = tf_in_1s[1]
|
||||
|
||||
params = Params(
|
||||
symbols=fqsn,
|
||||
|
@ -460,72 +444,58 @@ class Storage:
|
|||
limit=limit,
|
||||
)
|
||||
|
||||
if timeframe is None:
|
||||
log.info(f'starting {fqsn} tsdb granularity scan..')
|
||||
# loop through and try to find highest granularity
|
||||
for tfstr in tf_in_1s.values():
|
||||
try:
|
||||
log.info(f'querying for {tfstr}@{fqsn}')
|
||||
params.set('timeframe', tfstr)
|
||||
result = await client.query(params)
|
||||
except purerpc.grpclib.exceptions.UnknownError as err:
|
||||
# indicate there is no history for this timeframe
|
||||
log.exception(
|
||||
f'Unknown mkts QUERY error: {params}\n'
|
||||
f'{err.args}'
|
||||
)
|
||||
break
|
||||
|
||||
except purerpc.grpclib.exceptions.UnknownError:
|
||||
# XXX: this is already logged by the container and
|
||||
# thus shows up through `marketstored` logs relay.
|
||||
# log.warning(f'{tfstr}@{fqsn} not found')
|
||||
continue
|
||||
else:
|
||||
return {}
|
||||
|
||||
else:
|
||||
result = await client.query(params)
|
||||
|
||||
# TODO: it turns out column access on recarrays is actually slower:
|
||||
# https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
|
||||
# it might make sense to make these structured arrays?
|
||||
data_set = result.by_symbols()[fqsn]
|
||||
array = data_set.array
|
||||
# Fill out a `numpy` array-results map
|
||||
arrays = {}
|
||||
for fqsn, data_set in result.by_symbols().items():
|
||||
arrays.setdefault(fqsn, {})[
|
||||
tf_in_1s.inverse[data_set.timeframe]
|
||||
] = data_set.array
|
||||
|
||||
# XXX: ensure sample rate is as expected
|
||||
time = data_set.array['Epoch']
|
||||
if len(time) > 1:
|
||||
time_step = time[-1] - time[-2]
|
||||
ts = tf_in_1s.inverse[data_set.timeframe]
|
||||
|
||||
if time_step != ts:
|
||||
log.warning(
|
||||
f'MKTS BUG: wrong timeframe loaded: {time_step}\n'
|
||||
'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG\n'
|
||||
f'WIPING HISTORY FOR {ts}s'
|
||||
)
|
||||
await self.delete_ts(fqsn, timeframe)
|
||||
|
||||
# try reading again..
|
||||
return await self.read_ohlcv(
|
||||
fqsn,
|
||||
timeframe,
|
||||
end,
|
||||
limit,
|
||||
)
|
||||
|
||||
return array
|
||||
return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
|
||||
|
||||
async def delete_ts(
|
||||
self,
|
||||
key: str,
|
||||
timeframe: Optional[Union[int, str]] = None,
|
||||
fmt: str = 'OHLCV',
|
||||
|
||||
) -> bool:
|
||||
|
||||
client = self.client
|
||||
syms = await client.list_symbols()
|
||||
print(syms)
|
||||
if key not in syms:
|
||||
raise KeyError(f'`{key}` table key not found in\n{syms}?')
|
||||
# if key not in syms:
|
||||
# raise KeyError(f'`{fqsn}` table key not found?')
|
||||
|
||||
tbk = mk_tbk((
|
||||
key,
|
||||
tf_in_1s.get(timeframe, tf_in_1s[60]),
|
||||
fmt,
|
||||
))
|
||||
return await client.destroy(tbk=tbk)
|
||||
return await client.destroy(tbk=key)
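
The ``tf_in_1s`` lookups above rely on ``bidict``'s two-way mapping; a sketch with assumed values (only the 1s -> '1Sec' pairing is visible in this diff):

from bidict import bidict

tf_in_1s = bidict({
    1: '1Sec',     # shown in the tbk strings above
    60: '1Min',    # assumed
})

assert tf_in_1s[1] == '1Sec'
assert tf_in_1s.inverse['1Sec'] == 1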
|
||||
|
||||
async def write_ohlcv(
|
||||
self,
|
||||
fqsn: str,
|
||||
ohlcv: np.ndarray,
|
||||
timeframe: int,
|
||||
append_and_duplicate: bool = True,
|
||||
limit: int = int(800e3),
|
||||
|
||||
|
@ -549,18 +519,17 @@ class Storage:
|
|||
|
||||
m, r = divmod(len(mkts_array), limit)
|
||||
|
||||
tfkey = tf_in_1s[timeframe]
|
||||
for i in range(m, 1):
|
||||
to_push = mkts_array[i-1:i*limit]
|
||||
|
||||
# write to db
|
||||
resp = await self.client.write(
|
||||
to_push,
|
||||
tbk=f'{fqsn}/{tfkey}/OHLCV',
|
||||
tbk=f'{fqsn}/1Sec/OHLCV',
|
||||
|
||||
# NOTE: we will append duplicates
|
||||
# for the same timestamp-index.
|
||||
# TODO: pre-deduplicate?
|
||||
# TODO: pre deduplicate?
|
||||
isvariablelength=append_and_duplicate,
|
||||
)
|
||||
|
||||
|
@ -579,7 +548,7 @@ class Storage:
|
|||
# write to db
|
||||
resp = await self.client.write(
|
||||
to_push,
|
||||
tbk=f'{fqsn}/{tfkey}/OHLCV',
|
||||
tbk=f'{fqsn}/1Sec/OHLCV',
|
||||
|
||||
# NOTE: we will append duplicates
|
||||
# for the same timestamp-index.
|
||||
|
@ -608,7 +577,6 @@ class Storage:
|
|||
# def delete_range(self, start_dt, end_dt) -> None:
|
||||
# ...
|
||||
|
||||
|
||||
@acm
|
||||
async def open_storage_client(
|
||||
fqsn: str,
|
||||
|
@ -658,7 +626,7 @@ async def tsdb_history_update(
|
|||
# * the original data feed arch blurb:
|
||||
# - https://github.com/pikers/piker/issues/98
|
||||
#
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
disabled=False, # not pg_profile_enabled(),
|
||||
delayed=False,
|
||||
)
|
||||
|
@ -670,35 +638,34 @@ async def tsdb_history_update(
|
|||
[fqsn],
|
||||
start_stream=False,
|
||||
|
||||
) as feed,
|
||||
) as (feed, stream),
|
||||
):
|
||||
profiler(f'opened feed for {fqsn}')
|
||||
|
||||
# to_append = feed.hist_shm.array
|
||||
# to_prepend = None
|
||||
to_append = feed.shm.array
|
||||
to_prepend = None
|
||||
|
||||
if fqsn:
|
||||
flume = feed.flumes[fqsn]
|
||||
symbol = flume.symbol
|
||||
symbol = feed.symbols.get(fqsn)
|
||||
if symbol:
|
||||
fqsn = symbol.fqsn
|
||||
fqsn = symbol.front_fqsn()
|
||||
|
||||
# diff db history with shm and only write the missing portions
|
||||
# ohlcv = flume.hist_shm.array
|
||||
ohlcv = feed.shm.array
|
||||
|
||||
# TODO: use pg profiler
|
||||
# for secs in (1, 60):
|
||||
# tsdb_array = await storage.read_ohlcv(
|
||||
# fqsn,
|
||||
# timeframe=timeframe,
|
||||
# )
|
||||
# # hist diffing:
|
||||
# # these aren't currently used but can be referenced from
|
||||
# # within the embedded ipython shell below.
|
||||
# to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
|
||||
# to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
|
||||
tsdb_arrays = await storage.read_ohlcv(fqsn)
|
||||
# hist diffing
|
||||
if tsdb_arrays:
|
||||
for secs in (1, 60):
|
||||
ts = tsdb_arrays.get(secs)
|
||||
if ts is not None and len(ts):
|
||||
# these aren't currently used but can be referenced from
|
||||
# within the embedded ipython shell below.
|
||||
to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
|
||||
to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
|
||||
|
||||
# profiler('Finished db arrays diffs')
|
||||
profiler('Finished db arrays diffs')
|
||||
|
||||
syms = await storage.client.list_symbols()
|
||||
log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
|
||||
|
@ -807,13 +774,12 @@ async def stream_quotes(
|
|||
async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
|
||||
# send subs topics to server
|
||||
resp = await ws.send_message(
|
||||
|
||||
encode({'streams': list(tbks.values())})
|
||||
msgpack.dumps({'streams': list(tbks.values())})
|
||||
)
|
||||
log.info(resp)
|
||||
|
||||
async def recv() -> dict[str, Any]:
|
||||
return decode((await ws.get_message()), encoding='utf-8')
|
||||
return msgpack.loads((await ws.get_message()), encoding='utf-8')
|
||||
|
||||
streams = (await recv())['streams']
|
||||
log.info(f"Subscribed to {streams}")
|
||||
|
|
|
@ -1,88 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Built-in (extension) types.
|
||||
|
||||
"""
|
||||
import sys
|
||||
from typing import Optional
|
||||
from pprint import pformat
|
||||
|
||||
import msgspec
|
||||
|
||||
|
||||
class Struct(
|
||||
msgspec.Struct,
|
||||
|
||||
# https://jcristharif.com/msgspec/structs.html#tagged-unions
|
||||
# tag='pikerstruct',
|
||||
# tag=True,
|
||||
):
|
||||
'''
|
||||
A "human friendlier" (aka repl buddy) struct subtype.
|
||||
|
||||
'''
|
||||
def to_dict(self) -> dict:
|
||||
return {
|
||||
f: getattr(self, f)
|
||||
for f in self.__struct_fields__
|
||||
}
|
||||
|
||||
# Lul, doesn't seem to work that well..
|
||||
# def __repr__(self):
|
||||
# # only turn on pprint when we detect a python REPL
|
||||
# # at runtime B)
|
||||
# if (
|
||||
# hasattr(sys, 'ps1')
|
||||
# # TODO: check if we're in pdb
|
||||
# ):
|
||||
# return self.pformat()
|
||||
|
||||
# return super().__repr__()
|
||||
|
||||
def pformat(self) -> str:
|
||||
return f'Struct({pformat(self.to_dict())})'
|
||||
|
||||
def copy(
|
||||
self,
|
||||
update: Optional[dict] = None,
|
||||
|
||||
) -> msgspec.Struct:
|
||||
'''
|
||||
Validate-typecast all self-defined fields and return a copy of us
|
||||
with all such fields.
|
||||
|
||||
This is kinda like the default behaviour in `pydantic.BaseModel`.
|
||||
|
||||
'''
|
||||
if update:
|
||||
for k, v in update.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
# roundtrip serialize to validate
|
||||
return msgspec.msgpack.Decoder(
|
||||
type=type(self)
|
||||
).decode(
|
||||
msgspec.msgpack.Encoder().encode(self)
|
||||
)
|
||||
|
||||
def typecast(
|
||||
self,
|
||||
# fields: Optional[list[str]] = None,
|
||||
) -> None:
|
||||
for fname, ftype in self.__annotations__.items():
|
||||
setattr(self, fname, ftype(getattr(self, fname)))
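
The ``copy()`` validation trick above (encode then decode with a typed decoder so field types are enforced) works with any ``msgspec.Struct``; a minimal standalone sketch:

import msgspec

class Point(msgspec.Struct):
    x: float
    y: float

p = Point(x=1, y=2)                      # ints snuck in
buf = msgspec.msgpack.Encoder().encode(p)
validated = msgspec.msgpack.Decoder(type=Point).decode(buf)
assert isinstance(validated.x, float)    # roundtrip validated/coerced the types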
|
|
@ -78,8 +78,7 @@ class Fsp:
|
|||
# + the consuming fsp *to* the consumers output
|
||||
# shm flow.
|
||||
_flow_registry: dict[
|
||||
tuple[_Token, str],
|
||||
tuple[_Token, Optional[ShmArray]],
|
||||
tuple[_Token, str], _Token,
|
||||
] = {}
|
||||
|
||||
def __init__(
|
||||
|
@ -121,6 +120,7 @@ class Fsp:
|
|||
):
|
||||
return self.func(*args, **kwargs)
|
||||
|
||||
# TODO: lru_cache this? prettty sure it'll work?
|
||||
def get_shm(
|
||||
self,
|
||||
src_shm: ShmArray,
|
||||
|
@ -131,27 +131,12 @@ class Fsp:
|
|||
for this "instance" of a signal processor for
|
||||
the given ``key``.
|
||||
|
||||
The destination shm "token" and array are cached if possible to
|
||||
minimize multiple stdlib/system calls.
|
||||
|
||||
'''
|
||||
dst_token, maybe_array = self._flow_registry[
|
||||
dst_token = self._flow_registry[
|
||||
(src_shm._token, self.name)
|
||||
]
|
||||
if maybe_array is None:
|
||||
self._flow_registry[
|
||||
(src_shm._token, self.name)
|
||||
] = (
|
||||
dst_token,
|
||||
# "cache" the ``ShmArray`` such that
|
||||
# we call the underlying "attach" code as few
|
||||
# times as possible as per:
|
||||
# - https://github.com/pikers/piker/issues/359
|
||||
# - https://github.com/pikers/piker/issues/332
|
||||
maybe_array := attach_shm_array(dst_token)
|
||||
)
|
||||
|
||||
return maybe_array
|
||||
shm = attach_shm_array(dst_token)
|
||||
return shm
|
||||
|
||||
|
||||
def fsp(
|
||||
|
@ -199,10 +184,7 @@ def maybe_mk_fsp_shm(
|
|||
# TODO: load output types from `Fsp`
|
||||
# - should `index` be a required internal field?
|
||||
fsp_dtype = np.dtype(
|
||||
[('index', int)]
|
||||
+
|
||||
[('time', float)]
|
||||
+
|
||||
[('index', int)] +
|
||||
[(field_name, float) for field_name in target.outputs]
|
||||
)
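
What the dtype construction above produces, roughly: a structured array with an index column plus one float column per FSP output (the output field name here is an assumption):

import numpy as np

outputs = ('rsi',)   # assumed single-output fsp
fsp_dtype = np.dtype(
    [('index', int)]
    +
    [('time', float)]
    +
    [(name, float) for name in outputs]
)

arr = np.zeros(3, dtype=fsp_dtype)
arr[-1]['rsi'] = 48.5
print(arr.dtype.names)   # ('index', 'time', 'rsi')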
|
||||
|
||||
|
|
|
@ -21,13 +21,12 @@ core task logic for processing chains
|
|||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import (
|
||||
AsyncIterator,
|
||||
Callable,
|
||||
Optional,
|
||||
AsyncIterator, Callable, Optional,
|
||||
Union,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
import tractor
|
||||
|
@ -36,22 +35,14 @@ from tractor.msg import NamespacePath
|
|||
from ..log import get_logger, get_console_log
|
||||
from .. import data
|
||||
from ..data import attach_shm_array
|
||||
from ..data.feed import (
|
||||
Flume,
|
||||
Feed,
|
||||
)
|
||||
from ..data.feed import Feed
|
||||
from ..data._sharedmem import ShmArray
|
||||
from ..data._sampling import (
|
||||
_default_delay_s,
|
||||
open_sample_stream,
|
||||
)
|
||||
from ..data._source import Symbol
|
||||
from ._api import (
|
||||
Fsp,
|
||||
_load_builtins,
|
||||
_Token,
|
||||
)
|
||||
from .._profile import Profiler
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -86,7 +77,7 @@ async def filter_quotes_by_sym(
|
|||
async def fsp_compute(
|
||||
|
||||
symbol: Symbol,
|
||||
flume: Flume,
|
||||
feed: Feed,
|
||||
quote_stream: trio.abc.ReceiveChannel,
|
||||
|
||||
src: ShmArray,
|
||||
|
@ -99,7 +90,7 @@ async def fsp_compute(
|
|||
|
||||
) -> None:
|
||||
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
delayed=False,
|
||||
disabled=True
|
||||
)
|
||||
|
@ -114,17 +105,16 @@ async def fsp_compute(
|
|||
filter_quotes_by_sym(fqsn, quote_stream),
|
||||
|
||||
# XXX: currently the ``ohlcv`` arg
|
||||
flume.rt_shm,
|
||||
feed.shm,
|
||||
)
|
||||
|
||||
# HISTORY COMPUTE PHASE
|
||||
# conduct a single iteration of fsp with historical bars input
|
||||
# and get historical output.
|
||||
# Conduct a single iteration of fsp with historical bars input
|
||||
# and get historical output
|
||||
history_output: Union[
|
||||
dict[str, np.ndarray], # multi-output case
|
||||
np.ndarray, # single output case
|
||||
]
|
||||
history_output = await anext(out_stream)
|
||||
history_output = await out_stream.__anext__()
|
||||
|
||||
func_name = func.__name__
|
||||
profiler(f'{func_name} generated history')
|
||||
|
@ -136,13 +126,9 @@ async def fsp_compute(
|
|||
# each respective field.
|
||||
fields = getattr(dst.array.dtype, 'fields', None).copy()
|
||||
fields.pop('index')
|
||||
history_by_field: Optional[np.ndarray] = None
|
||||
src_time = src.array['time']
|
||||
history: Optional[np.ndarray] = None # TODO: nptyping here!
|
||||
|
||||
if (
|
||||
fields and
|
||||
len(fields) > 1
|
||||
):
|
||||
if fields and len(fields) > 1 and fields:
|
||||
if not isinstance(history_output, dict):
|
||||
raise ValueError(
|
||||
f'`{func_name}` is a multi-output FSP and should yield a '
|
||||
|
@ -153,7 +139,7 @@ async def fsp_compute(
|
|||
if key in history_output:
|
||||
output = history_output[key]
|
||||
|
||||
if history_by_field is None:
|
||||
if history is None:
|
||||
|
||||
if output is None:
|
||||
length = len(src.array)
|
||||
|
@ -163,7 +149,7 @@ async def fsp_compute(
|
|||
# using the first output, determine
|
||||
# the length of the struct-array that
|
||||
# will be pushed to shm.
|
||||
history_by_field = np.zeros(
|
||||
history = np.zeros(
|
||||
length,
|
||||
dtype=dst.array.dtype
|
||||
)
|
||||
|
@ -171,7 +157,7 @@ async def fsp_compute(
|
|||
if output is None:
|
||||
continue
|
||||
|
||||
history_by_field[key] = output
|
||||
history[key] = output
|
||||
|
||||
# single-key output stream
|
||||
else:
|
||||
|
@ -180,15 +166,11 @@ async def fsp_compute(
|
|||
f'`{func_name}` is a single output FSP and should yield an '
|
||||
'`np.ndarray` for history'
|
||||
)
|
||||
history_by_field = np.zeros(
|
||||
history = np.zeros(
|
||||
len(history_output),
|
||||
dtype=dst.array.dtype
|
||||
)
|
||||
history_by_field[func_name] = history_output
|
||||
|
||||
history_by_field['time'] = src_time[-len(history_by_field):]
|
||||
|
||||
history_output['time'] = src.array['time']
|
||||
history[func_name] = history_output
|
||||
|
||||
# TODO: XXX:
|
||||
# THERE'S A BIG BUG HERE WITH THE `index` field since we're
|
||||
|
@ -205,10 +187,7 @@ async def fsp_compute(
|
|||
|
||||
# TODO: can we use this `start` flag instead of the manual
|
||||
# setting above?
|
||||
index = dst.push(
|
||||
history_by_field,
|
||||
start=first,
|
||||
)
|
||||
index = dst.push(history, start=first)
|
||||
|
||||
profiler(f'{func_name} pushed history')
|
||||
profiler.finish()
|
||||
|
@ -234,14 +213,8 @@ async def fsp_compute(
|
|||
|
||||
log.debug(f"{func_name}: {processed}")
|
||||
key, output = processed
|
||||
# dst.array[-1][key] = output
|
||||
dst.array[[key, 'time']][-1] = (
|
||||
output,
|
||||
# TODO: what about pushing ``time.time_ns()``
|
||||
# in which case we'll need to round at the graphics
|
||||
# processing / sampling layer?
|
||||
src.array[-1]['time']
|
||||
)
|
||||
index = src.index
|
||||
dst.array[-1][key] = output
|
||||
|
||||
# NOTE: for now we aren't streaming this to the consumer
|
||||
# stream latest array index entry which basically just acts
|
||||
|
@ -252,7 +225,6 @@ async def fsp_compute(
|
|||
# N-consumers who subscribe for the real-time output,
|
||||
# which we'll likely want to implement using local-mem
|
||||
# chans for the fan out?
|
||||
# index = src.index
|
||||
# if attach_stream:
|
||||
# await client_stream.send(index)
|
||||
|
||||
|
@ -289,7 +261,7 @@ async def cascade(
|
|||
destination shm array buffer.
|
||||
|
||||
'''
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
delayed=False,
|
||||
disabled=False
|
||||
)
|
||||
|
@ -312,10 +284,9 @@ async def cascade(
|
|||
# TODO: ugh i hate this wind/unwind to list over the wire
|
||||
# but not sure how else to do it.
|
||||
for (token, fsp_name, dst_token) in shm_registry:
|
||||
Fsp._flow_registry[(
|
||||
_Token.from_msg(token),
|
||||
fsp_name,
|
||||
)] = _Token.from_msg(dst_token), None
|
||||
Fsp._flow_registry[
|
||||
(_Token.from_msg(token), fsp_name)
|
||||
] = _Token.from_msg(dst_token)
|
||||
|
||||
fsp: Fsp = reg.get(
|
||||
NamespacePath(ns_path)
|
||||
|
@ -327,7 +298,6 @@ async def cascade(
|
|||
raise ValueError(f'Unknown fsp target: {ns_path}')
|
||||
|
||||
# open a data feed stream with requested broker
|
||||
feed: Feed
|
||||
async with data.feed.maybe_open_feed(
|
||||
[fqsn],
|
||||
|
||||
|
@ -337,13 +307,14 @@ async def cascade(
|
|||
# needs to get throttled the ticks we generate.
|
||||
# tick_throttle=60,
|
||||
|
||||
) as feed:
|
||||
) as (feed, quote_stream):
|
||||
symbol = feed.symbols[fqsn]
|
||||
|
||||
flume = feed.flumes[fqsn]
|
||||
symbol = flume.symbol
|
||||
assert src.token == flume.rt_shm.token
|
||||
profiler(f'{func}: feed up')
|
||||
|
||||
assert src.token == feed.shm.token
|
||||
# last_len = new_len = len(src.array)
|
||||
|
||||
func_name = func.__name__
|
||||
async with (
|
||||
trio.open_nursery() as n,
|
||||
|
@ -353,8 +324,8 @@ async def cascade(
|
|||
|
||||
fsp_compute,
|
||||
symbol=symbol,
|
||||
flume=flume,
|
||||
quote_stream=flume.stream,
|
||||
feed=feed,
|
||||
quote_stream=quote_stream,
|
||||
|
||||
# shm
|
||||
src=src,
|
||||
|
@ -390,7 +361,7 @@ async def cascade(
|
|||
) -> tuple[TaskTracker, int]:
|
||||
# TODO: adopt an incremental update engine/approach
|
||||
# where possible here eventually!
|
||||
log.info(f're-syncing fsp {func_name} to source')
|
||||
log.debug(f're-syncing fsp {func_name} to source')
|
||||
tracker.cs.cancel()
|
||||
await tracker.complete.wait()
|
||||
tracker, index = await n.start(fsp_target)
|
||||
|
@ -403,16 +374,14 @@ async def cascade(
|
|||
'key': dst_shm_token,
|
||||
'first': dst._first.value,
|
||||
'last': dst._last.value,
|
||||
}
|
||||
})
|
||||
}})
|
||||
return tracker, index
|
||||
|
||||
def is_synced(
|
||||
src: ShmArray,
|
||||
dst: ShmArray
|
||||
) -> tuple[bool, int, int]:
|
||||
'''
|
||||
Predicate to determine if a destination FSP
|
||||
'''Predicate to determine if a destination FSP
|
||||
output array is aligned to its source array.
|
||||
|
||||
'''
|
||||
|
@ -421,15 +390,16 @@ async def cascade(
|
|||
return not (
|
||||
# the source is likely backfilling and we must
|
||||
# sync history calculations
|
||||
len_diff > 2
|
||||
len_diff > 2 or
|
||||
|
||||
# we aren't step synced to the source and may be
|
||||
# leading/lagging by a step
|
||||
or step_diff > 1
|
||||
or step_diff < 0
|
||||
step_diff > 1 or
|
||||
step_diff < 0
|
||||
), step_diff, len_diff
|
||||
|
||||
async def poll_and_sync_to_step(
|
||||
|
||||
tracker: TaskTracker,
|
||||
src: ShmArray,
|
||||
dst: ShmArray,
|
||||
|
@ -448,23 +418,18 @@ async def cascade(
|
|||
# detect sample period step for subscription to increment
|
||||
# signal
|
||||
times = src.array['time']
|
||||
if len(times) > 1:
|
||||
last_ts = times[-1]
|
||||
delay_s = float(last_ts - times[times != last_ts][-1])
|
||||
else:
|
||||
# our default "HFT" sample rate.
|
||||
delay_s = _default_delay_s
|
||||
delay_s = times[-1] - times[times != times[-1]][-1]
|
||||
|
||||
# sub and increment the underlying shared memory buffer
|
||||
# on every step msg received from the global `samplerd`
|
||||
# service.
|
||||
async with open_sample_stream(float(delay_s)) as istream:
|
||||
# Increment the underlying shared memory buffer on every
|
||||
# "increment" msg received from the underlying data feed.
|
||||
async with feed.index_stream(
|
||||
int(delay_s)
|
||||
) as istream:
|
||||
|
||||
profiler(f'{func_name}: sample stream up')
|
||||
profiler.finish()
|
||||
|
||||
async for i in istream:
|
||||
# print(f'FSP incrementing {i}')
|
||||
async for _ in istream:
|
||||
|
||||
# respawn the compute task if the source
|
||||
# array has been updated such that we compute
|
||||
|
@ -493,23 +458,3 @@ async def cascade(
|
|||
last = array[-1:].copy()
|
||||
|
||||
dst.push(last)
|
||||
|
||||
# sync with source buffer's time step
|
||||
src_l2 = src.array[-2:]
|
||||
src_li, src_lt = src_l2[-1][['index', 'time']]
|
||||
src_2li, src_2lt = src_l2[-2][['index', 'time']]
|
||||
dst._array['time'][src_li] = src_lt
|
||||
dst._array['time'][src_2li] = src_2lt
|
||||
|
||||
# last2 = dst.array[-2:]
|
||||
# if (
|
||||
# last2[-1]['index'] != src_li
|
||||
# or last2[-2]['index'] != src_2li
|
||||
# ):
|
||||
# dstl2 = list(last2)
|
||||
# srcl2 = list(src_l2)
|
||||
# print(
|
||||
# # f'{dst.token}\n'
|
||||
# f'src: {srcl2}\n'
|
||||
# f'dst: {dstl2}\n'
|
||||
# )
|
||||
|
|
|
@ -234,7 +234,7 @@ async def flow_rates(
|
|||
# FSPs, user input, and possibly any general event stream in
|
||||
# real-time. Hint: ideally implemented with caching until mutated
|
||||
# ;)
|
||||
period: 'Param[int]' = 1, # noqa
|
||||
period: 'Param[int]' = 6, # noqa
|
||||
|
||||
# TODO: support other means by providing a map
|
||||
# to weights `partial()`-ed with `wma()`?
|
||||
|
@ -268,7 +268,8 @@ async def flow_rates(
|
|||
'dark_dvlm_rate': None,
|
||||
}
|
||||
|
||||
quote = await anext(source)
|
||||
# TODO: 3.10 do ``anext()``
|
||||
quote = await source.__anext__()
|
||||
|
||||
# ltr = 0
|
||||
# lvr = 0
|
||||
|
|
1039
piker/pp.py
File diff suppressed because it is too large
|
@ -1 +0,0 @@
|
|||
TEST_CONFIG_DIR_PATH = '_testing'
|
|
@ -32,22 +32,16 @@ def mk_marker_path(
|
|||
style: str,
|
||||
|
||||
) -> QGraphicsPathItem:
|
||||
'''
|
||||
Add a marker to be displayed on the line wrapped in
|
||||
a ``QGraphicsPathItem`` ready to be placed using scene coordinates
|
||||
(not view).
|
||||
"""Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
|
||||
ready to be placed using scene coordinates (not view).
|
||||
|
||||
**Arguments**
|
||||
style String indicating the style of marker to add:
|
||||
``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
|
||||
``'>|<'``, ``'^'``, ``'v'``, ``'o'``
|
||||
size Size of the marker in pixels.
|
||||
|
||||
This code is taken nearly verbatim from the
|
||||
`InfiniteLine.addMarker()` method but does not attempt to be aware
|
||||
of low(er) level graphics controls and expects the output
|
||||
polygon to be applied to a ``QGraphicsPathItem``.
|
||||
|
||||
'''
|
||||
"""
|
||||
path = QtGui.QPainterPath()
|
||||
|
||||
if style == 'o':
|
||||
|
@ -93,8 +87,7 @@ def mk_marker_path(
|
|||
|
||||
|
||||
class LevelMarker(QGraphicsPathItem):
|
||||
'''
|
||||
An arrow marker path graphic which redraws itself
|
||||
'''An arrow marker path graphic which redraws itself
|
||||
to the specified view coordinate level on each paint cycle.
|
||||
|
||||
'''
|
||||
|
@ -111,8 +104,7 @@ class LevelMarker(QGraphicsPathItem):
|
|||
|
||||
# get polygon and scale
|
||||
super().__init__()
|
||||
# self.setScale(size, size)
|
||||
self.setScale(size)
|
||||
self.scale(size, size)
|
||||
|
||||
# interally generates path
|
||||
self._style = None
|
||||
|
@ -122,7 +114,6 @@ class LevelMarker(QGraphicsPathItem):
|
|||
|
||||
self.get_level = get_level
|
||||
self._on_paint = on_paint
|
||||
|
||||
self.scene_x = lambda: chart.marker_right_points()[1]
|
||||
self.level: float = 0
|
||||
self.keep_in_view = keep_in_view
|
||||
|
@ -158,9 +149,12 @@ class LevelMarker(QGraphicsPathItem):
|
|||
def w(self) -> float:
|
||||
return self.path_br().width()
|
||||
|
||||
def position_in_view(self) -> None:
|
||||
'''
|
||||
Show a pp off-screen indicator for a level label.
|
||||
def position_in_view(
|
||||
self,
|
||||
# level: float,
|
||||
|
||||
) -> None:
|
||||
'''Show a pp off-screen indicator for a level label.
|
||||
|
||||
This is like in fps games where you have a gps "nav" indicator
|
||||
but your teammate is outside the range of view, except in 2D, on
|
||||
|
@ -168,6 +162,7 @@ class LevelMarker(QGraphicsPathItem):
|
|||
|
||||
'''
|
||||
level = self.get_level()
|
||||
|
||||
view = self.chart.getViewBox()
|
||||
vr = view.state['viewRange']
|
||||
ymn, ymx = vr[1]
|
||||
|
@ -191,6 +186,7 @@ class LevelMarker(QGraphicsPathItem):
|
|||
)
|
||||
|
||||
elif level < ymn: # pin to bottom of view
|
||||
|
||||
self.setPos(
|
||||
QPointF(
|
||||
x,
|
||||
|
@ -215,8 +211,7 @@ class LevelMarker(QGraphicsPathItem):
|
|||
w: QtWidgets.QWidget
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Core paint which we override to always update
|
||||
'''Core paint which we override to always update
|
||||
our marker position in scene coordinates from a
|
||||
view coordinate "level".
|
||||
|
||||
|
@ -240,12 +235,11 @@ def qgo_draw_markers(
|
|||
right_offset: float,
|
||||
|
||||
) -> float:
|
||||
'''
|
||||
Paint markers in ``pg.GraphicsItem`` style by first
|
||||
"""Paint markers in ``pg.GraphicsItem`` style by first
|
||||
removing the view transform for the painter, drawing the markers
|
||||
in scene coords, then restoring the view coords.
|
||||
|
||||
'''
|
||||
"""
|
||||
# paint markers in native coordinate system
|
||||
orig_tr = p.transform()
|
||||
|
||||
|
|
|
@ -19,16 +19,15 @@ Main app startup and run.
|
|||
|
||||
'''
|
||||
from functools import partial
|
||||
from types import ModuleType
|
||||
|
||||
from PyQt5.QtCore import QEvent
|
||||
import trio
|
||||
|
||||
from .._daemon import maybe_spawn_brokerd
|
||||
from ..brokers import get_brokermod
|
||||
from . import _event
|
||||
from ._exec import run_qtractor
|
||||
from ..data.feed import install_brokerd_search
|
||||
from ..data._source import unpack_fqsn
|
||||
from . import _search
|
||||
from ._chart import GodWidget
|
||||
from ..log import get_logger
|
||||
|
@ -37,26 +36,27 @@ log = get_logger(__name__)
|
|||
|
||||
|
||||
async def load_provider_search(
|
||||
brokermod: str,
|
||||
|
||||
broker: str,
|
||||
loglevel: str,
|
||||
|
||||
) -> None:
|
||||
|
||||
name = brokermod.name
|
||||
log.info(f'loading brokerd for {name}..')
|
||||
log.info(f'loading brokerd for {broker}..')
|
||||
|
||||
async with (
|
||||
|
||||
maybe_spawn_brokerd(
|
||||
name,
|
||||
broker,
|
||||
loglevel=loglevel
|
||||
) as portal,
|
||||
|
||||
install_brokerd_search(
|
||||
portal,
|
||||
brokermod,
|
||||
get_brokermod(broker),
|
||||
),
|
||||
):
|
||||
|
||||
# keep search engine stream up until cancelled
|
||||
await trio.sleep_forever()
|
||||
|
||||
|
@ -66,8 +66,8 @@ async def _async_main(
|
|||
# implicit required argument provided by ``qtractor_run()``
|
||||
main_widget: GodWidget,
|
||||
|
||||
syms: list[str],
|
||||
brokers: dict[str, ModuleType],
|
||||
sym: str,
|
||||
brokernames: str,
|
||||
loglevel: str,
|
||||
|
||||
) -> None:
|
||||
|
@ -78,8 +78,6 @@ async def _async_main(
|
|||
|
||||
"""
|
||||
from . import _display
|
||||
from ._pg_overrides import _do_overrides
|
||||
_do_overrides()
|
||||
|
||||
godwidget = main_widget
|
||||
|
||||
|
@ -99,11 +97,6 @@ async def _async_main(
|
|||
sbar = godwidget.window.status_bar
|
||||
starting_done = sbar.open_status('starting ze sexy chartz')
|
||||
|
||||
needed_brokermods: dict[str, ModuleType] = {}
|
||||
for fqsn in syms:
|
||||
brokername, *_ = unpack_fqsn(fqsn)
|
||||
needed_brokermods[brokername] = brokers[brokername]
|
||||
|
||||
async with (
|
||||
trio.open_nursery() as root_n,
|
||||
):
|
||||
|
@ -114,14 +107,18 @@ async def _async_main(
|
|||
# setup search widget and focus main chart view at startup
|
||||
# search widget is a singleton alongside the godwidget
|
||||
search = _search.SearchWidget(godwidget=godwidget)
|
||||
# search.bar.unfocus()
|
||||
# godwidget.hbox.addWidget(search)
|
||||
search.bar.unfocus()
|
||||
|
||||
godwidget.hbox.addWidget(search)
|
||||
godwidget.search = search
|
||||
|
||||
symbol, _, provider = sym.rpartition('.')
|
||||
|
||||
# this internally starts a ``display_symbol_data()`` task above
|
||||
order_mode_ready = await godwidget.load_symbols(
|
||||
fqsns=syms,
|
||||
loglevel=loglevel,
|
||||
order_mode_ready = await godwidget.load_symbol(
|
||||
provider,
|
||||
symbol,
|
||||
loglevel
|
||||
)
|
||||
|
||||
# spin up a search engine for the local cached symbol set
|
||||
|
@ -138,12 +135,8 @@ async def _async_main(
|
|||
):
|
||||
# load other providers into search **after**
|
||||
# the chart's select cache
|
||||
for brokername, mod in needed_brokermods.items():
|
||||
root_n.start_soon(
|
||||
load_provider_search,
|
||||
mod,
|
||||
loglevel,
|
||||
)
|
||||
for broker in brokernames:
|
||||
root_n.start_soon(load_provider_search, broker, loglevel)
|
||||
|
||||
await order_mode_ready.wait()
|
||||
|
||||
|
@ -172,22 +165,19 @@ async def _async_main(
|
|||
|
||||
|
||||
def _main(
|
||||
syms: list[str],
|
||||
brokermods: list[ModuleType],
|
||||
sym: str,
|
||||
brokernames: [str],
|
||||
piker_loglevel: str,
|
||||
tractor_kwargs,
|
||||
) -> None:
|
||||
'''
|
||||
Sync entry point to start a chart: a ``tractor`` + Qt runtime.
|
||||
Sync entry point to start a chart: a ``tractor`` + Qt runtime
|
||||
entry point
|
||||
|
||||
'''
|
||||
run_qtractor(
|
||||
func=_async_main,
|
||||
args=(
|
||||
syms,
|
||||
{mod.name: mod for mod in brokermods},
|
||||
piker_loglevel,
|
||||
),
|
||||
main_widget_type=GodWidget,
|
||||
args=(sym, brokernames, piker_loglevel),
|
||||
main_widget=GodWidget,
|
||||
tractor_kwargs=tractor_kwargs,
|
||||
)
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
Chart axes graphics and behavior.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from functools import lru_cache
|
||||
from typing import Optional, Callable
|
||||
from math import floor
|
||||
|
@ -28,7 +27,6 @@ import pyqtgraph as pg
|
|||
from PyQt5 import QtCore, QtGui, QtWidgets
|
||||
from PyQt5.QtCore import QPointF
|
||||
|
||||
from . import _pg_overrides as pgo
|
||||
from ..data._source import float_digits
|
||||
from ._label import Label
|
||||
from ._style import DpiAwareFont, hcolor, _font
|
||||
|
@ -41,17 +39,12 @@ class Axis(pg.AxisItem):
|
|||
'''
|
||||
A better axis that sizes tick contents considering font size.
|
||||
|
||||
Also includes tick values lru caching originally proposed but never
|
||||
accepted upstream:
|
||||
https://github.com/pyqtgraph/pyqtgraph/pull/2160
|
||||
|
||||
'''
|
||||
def __init__(
|
||||
self,
|
||||
plotitem: pgo.PlotItem,
|
||||
typical_max_str: str = '100 000.000 ',
|
||||
linkedsplits,
|
||||
typical_max_str: str = '100 000.000',
|
||||
text_color: str = 'bracket',
|
||||
lru_cache_tick_strings: bool = True,
|
||||
**kwargs
|
||||
|
||||
) -> None:
|
||||
|
@ -63,78 +56,41 @@ class Axis(pg.AxisItem):
|
|||
# XXX: pretty sure this makes things slower
|
||||
# self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
self.pi = plotitem
|
||||
self.linkedsplits = linkedsplits
|
||||
self._dpi_font = _font
|
||||
|
||||
self.setTickFont(_font.font)
|
||||
font_size = self._dpi_font.font.pixelSize()
|
||||
|
||||
style_conf = {
|
||||
'textFillLimits': [(0, 0.5)],
|
||||
'tickFont': self._dpi_font.font,
|
||||
|
||||
}
|
||||
text_offset = None
|
||||
if self.orientation in ('bottom',):
|
||||
text_offset = floor(0.25 * font_size)
|
||||
|
||||
elif self.orientation in ('left', 'right'):
|
||||
text_offset = floor(font_size / 2)
|
||||
|
||||
if text_offset:
|
||||
style_conf.update({
|
||||
self.setStyle(**{
|
||||
'textFillLimits': [(0, 0.5)],
|
||||
'tickFont': self._dpi_font.font,
|
||||
|
||||
# offset of text *away from* axis line in px
|
||||
# use approx. half the font pixel size (height)
|
||||
'tickTextOffset': text_offset,
|
||||
})
|
||||
|
||||
self.setStyle(**style_conf)
|
||||
self.setTickFont(_font.font)
|
||||
|
||||
# NOTE: this is for surrounding "border"
|
||||
self.setPen(_axis_pen)
|
||||
|
||||
# this is the text color
|
||||
# self.setTextPen(pg.mkPen(hcolor(text_color)))
|
||||
self.text_color = text_color
|
||||
|
||||
# generate a bounding rect based on sizing to a "typical"
|
||||
# maximum length-ed string defined as init default.
|
||||
self.typical_br = _font._qfm.boundingRect(typical_max_str)
|
||||
|
||||
# size the pertinent axis dimension to a "typical value"
|
||||
self.size_to_values()
|
||||
|
||||
# NOTE: requires the ``.tickValues()`` override seen below.
|
||||
if lru_cache_tick_strings:
|
||||
self.tickStrings = lru_cache(
|
||||
maxsize=2**20
|
||||
)(self.tickStrings)
|
||||
|
||||
# axis "sticky" labels
|
||||
self._stickies: dict[str, YAxisLabel] = {}
|
||||
|
||||
# NOTE: only overridden to cast tick values entries into tuples
|
||||
# for use with the lru caching.
|
||||
def tickValues(
|
||||
self,
|
||||
minVal: float,
|
||||
maxVal: float,
|
||||
size: int,
|
||||
|
||||
) -> list[tuple[float, tuple[str]]]:
|
||||
'''
|
||||
Repack tick values into tuples for lru caching.
|
||||
|
||||
'''
|
||||
ticks = []
|
||||
for scalar, values in super().tickValues(minVal, maxVal, size):
|
||||
ticks.append((
|
||||
scalar,
|
||||
tuple(values), # this
|
||||
))
|
||||
|
||||
return ticks
|
||||
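The ``__init__`` hunk above wraps ``tickStrings`` with ``functools.lru_cache``, and the ``tickValues()`` override just shown repacks each value array into a tuple because ``lru_cache`` requires hashable arguments. A small standalone sketch of the same caching trick (toy formatter, not the project's method):

from functools import lru_cache

@lru_cache(maxsize=2**20)
def tick_strings(values: tuple[float, ...], spacing: float) -> list[str]:
    # tuples (unlike lists or ndarrays) are hashable, so repeated calls
    # with the same tick values are answered from the cache.
    return [f'{v:.2f}' for v in values]

print(tick_strings((1.0, 2.0, 3.0), 1.0))
print(tick_strings.cache_info())  # a second identical call shows a hit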
|
||||
@property
|
||||
def text_color(self) -> str:
|
||||
return self._text_color
|
||||
|
@ -150,38 +106,6 @@ class Axis(pg.AxisItem):
|
|||
def txt_offsets(self) -> tuple[int, int]:
|
||||
return tuple(self.style['tickTextOffset'])
|
||||
|
||||
def add_sticky(
|
||||
self,
|
||||
pi: pgo.PlotItem,
|
||||
name: None | str = None,
|
||||
digits: None | int = 2,
|
||||
bg_color='default',
|
||||
fg_color='black',
|
||||
|
||||
) -> YAxisLabel:
|
||||
|
||||
# if the sticky is for our symbol
|
||||
# use the tick size precision for display
|
||||
name = name or pi.name
|
||||
digits = digits or 2
|
||||
|
||||
# TODO: ``._ysticks`` should really be an attr on each
|
||||
# ``PlotItem`` now instead of the containing widget (because of
|
||||
# overlays) ?
|
||||
|
||||
# add y-axis "last" value label
|
||||
sticky = self._stickies[name] = YAxisLabel(
|
||||
pi=pi,
|
||||
parent=self,
|
||||
digits=digits, # TODO: pass this from symbol data
|
||||
opacity=0.9, # slight see-through
|
||||
bg_color=bg_color,
|
||||
fg_color=fg_color,
|
||||
)
|
||||
|
||||
pi.sigRangeChanged.connect(sticky.update_on_resize)
|
||||
return sticky
|
||||
|
||||
|
||||
class PriceAxis(Axis):
|
||||
|
||||
|
@ -243,6 +167,7 @@ class PriceAxis(Axis):
|
|||
self._min_tick = size
|
||||
|
||||
def size_to_values(self) -> None:
|
||||
# self.typical_br = _font._qfm.boundingRect(typical_max_str)
|
||||
self.setWidth(self.typical_br.width())
|
||||
|
||||
# XXX: drop for now since it just eats up h space
|
||||
|
@ -297,50 +222,28 @@ class DynamicDateAxis(Axis):
|
|||
|
||||
) -> list[str]:
|
||||
|
||||
# XX: ARGGGGG AG:LKSKDJF:LKJSDFD
|
||||
chart = self.pi.chart_widget
|
||||
|
||||
viz = chart._vizs[chart.name]
|
||||
shm = viz.shm
|
||||
array = shm.array
|
||||
times = array['time']
|
||||
i_0, i_l = times[0], times[-1]
|
||||
|
||||
# edge cases
|
||||
if (
|
||||
not indexes
|
||||
or
|
||||
(indexes[0] < i_0
|
||||
and indexes[-1] < i_l)
|
||||
or
|
||||
(indexes[0] > i_0
|
||||
and indexes[-1] > i_l)
|
||||
):
|
||||
return []
|
||||
|
||||
if viz.index_field == 'index':
|
||||
arr_len = times.shape[0]
|
||||
chart = self.linkedsplits.chart
|
||||
flow = chart._flows[chart.name]
|
||||
shm = flow.shm
|
||||
bars = shm.array
|
||||
first = shm._first.value
|
||||
epochs = times[
|
||||
list(
|
||||
|
||||
bars_len = len(bars)
|
||||
times = bars['time']
|
||||
|
||||
epochs = times[list(
|
||||
map(
|
||||
int,
|
||||
filter(
|
||||
lambda i: i > 0 and i < arr_len,
|
||||
(i - first for i in indexes)
|
||||
lambda i: i > 0 and i < bars_len,
|
||||
(i-first for i in indexes)
|
||||
)
|
||||
)
|
||||
)
|
||||
]
|
||||
else:
|
||||
epochs = list(map(int, indexes))
|
||||
)]
|
||||
|
||||
# TODO: **don't** have this hard coded shift to EST
|
||||
# delay = times[-1] - times[-2]
|
||||
dts = np.array(
|
||||
epochs,
|
||||
dtype='datetime64[s]',
|
||||
)
|
||||
dts = np.array(epochs, dtype='datetime64[s]')
|
||||
|
||||
# see units listing:
|
||||
# https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
|
||||
|
@ -358,39 +261,24 @@ class DynamicDateAxis(Axis):
|
|||
spacing: float,
|
||||
|
||||
) -> list[str]:
|
||||
|
||||
return self._indexes_to_timestrs(values)
|
||||
|
||||
# NOTE: handy for debugging the lru cache
|
||||
# info = self.tickStrings.cache_info()
|
||||
# print(info)
|
||||
return self._indexes_to_timestrs(values)
|
||||
|
||||
|
||||
class AxisLabel(pg.GraphicsObject):
|
||||
|
||||
# relative offsets *OF* the bounding rect relative
|
||||
# to parent graphics object.
|
||||
# eg. <parent>| => <_x_br_offset> => | <text> |
|
||||
_x_br_offset: float = 0
|
||||
_y_br_offset: float = 0
|
||||
|
||||
# relative offsets of text *within* bounding rect
|
||||
# eg. | <_x_margin> => <text> |
|
||||
_x_margin: float = 0
|
||||
_y_margin: float = 0
|
||||
|
||||
# multiplier of the text content's height in order
|
||||
# to force a larger (y-dimension) bounding rect.
|
||||
_y_txt_h_scaling: float = 1
|
||||
_x_margin = 0
|
||||
_y_margin = 0
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
parent: pg.GraphicsItem,
|
||||
digits: int = 2,
|
||||
|
||||
bg_color: str = 'default',
|
||||
bg_color: str = 'bracket',
|
||||
fg_color: str = 'black',
|
||||
opacity: int = .8, # XXX: seriously don't set this to 0
|
||||
opacity: int = 1, # XXX: seriously don't set this to 0
|
||||
font_size: str = 'default',
|
||||
|
||||
use_arrow: bool = True,
|
||||
|
@ -401,7 +289,6 @@ class AxisLabel(pg.GraphicsObject):
|
|||
self.setParentItem(parent)
|
||||
|
||||
self.setFlag(self.ItemIgnoresTransformations)
|
||||
self.setZValue(100)
|
||||
|
||||
# XXX: pretty sure this is faster
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
@ -433,14 +320,14 @@ class AxisLabel(pg.GraphicsObject):
|
|||
p: QtGui.QPainter,
|
||||
opt: QtWidgets.QStyleOptionGraphicsItem,
|
||||
w: QtWidgets.QWidget
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Draw a filled rectangle based on the size of ``.label_str`` text.
|
||||
"""Draw a filled rectangle based on the size of ``.label_str`` text.
|
||||
|
||||
Subtypes can customize further by overloading ``.draw()``.
|
||||
|
||||
'''
|
||||
"""
|
||||
# p.setCompositionMode(QtWidgets.QPainter.CompositionMode_SourceOver)
|
||||
|
||||
if self.label_str:
|
||||
|
||||
# if not self.rect:
|
||||
|
@ -451,11 +338,7 @@ class AxisLabel(pg.GraphicsObject):
|
|||
|
||||
p.setFont(self._dpifont.font)
|
||||
p.setPen(self.fg_color)
|
||||
p.drawText(
|
||||
self.rect,
|
||||
self.text_flags,
|
||||
self.label_str,
|
||||
)
|
||||
p.drawText(self.rect, self.text_flags, self.label_str)
|
||||
|
||||
def draw(
|
||||
self,
|
||||
|
@ -463,8 +346,6 @@ class AxisLabel(pg.GraphicsObject):
|
|||
rect: QtCore.QRectF
|
||||
) -> None:
|
||||
|
||||
p.setOpacity(self.opacity)
|
||||
|
||||
if self._use_arrow:
|
||||
if not self.path:
|
||||
self._draw_arrow_path()
|
||||
|
@ -472,13 +353,15 @@ class AxisLabel(pg.GraphicsObject):
|
|||
p.drawPath(self.path)
|
||||
p.fillPath(self.path, pg.mkBrush(self.bg_color))
|
||||
|
||||
# this adds a nice black outline around the label for some odd
|
||||
# reason; ok by us
|
||||
p.setOpacity(self.opacity)
|
||||
|
||||
# this cause the L1 labels to glitch out if used in the subtype
|
||||
# and it will leave a small black strip with the arrow path if
|
||||
# done before the above
|
||||
p.fillRect(
|
||||
self.rect,
|
||||
self.bg_color,
|
||||
)
|
||||
p.fillRect(self.rect, self.bg_color)
|
||||
|
||||
|
||||
def boundingRect(self): # noqa
|
||||
'''
|
||||
|
@ -522,18 +405,15 @@ class AxisLabel(pg.GraphicsObject):
|
|||
txt_h, txt_w = txt_br.height(), txt_br.width()
|
||||
# print(f'wsw: {self._dpifont.boundingRect(" ")}')
|
||||
|
||||
# allow subtypes to override width and height
|
||||
# allow subtypes to specify a static width and height
|
||||
h, w = self.size_hint()
|
||||
# print(f'axis size: {self._parent.size()}')
|
||||
# print(f'axis geo: {self._parent.geometry()}')
|
||||
|
||||
self.rect = QtCore.QRectF(
|
||||
|
||||
# relative bounds offsets
|
||||
self._x_br_offset,
|
||||
self._y_br_offset,
|
||||
|
||||
0, 0,
|
||||
(w or txt_w) + self._x_margin / 2,
|
||||
|
||||
(h or txt_h) * self._y_txt_h_scaling + (self._y_margin / 2),
|
||||
(h or txt_h) + self._y_margin / 2,
|
||||
)
|
||||
# print(self.rect)
|
||||
# hb = self.path.controlPointRect()
|
||||
|
@ -609,7 +489,7 @@ class XAxisLabel(AxisLabel):
|
|||
|
||||
|
||||
class YAxisLabel(AxisLabel):
|
||||
_y_margin: int = 4
|
||||
_y_margin = 4
|
||||
|
||||
text_flags = (
|
||||
QtCore.Qt.AlignLeft
|
||||
|
@ -620,19 +500,19 @@ class YAxisLabel(AxisLabel):
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
pi: pgo.PlotItem,
|
||||
chart,
|
||||
*args,
|
||||
**kwargs
|
||||
) -> None:
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self._pi = pi
|
||||
pi.sigRangeChanged.connect(self.update_on_resize)
|
||||
self._chart = chart
|
||||
|
||||
chart.sigRangeChanged.connect(self.update_on_resize)
|
||||
|
||||
self._last_datum = (None, None)
|
||||
|
||||
self.x_offset = 0
|
||||
# pull text offset from axis from parent axis
|
||||
if getattr(self._parent, 'txt_offsets', False):
|
||||
self.x_offset, y_offset = self._parent.txt_offsets()
|
||||
|
@ -651,8 +531,7 @@ class YAxisLabel(AxisLabel):
|
|||
value: float, # data for text
|
||||
|
||||
# on odd dimension and/or adds nice black line
|
||||
x_offset: int = 0,
|
||||
|
||||
x_offset: Optional[int] = None
|
||||
) -> None:
|
||||
|
||||
# this is read inside ``.paint()``
|
||||
|
@ -698,7 +577,7 @@ class YAxisLabel(AxisLabel):
|
|||
self._last_datum = (index, value)
|
||||
|
||||
self.update_label(
|
||||
self._pi.mapFromView(QPointF(index, value)),
|
||||
self._chart.mapFromView(QPointF(index, value)),
|
||||
value
|
||||
)
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -15,30 +15,17 @@
|
|||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
Graphics downsampling using the infamous M4 algorithm.
|
||||
|
||||
This is one of ``piker``'s secret weapons allowing us to boss all other
|
||||
charting platforms B)
|
||||
|
||||
(AND DON'T YOU DARE TAKE THIS CODE WITHOUT CREDIT OR WE'LL SUE UR F#&@* ASS).
|
||||
|
||||
NOTES: this method is a so called "visualization driven data
|
||||
aggregation" approach. It gives error-free line chart
|
||||
downsampling, see
|
||||
further scientific paper resources:
|
||||
- http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
|
||||
- http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
|
||||
|
||||
Details on implementation of this algo are based in,
|
||||
https://github.com/pikers/piker/issues/109
|
||||
Graphics related downsampling routines for compressing to pixel
|
||||
limits on the display device.
|
||||
|
||||
'''
|
||||
import math
|
||||
from typing import Optional
|
||||
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
from numba import (
|
||||
njit,
|
||||
jit,
|
||||
# float64, optional, int64,
|
||||
)
|
||||
|
||||
|
@ -48,6 +35,109 @@ from ..log import get_logger
|
|||
log = get_logger(__name__)
|
||||
|
||||
|
||||
def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
|
||||
'''
|
||||
Convert a OHLC struct-array containing 'high'/'low' columns
|
||||
to a "joined" max/min 1-d array.
|
||||
|
||||
'''
|
||||
index = ohlc['index']
|
||||
hls = ohlc[[
|
||||
'low',
|
||||
'high',
|
||||
]]
|
||||
|
||||
mxmn = np.empty(2*hls.size, dtype=np.float64)
|
||||
x = np.empty(2*hls.size, dtype=np.float64)
|
||||
trace_hl(hls, mxmn, x, index[0])
|
||||
x = x + index[0]
|
||||
|
||||
return mxmn, x
|
||||
|
||||
|
||||
@jit(
|
||||
# TODO: the type annots..
|
||||
# float64[:](float64[:],),
|
||||
nopython=True,
|
||||
)
|
||||
def trace_hl(
|
||||
hl: 'np.ndarray',
|
||||
out: np.ndarray,
|
||||
x: np.ndarray,
|
||||
start: int,
|
||||
|
||||
# the "offset" values in the x-domain which
|
||||
# place the 2 output points around each ``int``
|
||||
# master index.
|
||||
margin: float = 0.43,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
"Trace" the outline of the high-low values of an ohlc sequence
|
||||
as a line such that the maximum deviation (aka dispersion) between
|
||||
bars is preserved.
|
||||
|
||||
This routine is expected to modify input arrays in-place.
|
||||
|
||||
'''
|
||||
last_l = hl['low'][0]
|
||||
last_h = hl['high'][0]
|
||||
|
||||
for i in range(hl.size):
|
||||
row = hl[i]
|
||||
l, h = row['low'], row['high']
|
||||
|
||||
up_diff = h - last_l
|
||||
down_diff = last_h - l
|
||||
|
||||
if up_diff > down_diff:
|
||||
out[2*i + 1] = h
|
||||
out[2*i] = last_l
|
||||
else:
|
||||
out[2*i + 1] = l
|
||||
out[2*i] = last_h
|
||||
|
||||
last_l = l
|
||||
last_h = h
|
||||
|
||||
x[2*i] = int(i) - margin
|
||||
x[2*i + 1] = int(i) + margin
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def ohlc_flatten(
|
||||
ohlc: np.ndarray,
|
||||
use_mxmn: bool = True,
|
||||
|
||||
) -> tuple[np.ndarray, np.ndarray]:
|
||||
'''
|
||||
Convert an OHLCV struct-array into a flat ready-for-line-plotting
|
||||
1-d array that is 4 times the size with x-domain values distributed
|
||||
evenly (by 0.5 steps) over each index.
|
||||
|
||||
'''
|
||||
index = ohlc['index']
|
||||
|
||||
if use_mxmn:
|
||||
# traces a line optimally over highs to lows
|
||||
# using numba. NOTE: pretty sure this is faster
|
||||
# and looks about the same as the below output.
|
||||
flat, x = hl2mxmn(ohlc)
|
||||
|
||||
else:
|
||||
flat = rfn.structured_to_unstructured(
|
||||
ohlc[['open', 'high', 'low', 'close']]
|
||||
).flatten()
|
||||
|
||||
x = np.linspace(
|
||||
start=index[0] - 0.5,
|
||||
stop=index[-1] + 0.5,
|
||||
num=len(flat),
|
||||
)
|
||||
return x, flat
|
||||
|
||||
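The ``ohlc_flatten()`` docstring above describes expanding each OHLC row into four line samples spread over its index slot. A toy numpy illustration of just that x-grid layout (illustrative only, mirroring the ``np.linspace`` call in the hunk):

import numpy as np

index = np.array([10, 11, 12])  # three OHLC rows
n_rows = len(index)

# four samples per row, spread evenly from index[0] - 0.5 to index[-1] + 0.5
x = np.linspace(index[0] - 0.5, index[-1] + 0.5, num=4 * n_rows)
print(x.reshape(n_rows, 4))  # x positions for each row's o/h/l/c samples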
|
||||
def ds_m4(
|
||||
x: np.ndarray,
|
||||
y: np.ndarray,
|
||||
|
@ -70,6 +160,16 @@ def ds_m4(
|
|||
This is more or less an OHLC style sampling of a line-style series.
|
||||
|
||||
'''
|
||||
# NOTE: this method is a so called "visualization driven data
|
||||
# aggregation" approach. It gives error-free line chart
|
||||
# downsampling, see
|
||||
# further scientific paper resources:
|
||||
# - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
|
||||
# - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
|
||||
|
||||
# Details on implementation of this algo are based in,
|
||||
# https://github.com/pikers/piker/issues/109
|
||||
|
||||
# XXX: from infinite on downsampling viewable graphics:
|
||||
# "one thing i remembered about the binning - if you are
|
||||
# picking a range within your timeseries the start and end bin
|
||||
|
@ -91,14 +191,6 @@ def ds_m4(
|
|||
x_end = x[-1] # x end value/highest in domain
|
||||
xrange = (x_end - x_start)
|
||||
|
||||
if xrange < 0:
|
||||
log.error(f'-VE M4 X-RANGE: {x_start} -> {x_end}')
|
||||
# XXX: broken x-range calc-case, likely the x-end points
|
||||
# are wrong and have some default value set (such as
|
||||
# x_end -> <some epoch float> while x_start -> 0.5).
|
||||
# breakpoint()
|
||||
return None
|
||||
|
||||
# XXX: always round up on the input pixels
|
||||
# lnx = len(x)
|
||||
# uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
|
||||
|
@ -131,20 +223,14 @@ def ds_m4(
|
|||
assert frames >= (xrange / uppx)
|
||||
|
||||
# call into ``numba``
|
||||
(
|
||||
nb,
|
||||
x_out,
|
||||
y_out,
|
||||
ymn,
|
||||
ymx,
|
||||
) = _m4(
|
||||
nb, i_win, y_out = _m4(
|
||||
x,
|
||||
y,
|
||||
|
||||
frames,
|
||||
|
||||
# TODO: see func below..
|
||||
# x_out,
|
||||
# i_win,
|
||||
# y_out,
|
||||
|
||||
# first index in x data to start at
|
||||
|
@ -157,14 +243,14 @@ def ds_m4(
|
|||
# filter out any overshoot in the input allocation arrays by
|
||||
# removing zero-ed tail entries which should start at a certain
|
||||
# index.
|
||||
x_out = x_out[x_out != 0]
|
||||
y_out = y_out[:x_out.size]
|
||||
i_win = i_win[i_win != 0]
|
||||
y_out = y_out[:i_win.size]
|
||||
|
||||
# print(f'M4 output ymn, ymx: {ymn},{ymx}')
|
||||
return nb, x_out, y_out, ymn, ymx
|
||||
return nb, i_win, y_out
|
||||
|
||||
|
||||
@njit(
|
||||
@jit(
|
||||
nopython=True,
|
||||
nogil=True,
|
||||
)
|
||||
def _m4(
|
||||
|
@ -174,8 +260,8 @@ def _m4(
|
|||
|
||||
frames: int,
|
||||
|
||||
# TODO: using this approach, having the ``.zeros()`` alloc lines
|
||||
# below in pure python, there were seg faults and alloc crashes..
|
||||
# TODO: using this approach by having the ``.zeros()`` alloc lines
|
||||
# below, in pure python was causing seg faults and alloc crashes..
|
||||
# we might need to see how it behaves with shm arrays and consider
|
||||
# allocating them once at startup?
|
||||
|
||||
|
@ -188,22 +274,14 @@ def _m4(
|
|||
x_start: int,
|
||||
step: float,
|
||||
|
||||
) -> tuple[
|
||||
int,
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
float,
|
||||
float,
|
||||
]:
|
||||
'''
|
||||
Implementation of the m4 algorithm in ``numba``:
|
||||
http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
|
||||
) -> int:
|
||||
# nbins = len(i_win)
|
||||
# count = len(xs)
|
||||
|
||||
'''
|
||||
# these are pre-allocated and mutated by ``numba``
|
||||
# code in-place.
|
||||
y_out = np.zeros((frames, 4), ys.dtype)
|
||||
x_out = np.zeros(frames, xs.dtype)
|
||||
i_win = np.zeros(frames, xs.dtype)
|
||||
|
||||
bincount = 0
|
||||
x_left = x_start
|
||||
|
@ -217,34 +295,24 @@ def _m4(
|
|||
|
||||
# set all bins in the left-most entry to the starting left-most x value
|
||||
# (aka a row broadcast).
|
||||
x_out[bincount] = x_left
|
||||
i_win[bincount] = x_left
|
||||
# set all y-values to the first value passed in.
|
||||
y_out[bincount] = ys[0]
|
||||
|
||||
# full input y-data mx and mn
|
||||
mx: float = -np.inf
|
||||
mn: float = np.inf
|
||||
|
||||
# compute OHLC style max / min values per window sized x-frame.
|
||||
for i in range(len(xs)):
|
||||
|
||||
x = xs[i]
|
||||
y = ys[i]
|
||||
|
||||
if x < x_left + step: # the current window "step" is [bin, bin+1)
|
||||
ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
|
||||
ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
|
||||
y_out[bincount, 1] = min(y, y_out[bincount, 1])
|
||||
y_out[bincount, 2] = max(y, y_out[bincount, 2])
|
||||
y_out[bincount, 3] = y
|
||||
mx = max(mx, ymx)
|
||||
mn = min(mn, ymn)
|
||||
|
||||
else:
|
||||
# Find the next bin
|
||||
while x >= x_left + step:
|
||||
x_left += step
|
||||
|
||||
bincount += 1
|
||||
x_out[bincount] = x_left
|
||||
i_win[bincount] = x_left
|
||||
y_out[bincount] = y
|
||||
|
||||
return bincount, x_out, y_out, mn, mx
|
||||
return bincount, i_win, y_out
|
|
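The ``_m4()`` hunks above walk the input left to right, bin it by x "frames" and keep the first, min, max and last y value per bin, OHLC-style. A compact pure-numpy sketch of that per-bin reduction (illustrative only; the numba version above additionally tracks bin-edge x values and, in one branch of the diff, the global y min/max):

import numpy as np

def m4_bins(x: np.ndarray, y: np.ndarray, nbins: int) -> np.ndarray:
    # assign each sample to a bin spanning the x-range
    edges = np.linspace(x[0], x[-1], nbins + 1)
    which = np.clip(np.searchsorted(edges, x, side='right') - 1, 0, nbins - 1)

    # per bin: open, min, max, close (bins with no samples stay zeroed)
    out = np.zeros((nbins, 4), dtype=y.dtype)
    for b in range(nbins):
        ys = y[which == b]
        if ys.size:
            out[b] = ys[0], ys.min(), ys.max(), ys[-1]
    return out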
@ -18,13 +18,8 @@
|
|||
Mouse interaction graphics
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from functools import partial
|
||||
from typing import (
|
||||
Optional,
|
||||
Callable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from typing import Optional, Callable
|
||||
|
||||
import inspect
|
||||
import numpy as np
|
||||
|
@ -41,12 +36,6 @@ from ._style import (
|
|||
from ._axes import YAxisLabel, XAxisLabel
|
||||
from ..log import get_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import (
|
||||
ChartPlotWidget,
|
||||
LinkedSplits,
|
||||
)
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -69,9 +58,9 @@ class LineDot(pg.CurvePoint):
|
|||
curve: pg.PlotCurveItem,
|
||||
index: int,
|
||||
|
||||
plot: ChartPlotWidget, # type: ignore # noqa
|
||||
plot: 'ChartPlotWidget', # type: ignore # noqa
|
||||
pos=None,
|
||||
color: str = 'bracket',
|
||||
color: str = 'default_light',
|
||||
|
||||
) -> None:
|
||||
# scale from dpi aware font size
|
||||
|
@ -162,7 +151,7 @@ class ContentsLabel(pg.LabelItem):
|
|||
def __init__(
|
||||
self,
|
||||
|
||||
# chart: ChartPlotWidget, # noqa
|
||||
# chart: 'ChartPlotWidget', # noqa
|
||||
view: pg.ViewBox,
|
||||
|
||||
anchor_at: str = ('top', 'right'),
|
||||
|
@ -198,11 +187,12 @@ class ContentsLabel(pg.LabelItem):
|
|||
self,
|
||||
|
||||
name: str,
|
||||
ix: int,
|
||||
index: int,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> None:
|
||||
# this being "html" is the dumbest shit :eyeroll:
|
||||
first = array[0]['index']
|
||||
|
||||
self.setText(
|
||||
"<b>i</b>:{index}<br/>"
|
||||
|
@ -215,7 +205,7 @@ class ContentsLabel(pg.LabelItem):
|
|||
"<b>C</b>:{}<br/>"
|
||||
"<b>V</b>:{}<br/>"
|
||||
"<b>wap</b>:{}".format(
|
||||
*array[ix][
|
||||
*array[index - first][
|
||||
[
|
||||
'time',
|
||||
'open',
|
||||
|
@ -227,7 +217,7 @@ class ContentsLabel(pg.LabelItem):
|
|||
]
|
||||
],
|
||||
name=name,
|
||||
index=ix,
|
||||
index=index,
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -235,11 +225,14 @@ class ContentsLabel(pg.LabelItem):
|
|||
self,
|
||||
|
||||
name: str,
|
||||
ix: int,
|
||||
index: int,
|
||||
array: np.ndarray,
|
||||
|
||||
) -> None:
|
||||
data = array[ix][name]
|
||||
|
||||
first = array[0]['index']
|
||||
if index < array[-1]['index'] and index > first:
|
||||
data = array[index - first][name]
|
||||
self.setText(f"{name}: {data:.2f}")
|
||||
|
||||
|
||||
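These ``ContentsLabel`` hunks translate a global chart index into an offset of the backing shared array by subtracting the first record's 'index' value. A tiny sketch of that lookup with a toy structured array (field names copied from the hunks, data invented):

import numpy as np

array = np.array(
    [(100, 1.0), (101, 2.0), (102, 3.0)],
    dtype=[('index', int), ('close', float)],
)
first = array[0]['index']

index = 101                 # a "global" chart index
row = array[index - first]  # offset into the backing array
print(row['close'])         # -> 2.0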
|
@ -251,7 +244,7 @@ class ContentsLabels:
|
|||
'''
|
||||
def __init__(
|
||||
self,
|
||||
linkedsplits: LinkedSplits, # type: ignore # noqa
|
||||
linkedsplits: 'LinkedSplits', # type: ignore # noqa
|
||||
|
||||
) -> None:
|
||||
|
||||
|
@ -265,20 +258,17 @@ class ContentsLabels:
|
|||
|
||||
def update_labels(
|
||||
self,
|
||||
x_in: int,
|
||||
index: int,
|
||||
|
||||
) -> None:
|
||||
for chart, name, label, update in self._labels:
|
||||
|
||||
viz = chart.get_viz(name)
|
||||
array = viz.shm.array
|
||||
index = array[viz.index_field]
|
||||
start = index[0]
|
||||
stop = index[-1]
|
||||
flow = chart._flows[name]
|
||||
array = flow.shm.array
|
||||
|
||||
if not (
|
||||
x_in >= start
|
||||
and x_in <= stop
|
||||
index >= 0
|
||||
and index < array[-1]['index']
|
||||
):
|
||||
# out of range
|
||||
print('WTF out of range?')
|
||||
|
@ -287,10 +277,7 @@ class ContentsLabels:
|
|||
# call provided update func with data point
|
||||
try:
|
||||
label.show()
|
||||
ix = np.searchsorted(index, x_in)
|
||||
if ix > len(array):
|
||||
breakpoint()
|
||||
update(ix, array)
|
||||
update(index, array)
|
||||
|
||||
except IndexError:
|
||||
log.exception(f"Failed to update label: {name}")
|
||||
|
@ -302,7 +289,7 @@ class ContentsLabels:
|
|||
def add_label(
|
||||
|
||||
self,
|
||||
chart: ChartPlotWidget, # type: ignore # noqa
|
||||
chart: 'ChartPlotWidget', # type: ignore # noqa
|
||||
name: str,
|
||||
anchor_at: tuple[str, str] = ('top', 'left'),
|
||||
update_func: Callable = ContentsLabel.update_from_value,
|
||||
|
@ -329,7 +316,7 @@ class Cursor(pg.GraphicsObject):
|
|||
def __init__(
|
||||
|
||||
self,
|
||||
linkedsplits: LinkedSplits, # noqa
|
||||
linkedsplits: 'LinkedSplits', # noqa
|
||||
digits: int = 0
|
||||
|
||||
) -> None:
|
||||
|
@ -338,8 +325,6 @@ class Cursor(pg.GraphicsObject):
|
|||
|
||||
self.linked = linkedsplits
|
||||
self.graphics: dict[str, pg.GraphicsObject] = {}
|
||||
self.xaxis_label: Optional[XAxisLabel] = None
|
||||
self.always_show_xlabel: bool = True
|
||||
self.plots: list['PlotChartWidget'] = [] # type: ignore # noqa
|
||||
self.active_plot = None
|
||||
self.digits: int = digits
|
||||
|
@ -351,7 +336,7 @@ class Cursor(pg.GraphicsObject):
|
|||
# XXX: not sure why these are instance variables?
|
||||
# It's not like we can change them on the fly..?
|
||||
self.pen = pg.mkPen(
|
||||
color=hcolor('bracket'),
|
||||
color=hcolor('default'),
|
||||
style=QtCore.Qt.DashLine,
|
||||
)
|
||||
self.lines_pen = pg.mkPen(
|
||||
|
@ -367,7 +352,7 @@ class Cursor(pg.GraphicsObject):
|
|||
self._lw = self.pixelWidth() * self.lines_pen.width()
|
||||
|
||||
# xhair label's color name
|
||||
self.label_color: str = 'bracket'
|
||||
self.label_color: str = 'default'
|
||||
|
||||
self._y_label_update: bool = True
|
||||
|
||||
|
@ -400,7 +385,7 @@ class Cursor(pg.GraphicsObject):
|
|||
|
||||
def add_plot(
|
||||
self,
|
||||
plot: ChartPlotWidget, # noqa
|
||||
plot: 'ChartPlotWidget', # noqa
|
||||
digits: int = 0,
|
||||
|
||||
) -> None:
|
||||
|
@ -420,7 +405,7 @@ class Cursor(pg.GraphicsObject):
|
|||
hl.hide()
|
||||
|
||||
yl = YAxisLabel(
|
||||
pi=plot.plotItem,
|
||||
chart=plot,
|
||||
# parent=plot.getAxis('right'),
|
||||
parent=plot.pi_overlay.get_axis(plot.plotItem, 'right'),
|
||||
digits=digits or self.digits,
|
||||
|
@ -484,58 +469,39 @@ class Cursor(pg.GraphicsObject):
|
|||
|
||||
def add_curve_cursor(
|
||||
self,
|
||||
chart: ChartPlotWidget, # noqa
|
||||
plot: 'ChartPlotWidget', # noqa
|
||||
curve: 'PlotCurveItem', # noqa
|
||||
|
||||
) -> LineDot:
|
||||
# if this chart contains curves add line dot "cursors" to denote
|
||||
# if this plot contains curves add line dot "cursors" to denote
|
||||
# the current sample under the mouse
|
||||
main_viz = chart.get_viz(chart.name)
|
||||
|
||||
main_flow = plot._flows[plot.name]
|
||||
# read out last index
|
||||
i = main_viz.shm.array[-1]['index']
|
||||
i = main_flow.shm.array[-1]['index']
|
||||
cursor = LineDot(
|
||||
curve,
|
||||
index=i,
|
||||
plot=chart
|
||||
plot=plot
|
||||
)
|
||||
chart.addItem(cursor)
|
||||
self.graphics[chart].setdefault('cursors', []).append(cursor)
|
||||
plot.addItem(cursor)
|
||||
self.graphics[plot].setdefault('cursors', []).append(cursor)
|
||||
return cursor
|
||||
|
||||
def mouseAction(
|
||||
self,
|
||||
action: str,
|
||||
plot: ChartPlotWidget,
|
||||
|
||||
) -> None: # noqa
|
||||
|
||||
def mouseAction(self, action, plot): # noqa
|
||||
log.debug(f"{(action, plot.name)}")
|
||||
if action == 'Enter':
|
||||
self.active_plot = plot
|
||||
plot.linked.godwidget._active_cursor = self
|
||||
|
||||
# show horiz line and y-label
|
||||
self.graphics[plot]['hl'].show()
|
||||
self.graphics[plot]['yl'].show()
|
||||
|
||||
if (
|
||||
not self.always_show_xlabel
|
||||
and not self.xaxis_label.isVisible()
|
||||
):
|
||||
self.xaxis_label.show()
|
||||
else: # Leave
|
||||
|
||||
# Leave: hide horiz line and y-label
|
||||
else:
|
||||
# hide horiz line and y-label
|
||||
self.graphics[plot]['hl'].hide()
|
||||
self.graphics[plot]['yl'].hide()
|
||||
|
||||
if (
|
||||
not self.always_show_xlabel
|
||||
and self.xaxis_label.isVisible()
|
||||
):
|
||||
self.xaxis_label.hide()
|
||||
|
||||
def mouseMoved(
|
||||
self,
|
||||
coords: tuple[QPointF], # noqa
|
||||
|
@ -624,10 +590,6 @@ class Cursor(pg.GraphicsObject):
|
|||
left_axis_width += left.width()
|
||||
|
||||
# map back to abs (label-local) coordinates
|
||||
if (
|
||||
self.always_show_xlabel
|
||||
or self.xaxis_label.isVisible()
|
||||
):
|
||||
self.xaxis_label.update_label(
|
||||
abs_pos=(
|
||||
plot.mapFromView(QPointF(vl_x, iy)) -
|
||||
|
|
|
@ -28,7 +28,10 @@ from PyQt5.QtWidgets import QGraphicsItem
|
|||
from PyQt5.QtCore import (
|
||||
Qt,
|
||||
QLineF,
|
||||
QSizeF,
|
||||
QRectF,
|
||||
# QRect,
|
||||
QPointF,
|
||||
)
|
||||
from PyQt5.QtGui import (
|
||||
QPainter,
|
||||
|
@ -36,8 +39,11 @@ from PyQt5.QtGui import (
|
|||
)
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._style import hcolor
|
||||
# from ._compression import (
|
||||
# # ohlc_to_m4_line,
|
||||
# ds_m4,
|
||||
# )
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
@ -51,117 +57,7 @@ _line_styles: dict[str, int] = {
|
|||
}
|
||||
|
||||
|
||||
class FlowGraphic(pg.GraphicsObject):
|
||||
'''
|
||||
Base class with minimal interface for `QPainterPath` implemented,
|
||||
real-time updated "data flow" graphics.
|
||||
|
||||
See subtypes below.
|
||||
|
||||
'''
|
||||
# sub-type customization methods
|
||||
declare_paintables: Callable | None = None
|
||||
sub_paint: Callable | None = None
|
||||
|
||||
# XXX-NOTE-XXX: graphics caching B)
|
||||
# see explanation for different caching modes:
|
||||
# https://stackoverflow.com/a/39410081
|
||||
cache_mode: int = QGraphicsItem.DeviceCoordinateCache
|
||||
# XXX: WARNING item caching seems to only be useful
|
||||
# if we don't re-generate the entire QPainterPath every time
|
||||
# don't ever use this - it's a colossal nightmare of artefacts
|
||||
# and is disastrous for performance.
|
||||
# QGraphicsItem.ItemCoordinateCache
|
||||
# TODO: still questions todo with coord-cacheing that we should
|
||||
# probably talk to a core dev about:
|
||||
# - if this makes transform interactions slower (such as zooming)
|
||||
# and if so maybe if/when we implement a "history" mode for the
|
||||
# view we disable this in that mode?
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
name: str | None = None,
|
||||
|
||||
# line styling
|
||||
color: str = 'bracket',
|
||||
last_step_color: str | None = None,
|
||||
fill_color: Optional[str] = None,
|
||||
style: str = 'solid',
|
||||
|
||||
**kwargs
|
||||
|
||||
) -> None:
|
||||
|
||||
self._name = name
|
||||
|
||||
# primary graphics item used for history
|
||||
self.path: QPainterPath = QPainterPath()
|
||||
|
||||
# additional path that can be optionally used for appends which
|
||||
# tries to avoid triggering an update/redraw of the presumably
|
||||
# larger historical ``.path`` above. the flag to enable
|
||||
# this behaviour is found in `Renderer.render()`.
|
||||
self.fast_path: QPainterPath | None = None
|
||||
|
||||
# TODO: evaluating the path capacity stuff and see
|
||||
# if it really makes much diff pre-allocating it.
|
||||
# self._last_cap: int = 0
|
||||
# cap = path.capacity()
|
||||
# if cap != self._last_cap:
|
||||
# print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
|
||||
# self._last_cap = cap
|
||||
|
||||
# all history of curve is drawn in single px thickness
|
||||
self._color: str = color
|
||||
pen = pg.mkPen(hcolor(color), width=1)
|
||||
pen.setStyle(_line_styles[style])
|
||||
|
||||
if 'dash' in style:
|
||||
pen.setDashPattern([8, 3])
|
||||
|
||||
self._pen = pen
|
||||
self._brush = pg.functions.mkBrush(
|
||||
hcolor(fill_color or color)
|
||||
)
|
||||
|
||||
# last segment is drawn in 2px thickness for emphasis
|
||||
if last_step_color:
|
||||
self.last_step_pen = pg.mkPen(
|
||||
hcolor(last_step_color),
|
||||
width=2,
|
||||
)
|
||||
else:
|
||||
self.last_step_pen = pg.mkPen(
|
||||
self._pen,
|
||||
width=2,
|
||||
)
|
||||
|
||||
self._last_line: QLineF = QLineF()
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# apply cache mode
|
||||
self.setCacheMode(self.cache_mode)
|
||||
|
||||
def x_uppx(self) -> int:
|
||||
|
||||
px_vecs = self.pixelVectors()[0]
|
||||
if px_vecs:
|
||||
return px_vecs.x()
|
||||
else:
|
||||
return 0
|
||||
|
||||
def x_last(self) -> float | None:
|
||||
'''
|
||||
Return the last most x value of the last line segment or if not
|
||||
drawn yet, ``None``.
|
||||
|
||||
'''
|
||||
return self._last_line.x1() if self._last_line else None
|
||||
|
||||
|
||||
class Curve(FlowGraphic):
|
||||
class Curve(pg.GraphicsObject):
|
||||
'''
|
||||
A faster, simpler, append friendly version of
|
||||
``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
|
||||
|
@ -178,7 +74,7 @@ class Curve(FlowGraphic):
|
|||
lower level graphics data can be rendered in different threads and
|
||||
then read and drawn in this main thread without having to worry
|
||||
about dealing with Qt's concurrency primitives. See
|
||||
``piker.ui._render.Renderer`` for details and logic related to lower
|
||||
``piker.ui._flows.Renderer`` for details and logic related to lower
|
||||
level path generation and incremental update. The main differences in
|
||||
the path generation code include:
|
||||
|
||||
|
@ -190,38 +86,127 @@ class Curve(FlowGraphic):
|
|||
updates don't trigger a full path redraw.
|
||||
|
||||
'''
|
||||
# TODO: can we remove this?
|
||||
# sub_br: Optional[Callable] = None
|
||||
|
||||
# sub-type customization methods
|
||||
sub_br: Optional[Callable] = None
|
||||
sub_paint: Optional[Callable] = None
|
||||
declare_paintables: Optional[Callable] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
|
||||
# color: str = 'default_lightest',
|
||||
# fill_color: Optional[str] = None,
|
||||
# style: str = 'solid',
|
||||
step_mode: bool = False,
|
||||
color: str = 'default_lightest',
|
||||
fill_color: Optional[str] = None,
|
||||
style: str = 'solid',
|
||||
name: Optional[str] = None,
|
||||
use_fpath: bool = True,
|
||||
|
||||
**kwargs
|
||||
|
||||
) -> None:
|
||||
|
||||
self._name = name
|
||||
|
||||
# brutaaalll, see comments within..
|
||||
self.yData = None
|
||||
self.xData = None
|
||||
|
||||
# self._last_cap: int = 0
|
||||
self.path: Optional[QPainterPath] = None
|
||||
|
||||
# additional path used for appends which tries to avoid
|
||||
# triggering an update/redraw of the presumably larger
|
||||
# historical ``.path`` above.
|
||||
self.use_fpath = use_fpath
|
||||
self.fast_path: Optional[QPainterPath] = None
|
||||
|
||||
# TODO: we can probably just dispense with the parent since
|
||||
# we're basically only using the pen setting now...
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self._last_line: QLineF = QLineF()
|
||||
# all history of curve is drawn in single px thickness
|
||||
pen = pg.mkPen(hcolor(color))
|
||||
pen.setStyle(_line_styles[style])
|
||||
|
||||
if 'dash' in style:
|
||||
pen.setDashPattern([8, 3])
|
||||
|
||||
self._pen = pen
|
||||
|
||||
# last segment is drawn in 2px thickness for emphasis
|
||||
# self.last_step_pen = pg.mkPen(hcolor(color), width=2)
|
||||
self.last_step_pen = pg.mkPen(pen, width=2)
|
||||
|
||||
# self._last_line: Optional[QLineF] = None
|
||||
self._last_line = QLineF()
|
||||
self._last_w: float = 1
|
||||
|
||||
# flat-top style histogram-like discrete curve
|
||||
# self._step_mode: bool = step_mode
|
||||
|
||||
# self._fill = True
|
||||
self._brush = pg.functions.mkBrush(hcolor(fill_color or color))
|
||||
|
||||
# NOTE: this setting seems to mostly prevent redraws on mouse
|
||||
# interaction which is a huge boon for avg interaction latency.
|
||||
|
||||
# TODO: one question still remaining is if this makes transform
|
||||
# interactions slower (such as zooming) and if so maybe if/when
|
||||
# we implement a "history" mode for the view we disable this in
|
||||
# that mode?
|
||||
# don't enable caching by default for the case where the
|
||||
# only thing drawn is the "last" line segment which can
|
||||
# have a weird artifact where it won't be fully drawn to its
|
||||
# endpoint (something we saw on trade rate curves)
|
||||
self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
# XXX: see explanation for different caching modes:
|
||||
# https://stackoverflow.com/a/39410081
|
||||
# seems to only be useful if we don't re-generate the entire
|
||||
# QPainterPath every time
|
||||
# curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
# don't ever use this - it's a colossal nightmare of artefacts
|
||||
# and is disastrous for performance.
|
||||
# curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
|
||||
|
||||
# allow sub-type customization
|
||||
declare = self.declare_paintables
|
||||
if declare:
|
||||
declare()
|
||||
|
||||
# TODO: probably stick this in a new parent
|
||||
# type which will contain our own version of
|
||||
# what ``PlotCurveItem`` had in terms of base
|
||||
# functionality? A `FlowGraphic` maybe?
|
||||
def x_uppx(self) -> int:
|
||||
|
||||
px_vecs = self.pixelVectors()[0]
|
||||
if px_vecs:
|
||||
xs_in_px = px_vecs.x()
|
||||
return round(xs_in_px)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def px_width(self) -> float:
|
||||
|
||||
vb = self.getViewBox()
|
||||
if not vb:
|
||||
return 0
|
||||
|
||||
vr = self.viewRect()
|
||||
l, r = int(vr.left()), int(vr.right())
|
||||
|
||||
start, stop = self._xrange
|
||||
lbar = max(l, start)
|
||||
rbar = min(r, stop)
|
||||
|
||||
return vb.mapViewToDevice(
|
||||
QLineF(lbar, 0, rbar, 0)
|
||||
).length()
|
||||
|
||||
# XXX: lol brutal, the internals of `CurvePoint` (inherited by
|
||||
# our `LineDot`) required ``.getData()`` to work..
|
||||
def getData(self):
|
||||
|
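The comments in the previous hunk weigh Qt's item caching modes: ``DeviceCoordinateCache`` largely avoids repaints during mouse interaction, while ``ItemCoordinateCache`` produces artifacts and hurts performance. A minimal hedged sketch of enabling the device-coordinate cache on a custom item (a standalone toy item, not piker's ``Curve``):

import pyqtgraph as pg
from PyQt5.QtCore import QRectF
from PyQt5.QtWidgets import QGraphicsItem

class CachedItem(pg.GraphicsObject):
    def __init__(self) -> None:
        super().__init__()
        # rasterize the item once per transform level; panning and mouse
        # moves then blit the cached pixmap instead of re-running paint().
        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

    def boundingRect(self) -> QRectF:
        return QRectF(0, 0, 1, 1)

    def paint(self, p, opt, w) -> None:
        pass  # real drawing would go here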
@ -245,8 +230,8 @@ class Curve(FlowGraphic):
|
|||
self.path.clear()
|
||||
|
||||
if self.fast_path:
|
||||
self.fast_path.clear()
|
||||
# self.fast_path = None
|
||||
# self.fast_path.clear()
|
||||
self.fast_path = None
|
||||
|
||||
@cm
|
||||
def reset_cache(self) -> None:
|
||||
|
@ -266,65 +251,77 @@ class Curve(FlowGraphic):
|
|||
self.boundingRect = self._path_br
|
||||
return self._path_br()
|
||||
|
||||
# Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
|
||||
def _path_br(self):
|
||||
'''
|
||||
Post init ``.boundingRect()``.
|
||||
|
||||
'''
|
||||
# profiler = Profiler(
|
||||
# msg=f'Curve.boundingRect(): `{self._name}`',
|
||||
# disabled=not pg_profile_enabled(),
|
||||
# ms_threshold=ms_slower_then,
|
||||
# hb = self.path.boundingRect()
|
||||
hb = self.path.controlPointRect()
|
||||
hb_size = hb.size()
|
||||
|
||||
fp = self.fast_path
|
||||
if fp:
|
||||
fhb = fp.controlPointRect()
|
||||
hb_size = fhb.size() + hb_size
|
||||
|
||||
# print(f'hb_size: {hb_size}')
|
||||
|
||||
# if self._last_step_rect:
|
||||
# hb_size += self._last_step_rect.size()
|
||||
|
||||
# if self._line:
|
||||
# br = self._last_step_rect.bottomRight()
|
||||
|
||||
# tl = QPointF(
|
||||
# # self._vr[0],
|
||||
# # hb.topLeft().y(),
|
||||
# # 0,
|
||||
# # hb_size.height() + 1
|
||||
# )
|
||||
pr = self.path.controlPointRect()
|
||||
hb_tl, hb_br = (
|
||||
pr.topLeft(),
|
||||
pr.bottomRight(),
|
||||
)
|
||||
mn_y = hb_tl.y()
|
||||
mx_y = hb_br.y()
|
||||
most_left = hb_tl.x()
|
||||
most_right = hb_br.x()
|
||||
# profiler('calc path vertices')
|
||||
|
||||
# TODO: if/when we get fast path appends working in the
|
||||
# `Renderer`, then we might need to actually use this..
|
||||
# fp = self.fast_path
|
||||
# if fp:
|
||||
# fhb = fp.controlPointRect()
|
||||
# # hb_size = fhb.size() + hb_size
|
||||
# br = pr.united(fhb)
|
||||
# br = self._last_step_rect.bottomRight()
|
||||
|
||||
# XXX: *was* a way to allow sub-types to extend the
|
||||
# boundingrect calc, but in the one use case for a step curve
|
||||
# doesn't seem like we need it as long as the last line segment
|
||||
# is drawn as it is?
|
||||
|
||||
# sbr = self.sub_br
|
||||
# if sbr:
|
||||
# # w, h = self.sub_br(w, h)
|
||||
# sub_br = sbr()
|
||||
# br = br.united(sub_br)
|
||||
w = hb_size.width()
|
||||
h = hb_size.height()
|
||||
|
||||
sbr = self.sub_br
|
||||
if sbr:
|
||||
w, h = self.sub_br(w, h)
|
||||
else:
|
||||
# assume plain line graphic and use
|
||||
# default unit step in each direction.
|
||||
ll = self._last_line
|
||||
y1, y2 = ll.y1(), ll.y2()
|
||||
x1, x2 = ll.x1(), ll.x2()
|
||||
|
||||
ymn = min(y1, y2, mn_y)
|
||||
ymx = max(y1, y2, mx_y)
|
||||
most_left = min(x1, x2, most_left)
|
||||
most_right = max(x1, x2, most_right)
|
||||
# profiler('calc last line vertices')
|
||||
# only on a plain line do we include
|
||||
# an extra index step's worth of width
|
||||
# since in the step case the end of the curve
|
||||
# actually terminates earlier so we don't need
|
||||
# this for the last step.
|
||||
w += self._last_w
|
||||
# ll = self._last_line
|
||||
h += 1 # ll.y2() - ll.y1()
|
||||
|
||||
return QRectF(
|
||||
most_left,
|
||||
ymn,
|
||||
most_right - most_left + 1,
|
||||
ymx,
|
||||
# br = QPointF(
|
||||
# self._vr[-1],
|
||||
# # tl.x() + w,
|
||||
# tl.y() + h,
|
||||
# )
|
||||
|
||||
br = QRectF(
|
||||
|
||||
# top left
|
||||
# hb.topLeft()
|
||||
# tl,
|
||||
QPointF(hb.topLeft()),
|
||||
|
||||
# br,
|
||||
# total size
|
||||
# QSizeF(hb_size)
|
||||
# hb_size,
|
||||
QSizeF(w, h)
|
||||
)
|
||||
# print(f'bounding rect: {br}')
|
||||
return br
|
||||
|
||||
def paint(
|
||||
self,
|
||||
|
@ -334,7 +331,7 @@ class Curve(FlowGraphic):
|
|||
|
||||
) -> None:
|
||||
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
msg=f'Curve.paint(): `{self._name}`',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
|
@ -342,14 +339,18 @@ class Curve(FlowGraphic):
|
|||
|
||||
sub_paint = self.sub_paint
|
||||
if sub_paint:
|
||||
sub_paint(p)
|
||||
sub_paint(p, profiler)
|
||||
|
||||
p.setPen(self.last_step_pen)
|
||||
p.drawLine(self._last_line)
|
||||
profiler('last datum `.drawLine()`')
|
||||
|
||||
profiler('.drawLine()')
|
||||
p.setPen(self._pen)
|
||||
|
||||
path = self.path
|
||||
# cap = path.capacity()
|
||||
# if cap != self._last_cap:
|
||||
# print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
|
||||
# self._last_cap = cap
|
||||
|
||||
if path:
|
||||
p.drawPath(path)
|
||||
|
@ -372,30 +373,22 @@ class Curve(FlowGraphic):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
# default line draw last call
|
||||
# with self.reset_cache():
|
||||
x = src_data[index_field]
|
||||
y = src_data[array_key]
|
||||
|
||||
x_last = x[-1]
|
||||
x_2last = x[-2]
|
||||
x = render_data['index']
|
||||
y = render_data[array_key]
|
||||
|
||||
# draw the "current" step graphic segment so it
|
||||
# lines up with the "middle" of the current
|
||||
# (OHLC) sample.
|
||||
self._last_line = QLineF(
|
||||
|
||||
# NOTE: currently we draw in x-domain
|
||||
# from last datum to current such that
|
||||
# the end of line touches the "beginning"
|
||||
# of the current datum step span.
|
||||
x_2last, y[-2],
|
||||
x_last, y[-1],
|
||||
x[-2], y[-2],
|
||||
x[-1], y[-1],
|
||||
)
|
||||
|
||||
return x, y
|
||||
|
@ -407,20 +400,17 @@ class Curve(FlowGraphic):
|
|||
# (via its max / min) even when highly zoomed out.
|
||||
class FlattenedOHLC(Curve):
|
||||
|
||||
# avoids strange dragging/smearing artifacts when panning..
|
||||
cache_mode: int = QGraphicsItem.NoCache
|
||||
|
||||
def draw_last_datum(
|
||||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
lasts = src_data[-2:]
|
||||
x = lasts[index_field]
|
||||
x = lasts['index']
|
||||
y = lasts['close']
|
||||
|
||||
# draw the "current" step graphic segment so it
|
||||
|
@ -444,9 +434,9 @@ class StepCurve(Curve):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
w: float = 0.5,
|
||||
|
||||
|
@ -455,31 +445,40 @@ class StepCurve(Curve):
|
|||
# TODO: remove this and instead place all step curve
|
||||
# updating into pre-path data render callbacks.
|
||||
# full input data
|
||||
x = src_data[index_field]
|
||||
x = src_data['index']
|
||||
y = src_data[array_key]
|
||||
|
||||
x_last = x[-1]
|
||||
x_2last = x[-2]
|
||||
y_last = y[-1]
|
||||
step_size = x_last - x_2last
|
||||
|
||||
# lol, commenting this makes step curves
|
||||
# all "black" for me :eyeroll:..
|
||||
self._last_line = QLineF(
|
||||
x_2last, 0,
|
||||
x_last, 0,
|
||||
x_last - w, 0,
|
||||
x_last + w, 0,
|
||||
)
|
||||
self._last_step_rect = QRectF(
|
||||
x_last, 0,
|
||||
step_size, y_last,
|
||||
x_last - w, 0,
|
||||
x_last + w, y_last,
|
||||
)
|
||||
return x, y
|
||||
|
||||
def sub_paint(
|
||||
self,
|
||||
p: QPainter,
|
||||
profiler: pg.debug.Profiler,
|
||||
|
||||
) -> None:
|
||||
# p.drawLines(*tuple(filter(bool, self._last_step_lines)))
|
||||
# p.drawRect(self._last_step_rect)
|
||||
p.fillRect(self._last_step_rect, self._brush)
|
||||
profiler('.fillRect()')
|
||||
|
||||
def sub_br(
|
||||
self,
|
||||
path_w: float,
|
||||
path_h: float,
|
||||
|
||||
) -> (float, float):
|
||||
# passthrough
|
||||
return path_w, path_h
|
||||
|
|
1238  piker/ui/_dataviz.py: File diff suppressed because it is too large
1478  piker/ui/_display.py: File diff suppressed because it is too large
|
@ -18,27 +18,11 @@
|
|||
Higher level annotation editors.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from collections import defaultdict
|
||||
from typing import (
|
||||
Optional,
|
||||
TYPE_CHECKING
|
||||
)
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
|
||||
import pyqtgraph as pg
|
||||
from pyqtgraph import (
|
||||
ViewBox,
|
||||
Point,
|
||||
QtCore,
|
||||
QtWidgets,
|
||||
)
|
||||
from PyQt5.QtGui import (
|
||||
QColor,
|
||||
)
|
||||
from PyQt5.QtWidgets import (
|
||||
QLabel,
|
||||
)
|
||||
|
||||
from pyqtgraph import ViewBox, Point, QtCore, QtGui
|
||||
from pyqtgraph import functions as fn
|
||||
from PyQt5.QtCore import QPointF
|
||||
import numpy as np
|
||||
|
@ -46,34 +30,28 @@ import numpy as np
|
|||
from ._style import hcolor, _font
|
||||
from ._lines import LevelLine
|
||||
from ..log import get_logger
|
||||
from ..data.types import Struct
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import GodWidget
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
class ArrowEditor(Struct):
|
||||
@dataclass
|
||||
class ArrowEditor:
|
||||
|
||||
godw: GodWidget = None # type: ignore # noqa
|
||||
_arrows: dict[str, list[pg.ArrowItem]] = {}
|
||||
chart: 'ChartPlotWidget' # noqa
|
||||
_arrows: field(default_factory=dict)
|
||||
|
||||
def add(
|
||||
self,
|
||||
plot: pg.PlotItem,
|
||||
uid: str,
|
||||
x: float,
|
||||
y: float,
|
||||
color='default',
|
||||
pointing: Optional[str] = None,
|
||||
|
||||
) -> pg.ArrowItem:
|
||||
'''
|
||||
Add an arrow graphic to view at given (x, y).
|
||||
"""Add an arrow graphic to view at given (x, y).
|
||||
|
||||
'''
|
||||
"""
|
||||
angle = {
|
||||
'up': 90,
|
||||
'down': -90,
|
||||
|
@ -96,25 +74,25 @@ class ArrowEditor(Struct):
|
|||
brush=pg.mkBrush(hcolor(color)),
|
||||
)
|
||||
arrow.setPos(x, y)
|
||||
self._arrows.setdefault(uid, []).append(arrow)
|
||||
|
||||
self._arrows[uid] = arrow
|
||||
|
||||
# render to view
|
||||
plot.addItem(arrow)
|
||||
self.chart.plotItem.addItem(arrow)
|
||||
|
||||
return arrow
|
||||
|
||||
def remove(self, arrow) -> bool:
|
||||
for linked in self.godw.iter_linked():
|
||||
linked.chart.plotItem.removeItem(arrow)
|
||||
self.chart.plotItem.removeItem(arrow)
|
||||
|
||||
|
||||
class LineEditor(Struct):
|
||||
'''
|
||||
The great editor of linez.
|
||||
@dataclass
|
||||
class LineEditor:
|
||||
'''The great editor of linez.
|
||||
|
||||
'''
|
||||
godw: GodWidget = None # type: ignore # noqa
|
||||
_order_lines: defaultdict[str, LevelLine] = defaultdict(list)
|
||||
chart: 'ChartPlotWidget' = None # type: ignore # noqa
|
||||
_order_lines: dict[str, LevelLine] = field(default_factory=dict)
|
||||
_active_staged_line: LevelLine = None
|
||||
|
||||
def stage_line(
|
||||
|
@ -122,11 +100,11 @@ class LineEditor(Struct):
|
|||
line: LevelLine,
|
||||
|
||||
) -> LevelLine:
|
||||
'''
|
||||
Stage a line at the current chart's cursor position
|
||||
"""Stage a line at the current chart's cursor position
|
||||
and return it.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# add a "staged" cursor-tracking line to view
|
||||
# and cache it in a var
|
||||
if self._active_staged_line:
|
||||
|
@ -137,25 +115,17 @@ class LineEditor(Struct):
|
|||
return line
|
||||
|
||||
def unstage_line(self) -> LevelLine:
|
||||
'''
|
||||
Inverse of ``.stage_line()``.
|
||||
"""Inverse of ``.stage_line()``.
|
||||
|
||||
'''
|
||||
cursor = self.godw.get_cursor()
|
||||
if not cursor:
|
||||
return None
|
||||
"""
|
||||
# chart = self.chart._cursor.active_plot
|
||||
# # chart.setCursor(QtCore.Qt.ArrowCursor)
|
||||
cursor = self.chart.linked.cursor
|
||||
|
||||
# delete "staged" cursor tracking line from view
|
||||
line = self._active_staged_line
|
||||
if line:
|
||||
try:
|
||||
cursor._trackers.remove(line)
|
||||
except KeyError:
|
||||
# when the current cursor doesn't have said line
|
||||
# registered (probably means that user held order mode
|
||||
# key while panning to another view) then we just
|
||||
# ignore the remove error.
|
||||
pass
|
||||
line.delete()
|
||||
|
||||
self._active_staged_line = None
|
||||
|
@ -163,58 +133,55 @@ class LineEditor(Struct):
|
|||
# show the crosshair y line and label
|
||||
cursor.show_xhair()
|
||||
|
||||
def submit_lines(
|
||||
def submit_line(
|
||||
self,
|
||||
lines: list[LevelLine],
|
||||
line: LevelLine,
|
||||
uuid: str,
|
||||
|
||||
) -> LevelLine:
|
||||
|
||||
# staged_line = self._active_staged_line
|
||||
# if not staged_line:
|
||||
# raise RuntimeError("No line is currently staged!?")
|
||||
staged_line = self._active_staged_line
|
||||
if not staged_line:
|
||||
raise RuntimeError("No line is currently staged!?")
|
||||
|
||||
# for now, until submission response arrives
|
||||
for line in lines:
|
||||
line.hide_labels()
|
||||
|
||||
# register for later lookup/deletion
|
||||
self._order_lines[uuid] += lines
|
||||
self._order_lines[uuid] = line
|
||||
|
||||
return lines
|
||||
return line
|
||||
|
||||
def commit_line(self, uuid: str) -> list[LevelLine]:
|
||||
'''
|
||||
Commit a "staged line" to view.
|
||||
def commit_line(self, uuid: str) -> LevelLine:
|
||||
"""Commit a "staged line" to view.
|
||||
|
||||
Submits the line graphic under the cursor as a (new) permanent
|
||||
graphic in view.
|
||||
|
||||
'''
|
||||
lines = self._order_lines[uuid]
|
||||
if lines:
|
||||
for line in lines:
|
||||
"""
|
||||
try:
|
||||
line = self._order_lines[uuid]
|
||||
except KeyError:
|
||||
log.warning(f'No line for {uuid} could be found?')
|
||||
return
|
||||
else:
|
||||
line.show_labels()
|
||||
line.hide_markers()
|
||||
log.debug(f'Level active for level: {line.value()}')
|
||||
|
||||
# TODO: other flashy things to indicate the order is active
|
||||
|
||||
return lines
|
||||
log.debug(f'Level active for level: {line.value()}')
|
||||
|
||||
return line
|
||||
|
||||
def lines_under_cursor(self) -> list[LevelLine]:
|
||||
'''
|
||||
Get the line(s) under the cursor position.
|
||||
"""Get the line(s) under the cursor position.
|
||||
|
||||
'''
|
||||
"""
|
||||
# Delete any hoverable under the cursor
|
||||
return self.godw.get_cursor()._hovered
|
||||
return self.chart.linked.cursor._hovered
|
||||
|
||||
def all_lines(self) -> list[LevelLine]:
|
||||
all_lines = []
|
||||
for lines in list(self._order_lines.values()):
|
||||
all_lines.extend(lines)
|
||||
|
||||
return all_lines
|
||||
def all_lines(self) -> tuple[LevelLine]:
|
||||
return tuple(self._order_lines.values())
|
||||
|
||||
def remove_line(
|
||||
self,
|
||||
|
@ -229,30 +196,29 @@ class LineEditor(Struct):
|
|||
|
||||
'''
|
||||
# try to look up line from our registry
|
||||
lines = self._order_lines.pop(uuid, None)
|
||||
if lines:
|
||||
cursor = self.godw.get_cursor()
|
||||
if cursor:
|
||||
for line in lines:
|
||||
line = self._order_lines.pop(uuid, line)
|
||||
if line:
|
||||
|
||||
# if hovered remove from cursor set
|
||||
cursor = self.chart.linked.cursor
|
||||
hovered = cursor._hovered
|
||||
if line in hovered:
|
||||
hovered.remove(line)
|
||||
|
||||
log.debug(f'deleting {line} with oid: {uuid}')
|
||||
line.delete()
|
||||
|
||||
# make sure the xhair doesn't get left off
|
||||
# just because we never got a un-hover event
|
||||
cursor.show_xhair()
|
||||
|
||||
log.debug(f'deleting {line} with oid: {uuid}')
|
||||
line.delete()
|
||||
|
||||
else:
|
||||
log.warning(f'Could not find line for {line}')
|
||||
|
||||
return lines
|
||||
return line
|
||||
|
||||
|
||||
class SelectRect(QtWidgets.QGraphicsRectItem):
|
||||
class SelectRect(QtGui.QGraphicsRectItem):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
@ -261,12 +227,12 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
) -> None:
|
||||
super().__init__(0, 0, 1, 1)
|
||||
|
||||
# self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
|
||||
# self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
|
||||
self.vb = viewbox
|
||||
self._chart: 'ChartPlotWidget' = None # noqa
|
||||
|
||||
# override selection box color
|
||||
color = QColor(hcolor(color))
|
||||
color = QtGui.QColor(hcolor(color))
|
||||
self.setPen(fn.mkPen(color, width=1))
|
||||
color.setAlpha(66)
|
||||
self.setBrush(fn.mkBrush(color))
|
||||
|
@ -274,7 +240,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
self.hide()
|
||||
self._label = None
|
||||
|
||||
label = self._label = QLabel()
|
||||
label = self._label = QtGui.QLabel()
|
||||
label.setTextFormat(0) # markdown
|
||||
label.setFont(_font.font)
|
||||
label.setMargin(0)
|
||||
|
@ -311,8 +277,8 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
# TODO: get bg color working
|
||||
palette.setColor(
|
||||
self._label.backgroundRole(),
|
||||
# QColor(chart.backgroundBrush()),
|
||||
QColor(hcolor('papas_special')),
|
||||
# QtGui.QColor(chart.backgroundBrush()),
|
||||
QtGui.QColor(hcolor('papas_special')),
|
||||
)
|
||||
|
||||
def update_on_resize(self, vr, r):
|
||||
|
@ -360,7 +326,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
|
||||
self.setPos(r.topLeft())
|
||||
self.resetTransform()
|
||||
self.setRect(r)
|
||||
self.scale(r.width(), r.height())
|
||||
self.show()
|
||||
|
||||
y1, y2 = start_pos.y(), end_pos.y()
|
||||
|
@ -377,7 +343,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
|
|||
nbars = ixmx - ixmn + 1
|
||||
|
||||
chart = self._chart
|
||||
data = chart.get_viz(chart.name).shm.array[ixmn:ixmx]
|
||||
data = chart._flows[chart.name].shm.array[ixmn:ixmx]
|
||||
|
||||
if len(data):
|
||||
std = data['close'].std()
|
||||
|
|
|
@ -18,11 +18,11 @@
|
|||
Qt event proxying and processing using ``trio`` mem chans.
|
||||
|
||||
"""
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from contextlib import asynccontextmanager, AsyncExitStack
|
||||
from typing import Callable
|
||||
|
||||
from pydantic import BaseModel
|
||||
import trio
|
||||
from tractor.trionics import gather_contexts
|
||||
from PyQt5 import QtCore
|
||||
from PyQt5.QtCore import QEvent, pyqtBoundSignal
|
||||
from PyQt5.QtWidgets import QWidget
|
||||
|
@ -30,8 +30,6 @@ from PyQt5.QtWidgets import (
|
|||
QGraphicsSceneMouseEvent as gs_mouse,
|
||||
)
|
||||
|
||||
from ..data.types import Struct
|
||||
|
||||
|
||||
MOUSE_EVENTS = {
|
||||
gs_mouse.GraphicsSceneMousePress,
|
||||
|
@ -45,10 +43,13 @@ MOUSE_EVENTS = {
|
|||
# TODO: maybe consider some constrained ints down the road?
|
||||
# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
|
||||
|
||||
class KeyboardMsg(Struct):
|
||||
class KeyboardMsg(BaseModel):
|
||||
'''Unpacked Qt keyboard event data.
|
||||
|
||||
'''
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
event: QEvent
|
||||
etype: int
|
||||
key: int
|
||||
|
@ -56,13 +57,16 @@ class KeyboardMsg(Struct):
|
|||
txt: str
|
||||
|
||||
def to_tuple(self) -> tuple:
|
||||
return tuple(self.to_dict().values())
|
||||
return tuple(self.dict().values())
|
||||
|
||||
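For context on the ``.to_dict()``/``.to_tuple()`` calls above: the hunk swaps a pydantic ``BaseModel`` for a ``msgspec``-backed ``Struct``. A rough sketch (not piker's actual ``Struct`` type, and with the ``QEvent`` field left out so it runs standalone) of keeping the same conversion helpers on a ``msgspec.Struct``:

    import msgspec

    class KeyboardMsgSketch(msgspec.Struct):
        # hypothetical, simplified stand-in for the ``KeyboardMsg`` above
        etype: int
        key: int
        mods: int
        txt: str

        def to_dict(self) -> dict:
            # msgspec's struct -> dict helper
            return msgspec.structs.asdict(self)

        def to_tuple(self) -> tuple:
            return tuple(self.to_dict().values())

    msg = KeyboardMsgSketch(etype=6, key=65, mods=0, txt='a')
    assert msg.to_tuple() == (6, 65, 0, 'a')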
|
||||
class MouseMsg(Struct):
|
||||
class MouseMsg(BaseModel):
|
||||
'''Unpacked Qt mouse event data.
|
||||
|
||||
'''
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
event: QEvent
|
||||
etype: int
|
||||
button: int
|
||||
|
@ -156,7 +160,7 @@ class EventRelay(QtCore.QObject):
|
|||
return False
|
||||
|
||||
|
||||
@acm
|
||||
@asynccontextmanager
|
||||
async def open_event_stream(
|
||||
|
||||
source_widget: QWidget,
|
||||
|
@ -182,7 +186,7 @@ async def open_event_stream(
|
|||
source_widget.removeEventFilter(kc)
|
||||
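The ``open_event_stream()`` hunk above relies on the usual bridge between a synchronous (Qt) callback and ``trio``: the callback pushes events into a memory channel which an async task then consumes. A minimal, Qt-free sketch of that pattern (names here are illustrative, not piker's API):

    import trio

    async def main():
        send_chan, recv_chan = trio.open_memory_channel(16)

        def on_event(ev):
            # stand-in for the Qt ``eventFilter`` callback
            try:
                send_chan.send_nowait(ev)
            except trio.WouldBlock:
                pass  # drop events if the consumer can't keep up

        async def consumer():
            async for ev in recv_chan:
                print('handled', ev)
                if ev == 'quit':
                    break

        async with trio.open_nursery() as n:
            n.start_soon(consumer)
            for ev in ('keypress', 'mouse', 'quit'):
                on_event(ev)

    trio.run(main)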
|
||||
|
||||
@acm
|
||||
@asynccontextmanager
|
||||
async def open_signal_handler(
|
||||
|
||||
signal: pyqtBoundSignal,
|
||||
|
@ -207,7 +211,7 @@ async def open_signal_handler(
|
|||
yield
|
||||
|
||||
|
||||
@acm
|
||||
@asynccontextmanager
|
||||
async def open_handlers(
|
||||
|
||||
source_widgets: list[QWidget],
|
||||
|
@ -216,14 +220,16 @@ async def open_handlers(
|
|||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
|
||||
async with (
|
||||
trio.open_nursery() as n,
|
||||
gather_contexts([
|
||||
open_event_stream(widget, event_types, **kwargs)
|
||||
for widget in source_widgets
|
||||
]) as streams,
|
||||
AsyncExitStack() as stack,
|
||||
):
|
||||
for widget, event_recv_stream in zip(source_widgets, streams):
|
||||
for widget in source_widgets:
|
||||
|
||||
event_recv_stream = await stack.enter_async_context(
|
||||
open_event_stream(widget, event_types, **kwargs)
|
||||
)
|
||||
n.start_soon(async_handler, widget, event_recv_stream)
|
||||
|
||||
yield
|
||||
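The new ``open_handlers()`` body above swaps the ``AsyncExitStack`` loop for ``tractor.trionics.gather_contexts()``, which enters all per-widget context managers together and yields their results as a sequence. A reduced sketch of that pattern (the resource names are made up):

    from contextlib import asynccontextmanager as acm

    import trio
    from tractor.trionics import gather_contexts

    @acm
    async def open_resource(name: str):
        # stand-in for ``open_event_stream(widget, ...)``
        yield f'stream-for-{name}'

    async def handle(widget, stream):
        print(widget, '->', stream)

    async def main():
        widgets = ['chart', 'search', 'order_pane']
        async with (
            trio.open_nursery() as n,
            gather_contexts([open_resource(w) for w in widgets]) as streams,
        ):
            for widget, stream in zip(widgets, streams):
                n.start_soon(handle, widget, stream)

    trio.run(main)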
|
|
|
@ -20,24 +20,16 @@ Trio - Qt integration
|
|||
Run ``trio`` in guest mode on top of the Qt event loop.
|
||||
All global Qt runtime settings are mostly defined here.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Callable,
|
||||
Any,
|
||||
Type,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from typing import Tuple, Callable, Dict, Any
|
||||
import platform
|
||||
import traceback
|
||||
|
||||
# Qt specific
|
||||
import PyQt5 # noqa
|
||||
from PyQt5.QtWidgets import (
|
||||
QWidget,
|
||||
QMainWindow,
|
||||
QApplication,
|
||||
)
|
||||
import pyqtgraph as pg
|
||||
from pyqtgraph import QtGui
|
||||
from PyQt5 import QtCore
|
||||
# from PyQt5.QtGui import QLabel, QStatusBar
|
||||
from PyQt5.QtCore import (
|
||||
pyqtRemoveInputHook,
|
||||
Qt,
|
||||
|
@ -45,19 +37,15 @@ from PyQt5.QtCore import (
|
|||
)
|
||||
import qdarkstyle
|
||||
from qdarkstyle import DarkPalette
|
||||
# import qdarkgraystyle # TODO: play with it
|
||||
# import qdarkgraystyle
|
||||
import trio
|
||||
from outcome import Error
|
||||
|
||||
from .._daemon import (
|
||||
maybe_open_pikerd,
|
||||
get_tractor_runtime_kwargs,
|
||||
)
|
||||
from .._daemon import maybe_open_pikerd, _tractor_kwargs
|
||||
from ..log import get_logger
|
||||
from ._pg_overrides import _do_overrides
|
||||
from . import _style
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
# pyqtgraph global config
|
||||
|
@ -84,18 +72,17 @@ if platform.system() == "Windows":
|
|||
|
||||
def run_qtractor(
|
||||
func: Callable,
|
||||
args: tuple,
|
||||
main_widget_type: Type[QWidget],
|
||||
tractor_kwargs: dict[str, Any] = {},
|
||||
window_type: QMainWindow = None,
|
||||
|
||||
args: Tuple,
|
||||
main_widget: QtGui.QWidget,
|
||||
tractor_kwargs: Dict[str, Any] = {},
|
||||
window_type: QtGui.QMainWindow = None,
|
||||
) -> None:
|
||||
# avoids annoying message when entering debugger from qt loop
|
||||
pyqtRemoveInputHook()
|
||||
|
||||
app = QApplication.instance()
|
||||
app = QtGui.QApplication.instance()
|
||||
if app is None:
|
||||
app = QApplication([])
|
||||
app = PyQt5.QtWidgets.QApplication([])
|
||||
|
||||
# TODO: we might not need this if it's desired
|
||||
# to cancel the tractor machinery on Qt loop
|
||||
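A heavily simplified sketch of the guest-mode wiring that ``run_qtractor()`` performs: Qt owns the main event loop and ``trio`` is started as a guest with a thread-safe ``run_sync_soon`` hook. This version uses a queued signal for re-entry; piker's real implementation differs in detail, so treat every name below as illustrative:

    import trio
    from PyQt5 import QtCore
    from PyQt5.QtWidgets import QApplication

    app = QApplication([])

    class Reenter(QtCore.QObject):
        # queued signal: safe to emit from trio's worker thread,
        # delivered on the Qt (main) thread
        trigger = QtCore.pyqtSignal(object)

    reenter = Reenter()
    reenter.trigger.connect(lambda fn: fn(), QtCore.Qt.QueuedConnection)

    async def trio_main():
        await trio.sleep(0.25)
        print('trio says hi from inside the Qt loop')
        app.quit()

    trio.lowlevel.start_guest_run(
        trio_main,
        run_sync_soon_threadsafe=reenter.trigger.emit,
        done_callback=lambda outcome: print('trio finished:', outcome),
    )

    app.exec_()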
|
@ -169,11 +156,11 @@ def run_qtractor(
|
|||
# hook into app focus change events
|
||||
app.focusChanged.connect(window.on_focus_change)
|
||||
|
||||
instance = main_widget_type()
|
||||
instance = main_widget()
|
||||
instance.window = window
|
||||
|
||||
# override tractor's defaults
|
||||
tractor_kwargs.update(get_tractor_runtime_kwargs())
|
||||
tractor_kwargs.update(_tractor_kwargs)
|
||||
|
||||
# define tractor entrypoint
|
||||
async def main():
|
||||
|
@ -191,7 +178,7 @@ def run_qtractor(
|
|||
# restrict_keyboard_interrupt_to_checkpoints=True,
|
||||
)
|
||||
|
||||
window.godwidget: GodWidget = instance
|
||||
window.main_widget = main_widget
|
||||
window.setCentralWidget(instance)
|
||||
if is_windows:
|
||||
window.configure_to_desktop()
|
||||
|
|
File diff suppressed because it is too large
|
@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
|
|||
# color: #19232D;
|
||||
# width: 10px;
|
||||
|
||||
self.setRange(0, int(slots))
|
||||
self.setRange(0, slots)
|
||||
self.setValue(value)
|
||||
|
||||
|
||||
|
@ -644,7 +644,7 @@ def mk_fill_status_bar(
|
|||
|
||||
# TODO: calc this height from the ``ChartnPane``
|
||||
chart_h = round(parent_pane.height() * 5/8)
|
||||
bar_h = chart_h * 0.375*0.9
|
||||
bar_h = chart_h * 0.375
|
||||
|
||||
# TODO: once things are sized to screen
|
||||
bar_label_font_size = label_font_size or _font.px_size - 2
|
||||
|
|
282  piker/ui/_fsp.py
|
@ -27,13 +27,12 @@ from itertools import cycle
|
|||
from typing import Optional, AsyncGenerator, Any
|
||||
|
||||
import numpy as np
|
||||
import msgspec
|
||||
from pydantic import create_model
|
||||
import tractor
|
||||
import pyqtgraph as pg
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
|
||||
from piker.data.types import Struct
|
||||
from ._axes import PriceAxis
|
||||
from .._cacheables import maybe_open_context
|
||||
from ..calc import humanize
|
||||
|
@ -42,8 +41,6 @@ from ..data._sharedmem import (
|
|||
_Token,
|
||||
try_read,
|
||||
)
|
||||
from ..data.feed import Flume
|
||||
from ..data._source import Symbol
|
||||
from ._chart import (
|
||||
ChartPlotWidget,
|
||||
LinkedSplits,
|
||||
|
@ -53,18 +50,14 @@ from ._forms import (
|
|||
mk_form,
|
||||
open_form_input_handling,
|
||||
)
|
||||
from ..fsp._api import (
|
||||
maybe_mk_fsp_shm,
|
||||
Fsp,
|
||||
)
|
||||
from ..fsp._api import maybe_mk_fsp_shm, Fsp
|
||||
from ..fsp import cascade
|
||||
from ..fsp._volume import (
|
||||
# tina_vwap,
|
||||
tina_vwap,
|
||||
dolla_vlm,
|
||||
flow_rates,
|
||||
)
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -78,14 +71,15 @@ def has_vlm(ohlcv: ShmArray) -> bool:
|
|||
|
||||
|
||||
def update_fsp_chart(
|
||||
viz,
|
||||
chart: ChartPlotWidget,
|
||||
flow,
|
||||
graphics_name: str,
|
||||
array_key: Optional[str],
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
|
||||
shm = viz.shm
|
||||
shm = flow.shm
|
||||
if not shm:
|
||||
return
|
||||
|
||||
|
@ -100,15 +94,18 @@ def update_fsp_chart(
|
|||
# update graphics
|
||||
# NOTE: this does a length check internally which allows it
|
||||
# staying above the last row check below..
|
||||
viz.update_graphics()
|
||||
chart.update_graphics_from_flow(
|
||||
graphics_name,
|
||||
array_key=array_key or graphics_name,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# XXX: re: ``array_key``: fsp func names must be unique meaning we
|
||||
# can't have duplicates of the underlying data even if multiple
|
||||
# sub-charts reference it under different 'named charts'.
|
||||
|
||||
# read from last calculated value and update any label
|
||||
last_val_sticky = viz.plot.getAxis(
|
||||
'right')._stickies.get(graphics_name)
|
||||
last_val_sticky = chart._ysticks.get(graphics_name)
|
||||
if last_val_sticky:
|
||||
last = last_row[array_key]
|
||||
last_val_sticky.update_from_data(-1, last)
|
||||
|
@ -156,13 +153,12 @@ async def open_fsp_sidepane(
|
|||
)
|
||||
|
||||
# https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
|
||||
FspConfig = msgspec.defstruct(
|
||||
"Point",
|
||||
[('name', name)] + list(params.items()),
|
||||
bases=(Struct,),
|
||||
FspConfig = create_model(
|
||||
'FspConfig',
|
||||
name=name,
|
||||
**params,
|
||||
)
|
||||
model = FspConfig(name=name, **params)
|
||||
sidepane.model = model
|
||||
sidepane.model = FspConfig()
|
||||
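The ``msgspec.defstruct()`` call above builds the fsp config struct type at runtime from the parameter dict (the analogue of pydantic's ``create_model`` on the other side of the diff). A small self-contained sketch with made-up parameters:

    import msgspec

    params = {'period': 14, 'smooth': 3}  # hypothetical fsp parameters

    FspConfigSketch = msgspec.defstruct(
        'FspConfigSketch',
        # required field first, then (name, type, default) triples
        [('name', str)] + [(k, int, v) for k, v in params.items()],
    )

    cfg = FspConfigSketch(name='rsi', **params)
    assert (cfg.name, cfg.period, cfg.smooth) == ('rsi', 14, 3)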
|
||||
# just a logger for now until we get fsp configs up and running.
|
||||
async def settings_change(
|
||||
|
@ -192,7 +188,7 @@ async def open_fsp_actor_cluster(
|
|||
|
||||
from tractor._clustering import open_actor_cluster
|
||||
|
||||
# profiler = Profiler(
|
||||
# profiler = pg.debug.Profiler(
|
||||
# delayed=False,
|
||||
# disabled=False
|
||||
# )
|
||||
|
@ -209,12 +205,12 @@ async def open_fsp_actor_cluster(
|
|||
async def run_fsp_ui(
|
||||
|
||||
linkedsplits: LinkedSplits,
|
||||
flume: Flume,
|
||||
shm: ShmArray,
|
||||
started: trio.Event,
|
||||
target: Fsp,
|
||||
conf: dict[str, dict],
|
||||
loglevel: str,
|
||||
# profiler: Profiler,
|
||||
# profiler: pg.debug.Profiler,
|
||||
# _quote_throttle_rate: int = 58,
|
||||
|
||||
) -> None:
|
||||
|
@ -246,11 +242,9 @@ async def run_fsp_ui(
|
|||
else:
|
||||
chart = linkedsplits.subplots[overlay_with]
|
||||
|
||||
shm = flume.rt_shm
|
||||
chart.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
name=name,
|
||||
shm=shm,
|
||||
overlay=True,
|
||||
color='default_light',
|
||||
array_key=name,
|
||||
|
@ -260,9 +254,8 @@ async def run_fsp_ui(
|
|||
else:
|
||||
# create a new sub-chart widget for this fsp
|
||||
chart = linkedsplits.add_plot(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
name=name,
|
||||
shm=shm,
|
||||
|
||||
array_key=name,
|
||||
sidepane=sidepane,
|
||||
|
@ -282,10 +275,9 @@ async def run_fsp_ui(
|
|||
# profiler(f'fsp:{name} chart created')
|
||||
|
||||
# first UI update, usually from shm pushed history
|
||||
viz = chart.get_viz(array_key)
|
||||
update_fsp_chart(
|
||||
chart,
|
||||
viz,
|
||||
chart._flows[array_key],
|
||||
name,
|
||||
array_key=array_key,
|
||||
)
|
||||
|
@ -312,7 +304,7 @@ async def run_fsp_ui(
|
|||
# level_line(chart, 70, orient_v='bottom')
|
||||
# level_line(chart, 80, orient_v='top')
|
||||
|
||||
chart.view._set_yrange(viz=viz)
|
||||
chart.view._set_yrange()
|
||||
# done() # status updates
|
||||
|
||||
# profiler(f'fsp:{func_name} starting update loop')
|
||||
|
@ -353,9 +345,6 @@ async def run_fsp_ui(
|
|||
# last = time.time()
|
||||
|
||||
|
||||
# TODO: maybe this should be our ``Viz`` type since it maps
|
||||
# one flume to the next? The machinery for task/actor mgmt should
|
||||
# be part of the instantiation API?
|
||||
class FspAdmin:
|
||||
'''
|
||||
Client API for orchestrating FSP actors and displaying
|
||||
|
@ -367,7 +356,7 @@ class FspAdmin:
|
|||
tn: trio.Nursery,
|
||||
cluster: dict[str, tractor.Portal],
|
||||
linked: LinkedSplits,
|
||||
flume: Flume,
|
||||
src_shm: ShmArray,
|
||||
|
||||
) -> None:
|
||||
self.tn = tn
|
||||
|
@ -379,11 +368,7 @@ class FspAdmin:
|
|||
tuple[tractor.MsgStream, ShmArray]
|
||||
] = {}
|
||||
self._flow_registry: dict[_Token, str] = {}
|
||||
|
||||
# TODO: make this a `.src_flume` and add
|
||||
# a `dst_flume`?
|
||||
# (=> but then wouldn't this be the most basic `Viz`?)
|
||||
self.flume = flume
|
||||
self.src_shm = src_shm
|
||||
|
||||
def rr_next_portal(self) -> tractor.Portal:
|
||||
name, portal = next(self._rr_next_actor)
|
||||
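``rr_next_portal()`` above does round-robin worker selection over the actor cluster via an ``itertools.cycle`` iterator (set up elsewhere as ``self._rr_next_actor``). The idea in isolation, with placeholder portal objects:

    from itertools import cycle

    cluster = {'fsp_0': 'portal_0', 'fsp_1': 'portal_1'}  # hypothetical portals
    rr_next_actor = cycle(cluster.items())

    def rr_next_portal():
        name, portal = next(rr_next_actor)
        return portal

    assert [rr_next_portal() for _ in range(3)] == [
        'portal_0', 'portal_1', 'portal_0',
    ]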
|
@ -396,7 +381,7 @@ class FspAdmin:
|
|||
complete: trio.Event,
|
||||
started: trio.Event,
|
||||
fqsn: str,
|
||||
dst_fsp_flume: Flume,
|
||||
dst_shm: ShmArray,
|
||||
conf: dict,
|
||||
target: Fsp,
|
||||
loglevel: str,
|
||||
|
@ -417,10 +402,9 @@ class FspAdmin:
|
|||
# data feed key
|
||||
fqsn=fqsn,
|
||||
|
||||
# TODO: pass `Flume.to_msg()`s here?
|
||||
# mems
|
||||
src_shm_token=self.flume.rt_shm.token,
|
||||
dst_shm_token=dst_fsp_flume.rt_shm.token,
|
||||
src_shm_token=self.src_shm.token,
|
||||
dst_shm_token=dst_shm.token,
|
||||
|
||||
# target
|
||||
ns_path=ns_path,
|
||||
|
@ -437,14 +421,12 @@ class FspAdmin:
|
|||
ctx.open_stream() as stream,
|
||||
):
|
||||
|
||||
dst_fsp_flume.stream: tractor.MsgStream = stream
|
||||
|
||||
# register output data
|
||||
self._registry[
|
||||
(fqsn, ns_path)
|
||||
] = (
|
||||
stream,
|
||||
dst_fsp_flume.rt_shm,
|
||||
dst_shm,
|
||||
complete
|
||||
)
|
||||
|
||||
|
@ -458,9 +440,7 @@ class FspAdmin:
|
|||
# if the chart isn't hidden try to update
|
||||
# the data on screen.
|
||||
if not self.linked.isHidden():
|
||||
log.debug(
|
||||
f'Re-syncing graphics for fsp: {ns_path}'
|
||||
)
|
||||
log.debug(f'Re-syncing graphics for fsp: {ns_path}')
|
||||
self.linked.graphics_cycle(
|
||||
trigger_all=True,
|
||||
prepend_update_index=info['first'],
|
||||
|
@ -479,9 +459,9 @@ class FspAdmin:
|
|||
worker_name: Optional[str] = None,
|
||||
loglevel: str = 'info',
|
||||
|
||||
) -> (Flume, trio.Event):
|
||||
) -> (ShmArray, trio.Event):
|
||||
|
||||
fqsn = self.flume.symbol.fqsn
|
||||
fqsn = self.linked.symbol.front_fqsn()
|
||||
|
||||
# allocate an output shm array
|
||||
key, dst_shm, opened = maybe_mk_fsp_shm(
|
||||
|
@ -489,36 +469,16 @@ class FspAdmin:
|
|||
target=target,
|
||||
readonly=True,
|
||||
)
|
||||
|
||||
portal = self.cluster.get(worker_name) or self.rr_next_portal()
|
||||
provider_tag = portal.channel.uid
|
||||
|
||||
symbol = Symbol(
|
||||
key=key,
|
||||
broker_info={
|
||||
provider_tag: {'asset_type': 'fsp'},
|
||||
},
|
||||
)
|
||||
dst_fsp_flume = Flume(
|
||||
symbol=symbol,
|
||||
_rt_shm_token=dst_shm.token,
|
||||
first_quote={},
|
||||
|
||||
# set to 0 presuming for now that we can't load
|
||||
# FSP history (though we should eventually).
|
||||
izero_hist=0,
|
||||
izero_rt=0,
|
||||
)
|
||||
self._flow_registry[(
|
||||
self.flume.rt_shm._token,
|
||||
target.name
|
||||
)] = dst_shm._token
|
||||
self._flow_registry[
|
||||
(self.src_shm._token, target.name)
|
||||
] = dst_shm._token
|
||||
|
||||
# if not opened:
|
||||
# raise RuntimeError(
|
||||
# f'Already started FSP `{fqsn}:{func_name}`'
|
||||
# )
|
||||
|
||||
portal = self.cluster.get(worker_name) or self.rr_next_portal()
|
||||
complete = trio.Event()
|
||||
started = trio.Event()
|
||||
self.tn.start_soon(
|
||||
|
@ -527,13 +487,13 @@ class FspAdmin:
|
|||
complete,
|
||||
started,
|
||||
fqsn,
|
||||
dst_fsp_flume,
|
||||
dst_shm,
|
||||
conf,
|
||||
target,
|
||||
loglevel,
|
||||
)
|
||||
|
||||
return dst_fsp_flume, started
|
||||
return dst_shm, started
|
||||
|
||||
async def open_fsp_chart(
|
||||
self,
|
||||
|
@ -545,7 +505,7 @@ class FspAdmin:
|
|||
|
||||
) -> (trio.Event, ChartPlotWidget):
|
||||
|
||||
flume, started = await self.start_engine_task(
|
||||
shm, started = await self.start_engine_task(
|
||||
target,
|
||||
conf,
|
||||
loglevel,
|
||||
|
@ -557,7 +517,7 @@ class FspAdmin:
|
|||
run_fsp_ui,
|
||||
|
||||
self.linked,
|
||||
flume,
|
||||
shm,
|
||||
started,
|
||||
target,
|
||||
|
||||
|
@ -571,7 +531,7 @@ class FspAdmin:
|
|||
@acm
|
||||
async def open_fsp_admin(
|
||||
linked: LinkedSplits,
|
||||
flume: Flume,
|
||||
src_shm: ShmArray,
|
||||
**kwargs,
|
||||
|
||||
) -> AsyncGenerator[dict, dict[str, tractor.Portal]]:
|
||||
|
@ -592,7 +552,7 @@ async def open_fsp_admin(
|
|||
tn,
|
||||
cluster_map,
|
||||
linked,
|
||||
flume,
|
||||
src_shm,
|
||||
)
|
||||
try:
|
||||
yield admin
|
||||
|
@ -606,7 +566,7 @@ async def open_fsp_admin(
|
|||
async def open_vlm_displays(
|
||||
|
||||
linked: LinkedSplits,
|
||||
flume: Flume,
|
||||
ohlcv: ShmArray,
|
||||
dvlm: bool = True,
|
||||
|
||||
task_status: TaskStatus[ChartPlotWidget] = trio.TASK_STATUS_IGNORED,
|
||||
|
@ -628,8 +588,6 @@ async def open_vlm_displays(
|
|||
sig = inspect.signature(flow_rates.func)
|
||||
params = sig.parameters
|
||||
|
||||
ohlcv: ShmArray = flume.rt_shm
|
||||
|
||||
async with (
|
||||
open_fsp_sidepane(
|
||||
linked, {
|
||||
|
@ -649,7 +607,7 @@ async def open_vlm_displays(
|
|||
}
|
||||
},
|
||||
) as sidepane,
|
||||
open_fsp_admin(linked, flume) as admin,
|
||||
open_fsp_admin(linked, ohlcv) as admin,
|
||||
):
|
||||
# TODO: support updates
|
||||
# period_field = sidepane.fields['period']
|
||||
|
@ -657,21 +615,12 @@ async def open_vlm_displays(
|
|||
# str(period_param.default)
|
||||
# )
|
||||
|
||||
# use slightly less light (then bracket) gray
|
||||
# for volume from "main exchange" and a more "bluey"
|
||||
# gray for "dark" vlm.
|
||||
vlm_color = 'i3'
|
||||
dark_vlm_color = 'charcoal'
|
||||
|
||||
# built-in vlm which we plot ASAP since it's
|
||||
# usually data provided directly with OHLC history.
|
||||
shm = ohlcv
|
||||
# ohlc_chart = linked.chart
|
||||
|
||||
vlm_chart = linked.add_plot(
|
||||
chart = linked.add_plot(
|
||||
name='volume',
|
||||
shm=shm,
|
||||
flume=flume,
|
||||
|
||||
array_key='volume',
|
||||
sidepane=sidepane,
|
||||
|
@ -684,47 +633,63 @@ async def open_vlm_displays(
|
|||
# the curve item internals are pretty convoluted.
|
||||
style='step',
|
||||
)
|
||||
vlm_viz = vlm_chart._vizs['volume']
|
||||
|
||||
# force 0 to always be in view
|
||||
def multi_maxmin(
|
||||
names: list[str],
|
||||
|
||||
) -> tuple[float, float]:
|
||||
|
||||
mx = 0
|
||||
for name in names:
|
||||
|
||||
mxmn = chart.maxmin(name=name)
|
||||
if mxmn:
|
||||
ymax = mxmn[1]
|
||||
if ymax > mx:
|
||||
mx = ymax
|
||||
|
||||
return 0, mx
|
||||
|
||||
chart.view.maxmin = partial(multi_maxmin, names=['volume'])
|
||||
|
||||
# TODO: fix the x-axis label issue where if you put
|
||||
# the axis on the left it's totally not lined up...
|
||||
# show volume units value on LHS (for dinkus)
|
||||
# vlm_chart.hideAxis('right')
|
||||
# vlm_chart.showAxis('left')
|
||||
# chart.hideAxis('right')
|
||||
# chart.showAxis('left')
|
||||
|
||||
# send back new chart to caller
|
||||
task_status.started(vlm_chart)
|
||||
task_status.started(chart)
|
||||
|
||||
# should **not** be the same sub-chart widget
|
||||
assert vlm_chart.name != linked.chart.name
|
||||
assert chart.name != linked.chart.name
|
||||
|
||||
# sticky only on sub-charts atm
|
||||
last_val_sticky = vlm_chart.plotItem.getAxis(
|
||||
'right')._stickies.get(vlm_chart.name)
|
||||
last_val_sticky = chart._ysticks[chart.name]
|
||||
|
||||
# read from last calculated value
|
||||
value = shm.array['volume'][-1]
|
||||
|
||||
last_val_sticky.update_from_data(-1, value)
|
||||
|
||||
_, _, vlm_curve = vlm_chart.update_graphics_from_flow(
|
||||
vlm_curve = chart.update_graphics_from_flow(
|
||||
'volume',
|
||||
# shm.array,
|
||||
)
|
||||
|
||||
# size view to data once at outset
|
||||
vlm_chart.view._set_yrange(
|
||||
viz=vlm_viz
|
||||
)
|
||||
chart.view._set_yrange()
|
||||
|
||||
# add axis title
|
||||
axis = vlm_chart.getAxis('right')
|
||||
axis = chart.getAxis('right')
|
||||
axis.set_title(' vlm')
|
||||
|
||||
if dvlm:
|
||||
|
||||
tasks_ready = []
|
||||
# spawn and overlay $ vlm on the same subchart
|
||||
dvlm_flume, started = await admin.start_engine_task(
|
||||
dvlm_shm, started = await admin.start_engine_task(
|
||||
dolla_vlm,
|
||||
|
||||
{ # fsp engine conf
|
||||
|
@ -743,7 +708,7 @@ async def open_vlm_displays(
|
|||
# FIXME: we should error on starting the same fsp right
|
||||
# since it might collide with existing shm.. or wait we
|
||||
# had this before??
|
||||
# dolla_vlm
|
||||
# dolla_vlm,
|
||||
|
||||
tasks_ready.append(started)
|
||||
# profiler(f'created shm for fsp actor: {display_name}')
|
||||
|
@ -757,29 +722,22 @@ async def open_vlm_displays(
|
|||
# XXX: the main chart already contains a vlm "units" axis
|
||||
# so here we add an overlay wth a y-range in
|
||||
# $ liquidity-value units (normally a fiat like USD).
|
||||
dvlm_pi = vlm_chart.overlay_plotitem(
|
||||
dvlm_pi = chart.overlay_plotitem(
|
||||
'dolla_vlm',
|
||||
index=0, # place axis on inside (nearest to chart)
|
||||
|
||||
axis_title=' $vlm',
|
||||
axis_side='left',
|
||||
|
||||
axis_side='right',
|
||||
axis_kwargs={
|
||||
'typical_max_str': ' 100.0 M ',
|
||||
'formatter': partial(
|
||||
humanize,
|
||||
digits=2,
|
||||
),
|
||||
'text_color': vlm_color,
|
||||
},
|
||||
)
|
||||
|
||||
# TODO: should this maybe be implicit based on input args to
|
||||
# `.overlay_plotitem()` above?
|
||||
dvlm_pi.hideAxis('bottom')
|
||||
|
||||
# all to be overlayed curve names
|
||||
dvlm_fields = [
|
||||
fields = [
|
||||
'dolla_vlm',
|
||||
'dark_vlm',
|
||||
]
|
||||
|
@ -792,18 +750,32 @@ async def open_vlm_displays(
|
|||
'dark_trade_rate',
|
||||
]
|
||||
|
||||
group_mxmn = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=fields,
|
||||
# names=fields + dvlm_rate_fields,
|
||||
)
|
||||
|
||||
# add custom auto range handler
|
||||
dvlm_pi.vb._maxmin = group_mxmn
|
||||
|
||||
# use slightly less light (then bracket) gray
|
||||
# for volume from "main exchange" and a more "bluey"
|
||||
# gray for "dark" vlm.
|
||||
vlm_color = 'i3'
|
||||
dark_vlm_color = 'charcoal'
|
||||
|
||||
# add dvlm (step) curves to common view
|
||||
def chart_curves(
|
||||
names: list[str],
|
||||
pi: pg.PlotItem,
|
||||
shm: ShmArray,
|
||||
flume: Flume,
|
||||
step_mode: bool = False,
|
||||
style: str = 'solid',
|
||||
|
||||
) -> None:
|
||||
for name in names:
|
||||
|
||||
if 'dark' in name:
|
||||
color = dark_vlm_color
|
||||
elif 'rate' in name:
|
||||
|
@ -811,13 +783,9 @@ async def open_vlm_displays(
|
|||
else:
|
||||
color = 'bracket'
|
||||
|
||||
assert isinstance(shm, ShmArray)
|
||||
assert isinstance(flume, Flume)
|
||||
|
||||
viz = vlm_chart.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
curve, _ = chart.draw_curve(
|
||||
name=name,
|
||||
shm=shm,
|
||||
array_key=name,
|
||||
overlay=pi,
|
||||
color=color,
|
||||
|
@ -825,24 +793,29 @@ async def open_vlm_displays(
|
|||
style=style,
|
||||
pi=pi,
|
||||
)
|
||||
assert viz.plot is pi
|
||||
|
||||
# TODO: we need a better API to do this..
|
||||
# specially store ref to shm for lookup in display loop
|
||||
# since only a placeholder of `None` is entered in
|
||||
# ``.draw_curve()``.
|
||||
flow = chart._flows[name]
|
||||
assert flow.plot is pi
|
||||
|
||||
chart_curves(
|
||||
dvlm_fields,
|
||||
fields,
|
||||
dvlm_pi,
|
||||
dvlm_flume.rt_shm,
|
||||
dvlm_flume,
|
||||
dvlm_shm,
|
||||
step_mode=True,
|
||||
)
|
||||
|
||||
# spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
|
||||
# up since this one depends on it.
|
||||
|
||||
fr_flume, started = await admin.start_engine_task(
|
||||
fr_shm, started = await admin.start_engine_task(
|
||||
flow_rates,
|
||||
{ # fsp engine conf
|
||||
'func_name': 'flow_rates',
|
||||
'zero_on_step': True,
|
||||
'zero_on_step': False,
|
||||
},
|
||||
# loglevel,
|
||||
)
|
||||
|
@ -851,7 +824,7 @@ async def open_vlm_displays(
|
|||
# chart_curves(
|
||||
# dvlm_rate_fields,
|
||||
# dvlm_pi,
|
||||
# fr_flume.rt_shm,
|
||||
# fr_shm,
|
||||
# )
|
||||
|
||||
# TODO: is there a way to "sync" the dual axes such that only
|
||||
|
@ -860,24 +833,24 @@ async def open_vlm_displays(
|
|||
# displayed and the curves are effectively the same minus
|
||||
# liquidity events (well at least on low OHLC periods - 1s).
|
||||
vlm_curve.hide()
|
||||
vlm_chart.removeItem(vlm_curve)
|
||||
vlm_viz = vlm_chart._vizs['volume']
|
||||
vlm_viz.render = False
|
||||
chart.removeItem(vlm_curve)
|
||||
vflow = chart._flows['volume']
|
||||
vflow.render = False
|
||||
|
||||
# avoid range sorting on volume once disabled
|
||||
vlm_chart.view.disable_auto_yrange()
|
||||
chart.view.disable_auto_yrange()
|
||||
|
||||
# Trade rate overlay
|
||||
# XXX: requires an additional overlay for
|
||||
# a trades-per-period (time) y-range.
|
||||
tr_pi = vlm_chart.overlay_plotitem(
|
||||
tr_pi = chart.overlay_plotitem(
|
||||
'trade_rates',
|
||||
|
||||
# TODO: dynamically update period (and thus this axis?)
|
||||
# title from user input.
|
||||
axis_title='clears',
|
||||
axis_side='left',
|
||||
|
||||
axis_side='left',
|
||||
axis_kwargs={
|
||||
'typical_max_str': ' 10.0 M ',
|
||||
'formatter': partial(
|
||||
|
@ -888,13 +861,17 @@ async def open_vlm_displays(
|
|||
},
|
||||
|
||||
)
|
||||
tr_pi.hideAxis('bottom')
|
||||
# add custom auto range handler
|
||||
tr_pi.vb.maxmin = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=trade_rate_fields,
|
||||
)
|
||||
|
||||
chart_curves(
|
||||
trade_rate_fields,
|
||||
tr_pi,
|
||||
fr_flume.rt_shm,
|
||||
fr_flume,
|
||||
fr_shm,
|
||||
# step_mode=True,
|
||||
|
||||
# dashed line to represent "individual trades" being
|
||||
|
@ -928,7 +905,7 @@ async def open_vlm_displays(
|
|||
async def start_fsp_displays(
|
||||
|
||||
linked: LinkedSplits,
|
||||
flume: Flume,
|
||||
ohlcv: ShmArray,
|
||||
group_status_key: str,
|
||||
loglevel: str,
|
||||
|
||||
|
@ -963,7 +940,7 @@ async def start_fsp_displays(
|
|||
# },
|
||||
# },
|
||||
}
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
delayed=False,
|
||||
disabled=False
|
||||
)
|
||||
|
@ -971,10 +948,7 @@ async def start_fsp_displays(
|
|||
async with (
|
||||
|
||||
# NOTE: this admin internally opens an actor cluster
|
||||
open_fsp_admin(
|
||||
linked,
|
||||
flume,
|
||||
) as admin,
|
||||
open_fsp_admin(linked, ohlcv) as admin,
|
||||
):
|
||||
statuses = []
|
||||
for target, conf in fsp_conf.items():
|
||||
|
|
|
@ -20,13 +20,8 @@ Chart view box primitives
|
|||
"""
|
||||
from __future__ import annotations
|
||||
from contextlib import asynccontextmanager
|
||||
from functools import partial
|
||||
import time
|
||||
from typing import (
|
||||
Optional,
|
||||
Callable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from typing import Optional, Callable
|
||||
|
||||
import pyqtgraph as pg
|
||||
# from pyqtgraph.GraphicsScene import mouseEvents
|
||||
|
@ -38,16 +33,11 @@ import numpy as np
|
|||
import trio
|
||||
|
||||
from ..log import get_logger
|
||||
from .._profile import Profiler
|
||||
from .._profile import pg_profile_enabled, ms_slower_then
|
||||
# from ._style import _min_points_to_show
|
||||
from ._editors import SelectRect
|
||||
from . import _event
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._chart import ChartPlotWidget
|
||||
from ._dataviz import Viz
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
@ -85,6 +75,7 @@ async def handle_viewmode_kb_inputs(
|
|||
pressed: set[str] = set()
|
||||
|
||||
last = time.time()
|
||||
trigger_mode: str
|
||||
action: str
|
||||
|
||||
on_next_release: Optional[Callable] = None
|
||||
|
@ -150,16 +141,13 @@ async def handle_viewmode_kb_inputs(
|
|||
Qt.Key_Space,
|
||||
}
|
||||
):
|
||||
godw = view._chart.linked.godwidget
|
||||
godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
|
||||
godw.search.focus()
|
||||
view._chart.linked.godwidget.search.focus()
|
||||
|
||||
# esc and ctrl-c
|
||||
if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
|
||||
# ctrl-c as cancel
|
||||
# https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
|
||||
view.select_box.clear()
|
||||
view.linked.focus()
|
||||
|
||||
# cancel order or clear graphics
|
||||
if key == Qt.Key_C or key == Qt.Key_Delete:
|
||||
|
@ -190,17 +178,17 @@ async def handle_viewmode_kb_inputs(
|
|||
if key in pressed:
|
||||
pressed.remove(key)
|
||||
|
||||
# QUERY/QUOTE MODE
|
||||
# ----------------
|
||||
# QUERY/QUOTE MODE #
|
||||
if {Qt.Key_Q}.intersection(pressed):
|
||||
|
||||
view.linked.cursor.in_query_mode = True
|
||||
view.linkedsplits.cursor.in_query_mode = True
|
||||
|
||||
else:
|
||||
view.linked.cursor.in_query_mode = False
|
||||
view.linkedsplits.cursor.in_query_mode = False
|
||||
|
||||
# SELECTION MODE
|
||||
# --------------
|
||||
|
||||
if shift:
|
||||
if view.state['mouseMode'] == ViewBox.PanMode:
|
||||
view.setMouseMode(ViewBox.RectMode)
|
||||
|
@ -221,27 +209,18 @@ async def handle_viewmode_kb_inputs(
|
|||
|
||||
# ORDER MODE
|
||||
# ----------
|
||||
|
||||
# live vs. dark trigger + an action {buy, sell, alert}
|
||||
order_keys_pressed = ORDER_MODE.intersection(pressed)
|
||||
|
||||
if order_keys_pressed:
|
||||
|
||||
# TODO: it seems like maybe the composition should be
|
||||
# reversed here? Like, maybe we should have the nav have
|
||||
# access to the pos state and then make encapsulated logic
|
||||
# that shows the right stuff on screen instead of order mode
|
||||
# and position-related abstractions doing this?
|
||||
|
||||
# show the pp size label only if there is
|
||||
# a non-zero pos existing
|
||||
tracker = order_mode.current_pp
|
||||
if tracker.live_pp.size:
|
||||
tracker.nav.show()
|
||||
# show the pp size label
|
||||
order_mode.current_pp.show()
|
||||
|
||||
# TODO: show pp config mini-params in status bar widget
|
||||
# mode.pp_config.show()
|
||||
|
||||
trigger_type: str = 'dark'
|
||||
if (
|
||||
# 's' for "submit" to activate "live" order
|
||||
Qt.Key_S in pressed or
|
||||
|
@ -249,6 +228,9 @@ async def handle_viewmode_kb_inputs(
|
|||
):
|
||||
trigger_type: str = 'live'
|
||||
|
||||
else:
|
||||
trigger_type: str = 'dark'
|
||||
|
||||
# order mode trigger "actions"
|
||||
if Qt.Key_D in pressed: # for "damp eet"
|
||||
action = 'sell'
|
||||
|
@ -277,8 +259,8 @@ async def handle_viewmode_kb_inputs(
|
|||
Qt.Key_S in pressed or
|
||||
order_keys_pressed or
|
||||
Qt.Key_O in pressed
|
||||
)
|
||||
and key in NUMBER_LINE
|
||||
) and
|
||||
key in NUMBER_LINE
|
||||
):
|
||||
# hot key to set order slots size.
|
||||
# change edit field to current number line value,
|
||||
|
@ -296,7 +278,7 @@ async def handle_viewmode_kb_inputs(
|
|||
else: # none active
|
||||
|
||||
# hide pp label
|
||||
order_mode.current_pp.nav.hide_info()
|
||||
order_mode.current_pp.hide_info()
|
||||
|
||||
# if none are pressed, remove "staged" level
|
||||
# line under cursor position
|
||||
|
@ -337,6 +319,7 @@ async def handle_viewmode_mouse(
|
|||
):
|
||||
# when in order mode, submit execution
|
||||
# msg.event.accept()
|
||||
# breakpoint()
|
||||
view.order_mode.submit_order()
|
||||
|
||||
|
||||
|
@ -353,6 +336,16 @@ class ChartView(ViewBox):
|
|||
'''
|
||||
mode_name: str = 'view'
|
||||
|
||||
# "relay events" for making overlaid views work.
|
||||
# NOTE: these MUST be defined here (and can't be monkey patched
|
||||
# on later) due to signal construction requiring refs to be
|
||||
# in place during the run of meta-class machinery.
|
||||
mouseDragEventRelay = QtCore.Signal(object, object, object)
|
||||
wheelEventRelay = QtCore.Signal(object, object, object)
|
||||
|
||||
event_relay_source: 'Optional[ViewBox]' = None
|
||||
relays: dict[str, QtCore.Signal] = {}
|
||||
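The note above is a general PyQt constraint worth spelling out: ``pyqtSignal`` attributes only become real bound signals if they exist when the class object is created, because PyQt's metaclass scans for them at that point; they can't be monkey-patched on afterwards. A tiny illustration (hypothetical class, not piker code):

    from PyQt5.QtCore import QObject, pyqtSignal

    class Relay(QObject):
        # declared at class-definition time -> PyQt's metaclass turns this
        # into a bound, connectable signal on every instance
        somethingHappened = pyqtSignal(object)

    r = Relay()
    r.somethingHappened.connect(lambda payload: print('got', payload))
    r.somethingHappened.emit({'ok': True})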
|
||||
def __init__(
|
||||
self,
|
||||
|
||||
|
@ -374,6 +367,7 @@ class ChartView(ViewBox):
|
|||
)
|
||||
# for "known y-range style"
|
||||
self._static_yrange = static_yrange
|
||||
self._maxmin = None
|
||||
|
||||
# disable vertical scrolling
|
||||
self.setMouseEnabled(
|
||||
|
@ -381,8 +375,8 @@ class ChartView(ViewBox):
|
|||
y=True,
|
||||
)
|
||||
|
||||
self.linked = None
|
||||
self._chart: ChartPlotWidget | None = None # noqa
|
||||
self.linkedsplits = None
|
||||
self._chart: 'ChartPlotWidget' = None # noqa
|
||||
|
||||
# add our selection box annotator
|
||||
self.select_box = SelectRect(self)
|
||||
|
@ -393,7 +387,6 @@ class ChartView(ViewBox):
|
|||
|
||||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
self._ic = None
|
||||
self._yranger: Callable | None = None
|
||||
|
||||
def start_ic(
|
||||
self,
|
||||
|
@ -404,11 +397,8 @@ class ChartView(ViewBox):
|
|||
|
||||
'''
|
||||
if self._ic is None:
|
||||
try:
|
||||
self.chart.pause_all_feeds()
|
||||
self._ic = trio.Event()
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
||||
def signal_ic(
|
||||
self,
|
||||
|
@ -421,12 +411,9 @@ class ChartView(ViewBox):
|
|||
|
||||
'''
|
||||
if self._ic:
|
||||
try:
|
||||
self._ic.set()
|
||||
self._ic = None
|
||||
self.chart.resume_all_feeds()
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
||||
@asynccontextmanager
|
||||
async def open_async_input_handler(
|
||||
|
@ -454,18 +441,29 @@ class ChartView(ViewBox):
|
|||
yield self
|
||||
|
||||
@property
|
||||
def chart(self) -> ChartPlotWidget: # type: ignore # noqa
|
||||
def chart(self) -> 'ChartPlotWidget': # type: ignore # noqa
|
||||
return self._chart
|
||||
|
||||
@chart.setter
|
||||
def chart(self, chart: ChartPlotWidget) -> None: # type: ignore # noqa
|
||||
def chart(self, chart: 'ChartPlotWidget') -> None: # type: ignore # noqa
|
||||
self._chart = chart
|
||||
self.select_box.chart = chart
|
||||
if self._maxmin is None:
|
||||
self._maxmin = chart.maxmin
|
||||
|
||||
@property
|
||||
def maxmin(self) -> Callable:
|
||||
return self._maxmin
|
||||
|
||||
@maxmin.setter
|
||||
def maxmin(self, callback: Callable) -> None:
|
||||
self._maxmin = callback
|
||||
|
||||
def wheelEvent(
|
||||
self,
|
||||
ev,
|
||||
axis=None,
|
||||
relayed_from: ChartView = None,
|
||||
):
|
||||
'''
|
||||
Override "center-point" location for scrolling.
|
||||
|
@ -476,34 +474,27 @@ class ChartView(ViewBox):
|
|||
TODO: PR a method into ``pyqtgraph`` to make this configurable
|
||||
|
||||
'''
|
||||
linked = self.linked
|
||||
if (
|
||||
not linked
|
||||
):
|
||||
return
|
||||
|
||||
if axis in (0, 1):
|
||||
mask = [False, False]
|
||||
mask[axis] = self.state['mouseEnabled'][axis]
|
||||
else:
|
||||
mask = self.state['mouseEnabled'][:]
|
||||
|
||||
chart = self.linked.chart
|
||||
chart = self.linkedsplits.chart
|
||||
|
||||
# don't zoom more than the min points setting
|
||||
viz = chart.get_viz(chart.name)
|
||||
vl, lbar, rbar, vr = viz.bars_range()
|
||||
l, lbar, rbar, r = chart.bars_range()
|
||||
# vl = r - l
|
||||
|
||||
# TODO: max/min zoom limits incorporating time step size.
|
||||
# rl = vr - vl
|
||||
# if ev.delta() > 0 and rl <= _min_points_to_show:
|
||||
# log.warning("Max zoom bruh...")
|
||||
# if ev.delta() > 0 and vl <= _min_points_to_show:
|
||||
# log.debug("Max zoom bruh...")
|
||||
# return
|
||||
|
||||
# if (
|
||||
# ev.delta() < 0
|
||||
# and rl >= len(chart._vizs[chart.name].shm.array) + 666
|
||||
# and vl >= len(chart._flows[chart.name].shm.array) + 666
|
||||
# ):
|
||||
# log.warning("Min zoom bruh...")
|
||||
# log.debug("Min zoom bruh...")
|
||||
# return
|
||||
|
||||
# actual scaling factor
|
||||
|
@ -534,17 +525,49 @@ class ChartView(ViewBox):
|
|||
self.scaleBy(s, center)
|
||||
|
||||
else:
|
||||
# use right-most point of current curve graphic
|
||||
xl = viz.graphics.x_last()
|
||||
focal = min(
|
||||
xl,
|
||||
vr,
|
||||
|
||||
# center = pg.Point(
|
||||
# fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
|
||||
# )
|
||||
|
||||
# XXX: scroll "around" the right most element in the view
|
||||
# which stays "pinned" in place.
|
||||
|
||||
# furthest_right_coord = self.boundingRect().topRight()
|
||||
|
||||
# yaxis = pg.Point(
|
||||
# fn.invertQTransform(
|
||||
# self.childGroup.transform()
|
||||
# ).map(furthest_right_coord)
|
||||
# )
|
||||
|
||||
# This seems like the most "intuitive option, a hybrid of
|
||||
# tws and tv styles
|
||||
last_bar = pg.Point(int(rbar)) + 1
|
||||
|
||||
ryaxis = chart.getAxis('right')
|
||||
r_axis_x = ryaxis.pos().x()
|
||||
|
||||
end_of_l1 = pg.Point(
|
||||
round(
|
||||
chart.cv.mapToView(
|
||||
pg.Point(r_axis_x - chart._max_l1_line_len)
|
||||
# QPointF(chart._max_l1_line_len, 0)
|
||||
).x()
|
||||
)
|
||||
) # .x()
|
||||
|
||||
# self.state['viewRange'][0][1] = end_of_l1
|
||||
# focal = pg.Point((last_bar.x() + end_of_l1)/2)
|
||||
|
||||
focal = min(
|
||||
last_bar,
|
||||
end_of_l1,
|
||||
key=lambda p: p.x()
|
||||
)
|
||||
# focal = pg.Point(last_bar.x() + end_of_l1)
|
||||
|
||||
self._resetTarget()
|
||||
|
||||
# NOTE: scroll "around" the right most datum-element in view
|
||||
# gives the feeling of staying "pinned" in place.
|
||||
self.scaleBy(s, focal)
|
||||
|
||||
# XXX: the order of the next 2 lines i'm pretty sure
|
||||
|
@ -570,8 +593,10 @@ class ChartView(ViewBox):
|
|||
self,
|
||||
ev,
|
||||
axis: Optional[int] = None,
|
||||
relayed_from: ChartView = None,
|
||||
|
||||
) -> None:
|
||||
|
||||
pos = ev.pos()
|
||||
lastPos = ev.lastPos()
|
||||
dif = pos - lastPos
|
||||
|
@ -641,10 +666,10 @@ class ChartView(ViewBox):
|
|||
|
||||
# PANNING MODE
|
||||
else:
|
||||
try:
|
||||
# XXX: WHY
|
||||
ev.accept()
|
||||
|
||||
self.start_ic()
|
||||
except RuntimeError:
|
||||
pass
|
||||
# if self._ic is None:
|
||||
# self.chart.pause_all_feeds()
|
||||
# self._ic = trio.Event()
|
||||
|
@ -672,9 +697,6 @@ class ChartView(ViewBox):
|
|||
# self._ic = None
|
||||
# self.chart.resume_all_feeds()
|
||||
|
||||
# XXX: WHY
|
||||
ev.accept()
|
||||
|
||||
# WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
|
||||
elif button & QtCore.Qt.RightButton:
|
||||
|
||||
|
@ -720,12 +742,7 @@ class ChartView(ViewBox):
|
|||
*,
|
||||
|
||||
yrange: Optional[tuple[float, float]] = None,
|
||||
viz: Viz | None = None,
|
||||
|
||||
# NOTE: this value pairs (more or less) with L1 label text
|
||||
# height offset from from the bid/ask lines.
|
||||
range_margin: float = 0.09,
|
||||
|
||||
range_margin: float = 0.06,
|
||||
bars_range: Optional[tuple[int, int, int, int]] = None,
|
||||
|
||||
# flag to prevent triggering sibling charts from the same linked
|
||||
|
@ -744,7 +761,7 @@ class ChartView(ViewBox):
|
|||
'''
|
||||
name = self.name
|
||||
# print(f'YRANGE ON {name}')
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
msg=f'`ChartView._set_yrange()`: `{name}`',
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
|
@ -778,28 +795,18 @@ class ChartView(ViewBox):
|
|||
# XXX: only compute the mxmn range
|
||||
# if none is provided as input!
|
||||
if not yrange:
|
||||
|
||||
if not viz:
|
||||
breakpoint()
|
||||
|
||||
out = viz.maxmin()
|
||||
if out is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
return
|
||||
(
|
||||
ixrng,
|
||||
_,
|
||||
yrange
|
||||
) = out
|
||||
|
||||
profiler(f'`{self.name}:Viz.maxmin()` -> {ixrng}=>{yrange}')
|
||||
# flow = chart._flows[name]
|
||||
yrange = self._maxmin()
|
||||
|
||||
if yrange is None:
|
||||
log.warning(f'No yrange provided for {name}!?')
|
||||
print(f"WTF NO YRANGE {name}")
|
||||
return
|
||||
|
||||
ylow, yhigh = yrange
|
||||
|
||||
profiler(f'callback ._maxmin(): {yrange}')
|
||||
|
||||
# view margins: stay within a % of the "true range"
|
||||
diff = yhigh - ylow
|
||||
ylow = ylow - (diff * range_margin)
|
||||
|
@ -819,55 +826,54 @@ class ChartView(ViewBox):
|
|||
|
||||
def enable_auto_yrange(
|
||||
self,
|
||||
viz: Viz,
|
||||
src_vb: Optional[ChartView] = None,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Assign callbacks for rescaling and resampling y-axis data
|
||||
automatically based on data contents and ``ViewBox`` state.
|
||||
Assign callback for rescaling y-axis automatically
|
||||
based on data contents and ``ViewBox`` state.
|
||||
|
||||
'''
|
||||
if src_vb is None:
|
||||
src_vb = self
|
||||
|
||||
if self._yranger is None:
|
||||
self._yranger = partial(
|
||||
self._set_yrange,
|
||||
viz=viz,
|
||||
)
|
||||
# splitter(s) resizing
|
||||
src_vb.sigResized.connect(self._set_yrange)
|
||||
|
||||
# widget-UIs/splitter(s) resizing
|
||||
src_vb.sigResized.connect(self._yranger)
|
||||
|
||||
# mouse wheel doesn't emit XRangeChanged
|
||||
src_vb.sigRangeChangedManually.connect(self._yranger)
|
||||
|
||||
# re-sampling trigger:
|
||||
# TODO: a smarter way to avoid calling this needlessly?
|
||||
# 2 things i can think of:
|
||||
# - register downsample-able graphics specially and only
|
||||
# iterate those.
|
||||
# - only register this when certain downsample-able graphics are
|
||||
# - only register this when certain downsampleable graphics are
|
||||
# "added to scene".
|
||||
src_vb.sigRangeChangedManually.connect(
|
||||
self.maybe_downsample_graphics
|
||||
)
|
||||
|
||||
# mouse wheel doesn't emit XRangeChanged
|
||||
src_vb.sigRangeChangedManually.connect(self._set_yrange)
|
||||
|
||||
# src_vb.sigXRangeChanged.connect(self._set_yrange)
|
||||
# src_vb.sigXRangeChanged.connect(
|
||||
# self.maybe_downsample_graphics
|
||||
# )
|
||||
|
||||
def disable_auto_yrange(self) -> None:
|
||||
|
||||
# XXX: not entirely sure why we can't de-reg this..
|
||||
self.sigResized.disconnect(
|
||||
self._yranger,
|
||||
self._set_yrange,
|
||||
)
|
||||
|
||||
self.sigRangeChangedManually.disconnect(
|
||||
self._yranger,
|
||||
)
|
||||
|
||||
self.sigRangeChangedManually.disconnect(
|
||||
self.maybe_downsample_graphics
|
||||
)
|
||||
self.sigRangeChangedManually.disconnect(
|
||||
self._set_yrange,
|
||||
)
|
||||
|
||||
# self.sigXRangeChanged.disconnect(self._set_yrange)
|
||||
# self.sigXRangeChanged.disconnect(
|
||||
# self.maybe_downsample_graphics
|
||||
# )
|
||||
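One reason ``enable_auto_yrange()`` stashes the ``partial`` as ``self._yranger``: Qt's ``disconnect()`` needs the very same callable object that was connected, and every ``functools.partial(...)`` call produces a new object. A stripped-down sketch of that pattern (placeholder names, not the real ``ChartView``/``Viz`` types):

    from functools import partial

    from PyQt5.QtCore import QObject, pyqtSignal

    class Source(QObject):
        sigResized = pyqtSignal()

    class View:
        def __init__(self, src: Source) -> None:
            self.src = src
            self._yranger = None

        def _set_yrange(self, viz=None) -> None:
            print('rescaling y-range for', viz)

        def enable(self, viz) -> None:
            if self._yranger is None:
                self._yranger = partial(self._set_yrange, viz=viz)
            self.src.sigResized.connect(self._yranger)

        def disable(self) -> None:
            # works because we disconnect the *same* partial object
            self.src.sigResized.disconnect(self._yranger)

    src = Source()
    view = View(src)
    view.enable(viz='volume')
    src.sigResized.emit()   # -> rescaling y-range for volume
    view.disable()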
|
||||
def x_uppx(self) -> float:
|
||||
'''
|
||||
|
@ -876,7 +882,7 @@ class ChartView(ViewBox):
|
|||
graphics items which are our children.
|
||||
|
||||
'''
|
||||
graphics = [f.graphics for f in self._chart._vizs.values()]
|
||||
graphics = [f.graphics for f in self._chart._flows.values()]
|
||||
if not graphics:
|
||||
return 0
|
||||
|
||||
|
@ -889,9 +895,10 @@ class ChartView(ViewBox):
|
|||
|
||||
def maybe_downsample_graphics(
|
||||
self,
|
||||
autoscale_overlays: bool = False,
|
||||
autoscale_overlays: bool = True,
|
||||
):
|
||||
profiler = Profiler(
|
||||
|
||||
profiler = pg.debug.Profiler(
|
||||
msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
|
||||
disabled=not pg_profile_enabled(),
|
||||
|
||||
|
@ -905,14 +912,10 @@ class ChartView(ViewBox):
|
|||
|
||||
# TODO: a faster single-loop-iterator way of doing this XD
|
||||
chart = self._chart
|
||||
plots = {chart.name: chart}
|
||||
|
||||
linked = self.linked
|
||||
if linked:
|
||||
plots |= linked.subplots
|
||||
|
||||
linked = self.linkedsplits
|
||||
plots = linked.subplots | {chart.name: chart}
|
||||
for chart_name, chart in plots.items():
|
||||
for name, flow in chart._vizs.items():
|
||||
for name, flow in chart._flows.items():
|
||||
|
||||
if (
|
||||
not flow.render
|
||||
|
@ -920,24 +923,25 @@ class ChartView(ViewBox):
|
|||
# XXX: super important to be aware of this.
|
||||
# or not flow.graphics.isVisible()
|
||||
):
|
||||
# print(f'skipping {flow.name}')
|
||||
continue
|
||||
|
||||
# pass in no array which will read and render from the last
|
||||
# passed array (normally provided by the display loop.)
|
||||
chart.update_graphics_from_flow(name)
|
||||
chart.update_graphics_from_flow(
|
||||
name,
|
||||
use_vr=True,
|
||||
)
|
||||
|
||||
# for each overlay on this chart auto-scale the
|
||||
# y-range to max-min values.
|
||||
# if autoscale_overlays:
|
||||
# overlay = chart.pi_overlay
|
||||
# if overlay:
|
||||
# for pi in overlay.overlays:
|
||||
# pi.vb._set_yrange(
|
||||
# # TODO: get the range once up front...
|
||||
# # bars_range=br,
|
||||
# viz=pi.viz,
|
||||
# )
|
||||
# profiler('autoscaled linked plots')
|
||||
if autoscale_overlays:
|
||||
overlay = chart.pi_overlay
|
||||
if overlay:
|
||||
for pi in overlay.overlays:
|
||||
pi.vb._set_yrange(
|
||||
# TODO: get the range once up front...
|
||||
# bars_range=br,
|
||||
)
|
||||
profiler('autoscaled linked plots')
|
||||
|
||||
profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
|
||||
|
|
|
@ -26,24 +26,22 @@ from PyQt5.QtCore import QPointF
|
|||
|
||||
from ._axes import YAxisLabel
|
||||
from ._style import hcolor
|
||||
from ._pg_overrides import PlotItem
|
||||
|
||||
|
||||
class LevelLabel(YAxisLabel):
|
||||
'''
|
||||
Y-axis (vertically) oriented, horizontal label that sticks to
|
||||
"""Y-axis (vertically) oriented, horizontal label that sticks to
|
||||
where it's placed despite chart resizing and supports displaying
|
||||
multiple fields.
|
||||
|
||||
|
||||
TODO: replace the rectangle-text part with our new ``Label`` type.
|
||||
|
||||
'''
|
||||
_x_br_offset: float = -16
|
||||
_y_txt_h_scaling: float = 2
|
||||
"""
|
||||
_x_margin = 0
|
||||
_y_margin = 0
|
||||
|
||||
# adjustment "further away from" anchor point
|
||||
_x_offset = 0
|
||||
_x_offset = 9
|
||||
_y_offset = 0
|
||||
|
||||
# fields to be displayed in the label string
|
||||
|
@ -59,12 +57,12 @@ class LevelLabel(YAxisLabel):
|
|||
chart,
|
||||
parent,
|
||||
|
||||
color: str = 'default_light',
|
||||
color: str = 'bracket',
|
||||
|
||||
orient_v: str = 'bottom',
|
||||
orient_h: str = 'right',
|
||||
orient_h: str = 'left',
|
||||
|
||||
opacity: float = 1,
|
||||
opacity: float = 0,
|
||||
|
||||
# makes order line labels offset from their parent axis
|
||||
# such that they don't collide with the L1/L2 lines/prices
|
||||
|
@ -100,15 +98,13 @@ class LevelLabel(YAxisLabel):
|
|||
|
||||
self._h_shift = {
|
||||
'left': -1.,
|
||||
'right': 0.,
|
||||
'right': 0.
|
||||
}[orient_h]
|
||||
|
||||
self.fields = self._fields.copy()
|
||||
# ensure default format fields are in correct
|
||||
self.set_fmt_str(self._fmt_str, self.fields)
|
||||
|
||||
self.setZValue(10)
|
||||
|
||||
@property
|
||||
def color(self):
|
||||
return self._hcolor
|
||||
|
@ -116,10 +112,7 @@ class LevelLabel(YAxisLabel):
|
|||
@color.setter
|
||||
def color(self, color: str) -> None:
|
||||
self._hcolor = color
|
||||
self._pen = self.pen = pg.mkPen(
|
||||
hcolor(color),
|
||||
width=3,
|
||||
)
|
||||
self._pen = self.pen = pg.mkPen(hcolor(color))
|
||||
|
||||
def update_on_resize(self, vr, r):
|
||||
"""Tiis is a ``.sigRangeChanged()`` handler.
|
||||
|
@ -131,16 +124,15 @@ class LevelLabel(YAxisLabel):
|
|||
self,
|
||||
fields: dict = None,
|
||||
) -> None:
|
||||
'''
|
||||
Update the label's text contents **and** position from
|
||||
"""Update the label's text contents **and** position from
|
||||
a view box coordinate datum.
|
||||
|
||||
'''
|
||||
"""
|
||||
self.fields.update(fields)
|
||||
level = self.fields['level']
|
||||
|
||||
# map "level" to local coords
|
||||
abs_xy = self._pi.mapFromView(QPointF(0, level))
|
||||
abs_xy = self._chart.mapFromView(QPointF(0, level))
|
||||
|
||||
self.update_label(
|
||||
abs_xy,
|
||||
|
@ -157,7 +149,7 @@ class LevelLabel(YAxisLabel):
|
|||
h, w = self.set_label_str(fields)
|
||||
|
||||
if self._adjust_to_l1:
|
||||
self._x_offset = self._pi.chart_widget._max_l1_line_len
|
||||
self._x_offset = self._chart._max_l1_line_len
|
||||
|
||||
self.setPos(QPointF(
|
||||
self._h_shift * (w + self._x_offset),
|
||||
|
@ -182,8 +174,7 @@ class LevelLabel(YAxisLabel):
|
|||
fields: dict,
|
||||
):
|
||||
# use space as e3 delim
|
||||
self.label_str = self._fmt_str.format(
|
||||
**fields).replace(',', ' ')
|
||||
self.label_str = self._fmt_str.format(**fields).replace(',', ' ')
|
||||
|
||||
br = self.boundingRect()
|
||||
h, w = br.height(), br.width()
|
||||
|
@ -196,14 +187,14 @@ class LevelLabel(YAxisLabel):
|
|||
self,
|
||||
p: QtGui.QPainter,
|
||||
rect: QtCore.QRectF
|
||||
|
||||
) -> None:
|
||||
|
||||
p.setPen(self._pen)
|
||||
|
||||
rect = self.rect
|
||||
|
||||
if self._orient_v == 'bottom':
|
||||
lp, rp = rect.topLeft(), rect.topRight()
|
||||
# p.drawLine(rect.topLeft(), rect.topRight())
|
||||
|
||||
elif self._orient_v == 'top':
|
||||
lp, rp = rect.bottomLeft(), rect.bottomRight()
|
||||
|
@ -217,11 +208,6 @@ class LevelLabel(YAxisLabel):
|
|||
])
|
||||
)
|
||||
|
||||
p.fillRect(
|
||||
self.rect,
|
||||
self.bg_color,
|
||||
)
|
||||
|
||||
def highlight(self, pen) -> None:
|
||||
self._pen = pen
|
||||
self.update()
|
||||
|
@ -250,46 +236,43 @@ class L1Label(LevelLabel):
|
|||
# Set a global "max L1 label length" so we can
|
||||
# look it up on order lines and adjust their
|
||||
# labels not to overlap with it.
|
||||
chart = self._pi.chart_widget
|
||||
chart = self._chart
|
||||
chart._max_l1_line_len: float = max(
|
||||
chart._max_l1_line_len,
|
||||
w,
|
||||
w
|
||||
)
|
||||
|
||||
return h, w
|
||||
|
||||
|
||||
class L1Labels:
|
||||
'''
|
||||
Level 1 bid ask labels for dynamic update on price-axis.
|
||||
"""Level 1 bid ask labels for dynamic update on price-axis.
|
||||
|
||||
'''
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
plotitem: PlotItem,
|
||||
chart: 'ChartPlotWidget', # noqa
|
||||
digits: int = 2,
|
||||
size_digits: int = 3,
|
||||
font_size: str = 'small',
|
||||
) -> None:
|
||||
|
||||
chart = self.chart = plotitem.chart_widget
|
||||
self.chart = chart
|
||||
|
||||
raxis = plotitem.getAxis('right')
|
||||
raxis = chart.getAxis('right')
|
||||
kwargs = {
|
||||
'chart': plotitem,
|
||||
'chart': chart,
|
||||
'parent': raxis,
|
||||
|
||||
'opacity': .9,
|
||||
'opacity': 1,
|
||||
'font_size': font_size,
|
||||
'fg_color': 'default_light',
|
||||
'bg_color': chart.view_color, # normally 'papas_special'
|
||||
'fg_color': chart.pen_color,
|
||||
'bg_color': chart.view_color,
|
||||
}
|
||||
|
||||
# TODO: add humanized source-asset
|
||||
# info format.
|
||||
fmt_str = (
|
||||
' {size:.{size_digits}f} u'
|
||||
# '{level:,.{level_digits}f} '
|
||||
' {size:.{size_digits}f} x '
|
||||
'{level:,.{level_digits}f} '
|
||||
)
|
||||
fields = {
|
||||
'level': 0,
|
||||
|
@ -302,17 +285,12 @@ class L1Labels:
|
|||
orient_v='bottom',
|
||||
**kwargs,
|
||||
)
|
||||
bid.set_fmt_str(
|
||||
fmt_str='\n' + fmt_str,
|
||||
fields=fields,
|
||||
)
|
||||
bid.set_fmt_str(fmt_str=fmt_str, fields=fields)
|
||||
bid.show()
|
||||
|
||||
ask = self.ask_label = L1Label(
|
||||
orient_v='top',
|
||||
**kwargs,
|
||||
)
|
||||
ask.set_fmt_str(
|
||||
fmt_str=fmt_str,
|
||||
fields=fields)
|
||||
ask.set_fmt_str(fmt_str=fmt_str, fields=fields)
|
||||
ask.show()
|
||||
|
|
|
@ -233,36 +233,6 @@ class Label:
|
|||
def delete(self) -> None:
|
||||
self.vb.scene().removeItem(self.txt)
|
||||
|
||||
# NOTE: pulled out from ``ChartPlotWidget`` from way way old code.
|
||||
# def _label_h(self, yhigh: float, ylow: float) -> float:
|
||||
# # compute contents label "height" in view terms
|
||||
# # to avoid having data "contents" overlap with them
|
||||
# if self._labels:
|
||||
# label = self._labels[self.name][0]
|
||||
|
||||
# rect = label.itemRect()
|
||||
# tl, br = rect.topLeft(), rect.bottomRight()
|
||||
# vb = self.plotItem.vb
|
||||
|
||||
# try:
|
||||
# # on startup labels might not yet be rendered
|
||||
# top, bottom = (vb.mapToView(tl).y(), vb.mapToView(br).y())
|
||||
|
||||
# # XXX: magic hack, how do we compute exactly?
|
||||
# label_h = (top - bottom) * 0.42
|
||||
|
||||
# except np.linalg.LinAlgError:
|
||||
# label_h = 0
|
||||
# else:
|
||||
# label_h = 0
|
||||
|
||||
# # print(f'label height {self.name}: {label_h}')
|
||||
|
||||
# if label_h > yhigh - ylow:
|
||||
# label_h = 0
|
||||
|
||||
# print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")
|
||||
|
||||
|
||||
class FormatLabel(QLabel):
|
||||
'''
|
||||
|
|
|
@ -18,14 +18,9 @@
|
|||
Lines for orders, alerts, L2.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from functools import partial
|
||||
from math import floor
|
||||
from typing import (
|
||||
Optional,
|
||||
Callable,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
from typing import Optional, Callable
|
||||
|
||||
import pyqtgraph as pg
|
||||
from pyqtgraph import Point, functions as fn
|
||||
|
@ -42,9 +37,6 @@ from ..calc import humanize
|
|||
from ._label import Label
|
||||
from ._style import hcolor, _font
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._cursor import Cursor
|
||||
|
||||
|
||||
# TODO: probably worth investigating if we can
|
||||
# make .boundingRect() faster:
|
||||
|
@ -92,7 +84,7 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
self._marker = None
|
||||
self.only_show_markers_on_hover = only_show_markers_on_hover
|
||||
self.track_marker_pos: bool = False
|
||||
self.show_markers: bool = True # presuming the line is hovered at init
|
||||
|
||||
# should line go all the way to far end or leave a "margin"
|
||||
# space for other graphics (eg. L1 book)
|
||||
|
@ -130,9 +122,6 @@ class LevelLine(pg.InfiniteLine):
|
|||
self._y_incr_mult = 1 / chart.linked.symbol.tick_size
|
||||
self._right_end_sc: float = 0
|
||||
|
||||
# use px caching
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
|
||||
def txt_offsets(self) -> tuple[int, int]:
|
||||
return 0, 0
|
||||
|
||||
|
@ -227,23 +216,20 @@ class LevelLine(pg.InfiniteLine):
|
|||
y: float
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Chart coordinates cursor tracking callback.
|
||||
'''Chart coordinates cursor tracking callback.
|
||||
|
||||
this is called by our ``Cursor`` type once this line is set to
|
||||
track the cursor: for every movement this callback is invoked to
|
||||
reposition the line with the current view coordinates.
|
||||
|
||||
'''
|
||||
self.movable = True
|
||||
self.set_level(y)  # implicitly calls reposition handler
|
||||
|
||||
def mouseDragEvent(self, ev):
|
||||
'''
|
||||
Override the ``InfiniteLine`` handler since we need more
|
||||
"""Override the ``InfiniteLine`` handler since we need more
|
||||
detailed control and start end signalling.
|
||||
|
||||
'''
|
||||
"""
|
||||
cursor = self._chart.linked.cursor
|
||||
|
||||
# hide y-crosshair
|
||||
|
@ -295,20 +281,10 @@ class LevelLine(pg.InfiniteLine):
|
|||
# show y-crosshair again
|
||||
cursor.show_xhair()
|
||||
|
||||
def get_cursor(self) -> Optional[Cursor]:
|
||||
|
||||
chart = self._chart
|
||||
cur = chart.linked.cursor
|
||||
if self in cur._hovered:
|
||||
return cur
|
||||
|
||||
return None
|
||||
|
||||
def delete(self) -> None:
|
||||
'''
|
||||
Remove this line from containing chart/view/scene.
|
||||
"""Remove this line from containing chart/view/scene.
|
||||
|
||||
'''
|
||||
"""
|
||||
scene = self.scene()
|
||||
if scene:
|
||||
for label in self._labels:
|
||||
|
@ -322,8 +298,9 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
# remove from chart/cursor states
|
||||
chart = self._chart
|
||||
cur = self.get_cursor()
|
||||
if cur:
|
||||
cur = chart.linked.cursor
|
||||
|
||||
if self in cur._hovered:
|
||||
cur._hovered.remove(self)
|
||||
|
||||
chart.plotItem.removeItem(self)
|
||||
|
@ -331,8 +308,8 @@ class LevelLine(pg.InfiniteLine):
|
|||
def mouseDoubleClickEvent(
|
||||
self,
|
||||
ev: QtGui.QMouseEvent,
|
||||
|
||||
) -> None:
|
||||
|
||||
# TODO: enter labels edit mode
|
||||
print(f'double click {ev}')
|
||||
|
||||
|
@ -357,22 +334,30 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
line_end, marker_right, r_axis_x = self._chart.marker_right_points()
|
||||
|
||||
# (legacy) NOTE: at one point this seemed slower when moving around
|
||||
# order lines.. not sure if that's still true or why but we've
|
||||
# dropped the original hacky `.paint()` transform stuff for inf
|
||||
# line markers now - check the git history if it needs to be
|
||||
# reverted.
|
||||
if self._marker:
|
||||
if self.track_marker_pos:
|
||||
# make the line end at the marker's x pos
|
||||
line_end = marker_right = self._marker.pos().x()
|
||||
if self.show_markers and self.markers:
|
||||
|
||||
p.setPen(self.pen)
|
||||
qgo_draw_markers(
|
||||
self.markers,
|
||||
self.pen.color(),
|
||||
p,
|
||||
vb_left,
|
||||
vb_right,
|
||||
marker_right,
|
||||
)
|
||||
# marker_size = self.markers[0][2]
|
||||
self._maxMarkerSize = max([m[2] / 2. for m in self.markers])
|
||||
|
||||
# this seems slower when moving around
|
||||
# order lines.. not sure wtf is up with that.
|
||||
# for now we're just using it on the position line.
|
||||
elif self._marker:
|
||||
|
||||
# TODO: make this label update part of a scene-aware-marker
|
||||
# composed annotation
|
||||
self._marker.setPos(
|
||||
QPointF(marker_right, self.scene_y())
|
||||
)
|
||||
|
||||
if hasattr(self._marker, 'label'):
|
||||
self._marker.label.update()
|
||||
|
||||
|
@ -394,14 +379,16 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
def hide(self) -> None:
|
||||
super().hide()
|
||||
mkr = self._marker
|
||||
if mkr:
|
||||
mkr.hide()
|
||||
if self._marker:
|
||||
self._marker.hide()
|
||||
# needed for ``order_line()`` lines currently
|
||||
self._marker.label.hide()
|
||||
|
||||
def show(self) -> None:
|
||||
super().show()
|
||||
if self._marker:
|
||||
self._marker.show()
|
||||
# self._marker.label.show()
|
||||
|
||||
def scene_y(self) -> float:
|
||||
return self.getViewBox().mapFromView(
|
||||
|
@ -434,10 +421,6 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
return path
|
||||
|
||||
@property
|
||||
def marker(self) -> LevelMarker:
|
||||
return self._marker
|
||||
|
||||
def hoverEvent(self, ev):
|
||||
'''
|
||||
Mouse hover callback.
|
||||
|
@ -446,16 +429,17 @@ class LevelLine(pg.InfiniteLine):
|
|||
cur = self._chart.linked.cursor
|
||||
|
||||
# hovered
|
||||
if (
|
||||
not ev.isExit()
|
||||
and ev.acceptDrags(QtCore.Qt.LeftButton)
|
||||
):
|
||||
if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
|
||||
|
||||
# if already hovered we don't need to run again
|
||||
if self.mouseHovering is True:
|
||||
return
|
||||
|
||||
if self.only_show_markers_on_hover:
|
||||
self.show_markers()
|
||||
self.show_markers = True
|
||||
|
||||
if self._marker:
|
||||
self._marker.show()
|
||||
|
||||
# highlight if so configured
|
||||
if self.highlight_on_hover:
|
||||
|
@ -498,7 +482,11 @@ class LevelLine(pg.InfiniteLine):
|
|||
cur._hovered.remove(self)
|
||||
|
||||
if self.only_show_markers_on_hover:
|
||||
self.hide_markers()
|
||||
self.show_markers = False
|
||||
|
||||
if self._marker:
|
||||
self._marker.hide()
|
||||
self._marker.label.hide()
|
||||
|
||||
if self not in cur._trackers:
|
||||
cur.show_xhair(y_label_level=self.value())
|
||||
|
@ -510,15 +498,6 @@ class LevelLine(pg.InfiniteLine):
|
|||
|
||||
self.update()
|
||||
|
||||
def hide_markers(self) -> None:
|
||||
if self._marker:
|
||||
self._marker.hide()
|
||||
self._marker.label.hide()
|
||||
|
||||
def show_markers(self) -> None:
|
||||
if self._marker:
|
||||
self._marker.show()
|
||||
|
||||
|
||||
def level_line(
|
||||
|
||||
|
@ -539,10 +518,9 @@ def level_line(
|
|||
**kwargs,
|
||||
|
||||
) -> LevelLine:
|
||||
'''
|
||||
Convenience routine to add a styled horizontal line to a plot.
|
||||
"""Convenience routine to add a styled horizontal line to a plot.
|
||||
|
||||
'''
|
||||
"""
|
||||
hl_color = color + '_light' if highlight_on_hover else color
|
||||
|
||||
line = LevelLine(
|
||||
|
@ -724,7 +702,7 @@ def order_line(
|
|||
marker = LevelMarker(
|
||||
chart=chart,
|
||||
style=marker_style,
|
||||
get_level=line.value, # callback
|
||||
get_level=line.value,
|
||||
size=marker_size,
|
||||
keep_in_view=False,
|
||||
)
|
||||
|
@ -733,8 +711,7 @@ def order_line(
|
|||
marker = line.add_marker(marker)
|
||||
|
||||
# XXX: DON'T COMMENT THIS!
|
||||
# this fixes it the artifact issue!
|
||||
# .. of course, bounding rect stuff
|
||||
# this fixes it the artifact issue! .. of course, bounding rect stuff
|
||||
line._maxMarkerSize = marker_size
|
||||
|
||||
assert line._marker is marker
|
||||
|
@ -755,8 +732,7 @@ def order_line(
|
|||
|
||||
if action != 'alert':
|
||||
|
||||
# add a partial position label if we also added a level
|
||||
# marker
|
||||
# add a partial position label if we also added a level marker
|
||||
pp_size_label = Label(
|
||||
view=view,
|
||||
color=line.color,
|
||||
|
@ -790,9 +766,9 @@ def order_line(
|
|||
# XXX: without this the pp proportion label next the marker
|
||||
# seems to lag? this is the same issue we had with position
|
||||
# lines which we handle with ``.update_graphcis()``.
|
||||
# marker._on_paint=lambda marker: pp_size_label.update()
|
||||
marker._on_paint = lambda marker: pp_size_label.update()
|
||||
|
||||
# XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
|
||||
marker.label = label
|
||||
|
||||
# sanity check
|
||||
|
|
|
@@ -1,108 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Notifications utils.

"""
import os
import platform
import subprocess
from typing import Optional

import trio

from ..log import get_logger
from ..clearing._messages import (
    Status,
)

log = get_logger(__name__)


_dbus_uid: Optional[str] = ''


async def notify_from_ems_status_msg(
    msg: Status,
    duration: int = 3000,
    is_subproc: bool = False,

) -> None:
    '''
    Send a linux desktop notification.

    Handle subprocesses by discovering the dbus user id
    on first call.

    '''
    if platform.system() != "Linux":
        return

    # TODO: this in another task?
    # not sure if this will ever be a bottleneck,
    # we probably could do graphics stuff first tho?

    if is_subproc:
        global _dbus_uid
        su = os.environ.get('SUDO_USER')
        if (
            not _dbus_uid
            and su
        ):

            # TODO: use `trio` but we need to use nursery.start()
            # to use pipes?
            # result = await trio.run_process(
            result = subprocess.run(
                [
                    'id',
                    '-u',
                    su,
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                # check=True
            )
            _dbus_uid = result.stdout.decode("utf-8").replace('\n', '')

            os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
                f'unix:path=/run/user/{_dbus_uid}/bus'
            )

    try:
        result = await trio.run_process(
            [
                'notify-send',
                '-u', 'normal',
                '-t', f'{duration}',
                'piker',

                # TODO: add in standard fill/exec info that maybe we
                # pack in a broker independent way?
                f"'{msg.pformat()}'",
            ],
            capture_stdout=True,
            capture_stderr=True,
            check=False,
        )
        if result.returncode != 0:
            log.warn(f'Notification daemon crashed stderr: {result.stderr}')

        log.runtime(result)

    except FileNotFoundError:
        log.warn('Tried to send a notification but \'notify-send\' not present')
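As context for the removal above: the helper's subprocess branch resolves the desktop session bus by looking up the sudo'ing user's uid. A minimal standalone sketch of that trick (hypothetical helper name, not from the diff):

import os
import subprocess
from typing import Optional

def dbus_bus_address_for_sudo_user() -> Optional[str]:
    # when running under ``sudo`` the notification daemon lives on the
    # *original* user's session bus, so resolve their uid via ``id -u``.
    su = os.environ.get('SUDO_USER')
    if not su:
        return None
    uid = subprocess.run(
        ['id', '-u', su],
        stdout=subprocess.PIPE,
        check=True,
    ).stdout.decode().strip()
    return f'unix:path=/run/user/{uid}/bus'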
@@ -18,23 +18,23 @@ Super fast OHLC sampling graphics types.

"""
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import numpy as np
from PyQt5 import (
    QtGui,
    QtWidgets,
)
from PyQt5.QtCore import (
    QLineF,
    QRectF,
)
from PyQt5.QtWidgets import QGraphicsItem
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QLineF, QPointF
from PyQt5.QtGui import QPainterPath

from ._curve import FlowGraphic
from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
from ..log import get_logger
from .._profile import Profiler

if TYPE_CHECKING:
    from ._chart import LinkedSplits


log = get_logger(__name__)
@ -43,8 +43,7 @@ log = get_logger(__name__)
|
|||
def bar_from_ohlc_row(
|
||||
row: np.ndarray,
|
||||
# 0.5 is no overlap between arms, 1.0 is full overlap
|
||||
bar_w: float,
|
||||
bar_gap: float = 0.16
|
||||
w: float = 0.43
|
||||
|
||||
) -> tuple[QLineF]:
|
||||
'''
|
||||
|
@ -52,7 +51,8 @@ def bar_from_ohlc_row(
|
|||
OHLC "bar" for use in the "last datum" of a series.
|
||||
|
||||
'''
|
||||
open, high, low, close, index = row
|
||||
open, high, low, close, index = row[
|
||||
['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
# TODO: maybe consider using `QGraphicsLineItem` ??
|
||||
# gives us a ``.boundingRect()`` on the objects which may make
|
||||
|
@ -60,11 +60,9 @@ def bar_from_ohlc_row(
|
|||
# history path faster since it's done in C++:
|
||||
# https://doc.qt.io/qt-5/qgraphicslineitem.html
|
||||
|
||||
mid: float = (bar_w / 2) + index
|
||||
|
||||
# high -> low vertical (body) line
|
||||
if low != high:
|
||||
hl = QLineF(mid, low, mid, high)
|
||||
hl = QLineF(index, low, index, high)
|
||||
else:
|
||||
# XXX: if we don't do it renders a weird rectangle?
|
||||
# see below for filtering this later...
|
||||
|
@ -75,55 +73,48 @@ def bar_from_ohlc_row(
|
|||
# the index's range according to the view mapping coordinates.
|
||||
|
||||
# open line
|
||||
o = QLineF(index + bar_gap, open, mid, open)
|
||||
o = QLineF(index - w, open, index, open)
|
||||
|
||||
# close line
|
||||
c = QLineF(
|
||||
mid, close,
|
||||
index + bar_w - bar_gap, close,
|
||||
)
|
||||
c = QLineF(index, close, index + w, close)
|
||||
|
||||
return [hl, o, c]
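A small worked example (values are made up, not from the diff) of the three segments the new ``bar_w``/``bar_gap`` parameters produce for one OHLC row:

from PyQt5.QtCore import QLineF

open_, high, low, close, index = 10.0, 12.0, 9.5, 11.0, 100.0
bar_w = 1.0              # one "step" in the index domain
bar_gap = 0.16 * bar_w   # spacing between neighbouring bars
mid = (bar_w / 2) + index

hl = QLineF(mid, low, mid, high)                        # vertical body: high -> low
o = QLineF(index + bar_gap, open_, mid, open_)          # left open arm
c = QLineF(mid, close, index + bar_w - bar_gap, close)  # right close arm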
|
||||
|
||||
|
||||
class BarItems(FlowGraphic):
|
||||
class BarItems(pg.GraphicsObject):
|
||||
'''
|
||||
"Price range" bars graphics rendered from a OHLC sampled sequence.
|
||||
|
||||
'''
|
||||
# XXX: causes this weird jitter bug when click-drag panning
|
||||
# where the path curve will awkwardly flicker back and forth?
|
||||
cache_mode: int = QGraphicsItem.NoCache
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
**kwargs,
|
||||
linked: LinkedSplits,
|
||||
plotitem: 'pg.PlotItem', # noqa
|
||||
pen_color: str = 'bracket',
|
||||
last_bar_color: str = 'bracket',
|
||||
|
||||
name: Optional[str] = None,
|
||||
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self.linked = linked
|
||||
# XXX: for the mega-lulz increasing width here increases draw
|
||||
# latency... so probably don't do it until we figure that out.
|
||||
self._color = pen_color
|
||||
self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
|
||||
self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
|
||||
self._name = name
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
self._last_bar_lines: tuple[QLineF, ...] | None = None
|
||||
self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
|
||||
self.path = QPainterPath()
|
||||
self._last_bar_lines: Optional[tuple[QLineF, ...]] = None
|
||||
|
||||
def x_last(self) -> None | float:
|
||||
'''
|
||||
Return the last most x value of the close line segment
|
||||
or if not drawn yet, ``None``.
|
||||
def x_uppx(self) -> int:
|
||||
# we expect the downsample curve report this.
|
||||
return 0
|
||||
|
||||
'''
|
||||
if self._last_bar_lines:
|
||||
close_arm_line = self._last_bar_lines[-1]
|
||||
return close_arm_line.x2() if close_arm_line else None
|
||||
else:
|
||||
return None
|
||||
|
||||
# Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
|
||||
def boundingRect(self):
|
||||
# profiler = Profiler(
|
||||
# msg=f'BarItems.boundingRect(): `{self._name}`',
|
||||
# disabled=not pg_profile_enabled(),
|
||||
# ms_threshold=ms_slower_then,
|
||||
# )
|
||||
# Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
|
||||
|
||||
# TODO: Can we do rect caching to make this faster
|
||||
# like `pg.PlotCurveItem` does? In theory it's just
|
||||
|
@ -143,37 +134,32 @@ class BarItems(FlowGraphic):
|
|||
hb.topLeft(),
|
||||
hb.bottomRight(),
|
||||
)
|
||||
mn_y = hb_tl.y()
|
||||
mx_y = hb_br.y()
|
||||
most_left = hb_tl.x()
|
||||
most_right = hb_br.x()
|
||||
# profiler('calc path vertices')
|
||||
|
||||
# need to include last bar height or BR will be off
|
||||
# OHLC line segments: [hl, o, c]
|
||||
last_lines: tuple[QLineF] | None = self._last_bar_lines
|
||||
mx_y = hb_br.y()
|
||||
mn_y = hb_tl.y()
|
||||
|
||||
last_lines = self._last_bar_lines
|
||||
if last_lines:
|
||||
(
|
||||
hl,
|
||||
o,
|
||||
c,
|
||||
) = last_lines
|
||||
most_right = c.x2() + 1
|
||||
ymx = ymn = c.y2()
|
||||
body_line = self._last_bar_lines[0]
|
||||
if body_line:
|
||||
mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
|
||||
mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
|
||||
|
||||
if hl:
|
||||
y1, y2 = hl.y1(), hl.y2()
|
||||
ymn = min(y1, y2)
|
||||
ymx = max(y1, y2)
|
||||
mx_y = max(ymx, mx_y)
|
||||
mn_y = min(ymn, mn_y)
|
||||
# profiler('calc last bar vertices')
|
||||
return QtCore.QRectF(
|
||||
|
||||
return QRectF(
|
||||
most_left,
|
||||
# top left
|
||||
QPointF(
|
||||
hb_tl.x(),
|
||||
mn_y,
|
||||
most_right - most_left + 1,
|
||||
mx_y - mn_y,
|
||||
),
|
||||
|
||||
# bottom right
|
||||
QPointF(
|
||||
hb_br.x() + 1,
|
||||
mx_y,
|
||||
)
|
||||
|
||||
)
|
||||
|
||||
def paint(
|
||||
|
@ -184,7 +170,7 @@ class BarItems(FlowGraphic):
|
|||
|
||||
) -> None:
|
||||
|
||||
profiler = Profiler(
|
||||
profiler = pg.debug.Profiler(
|
||||
disabled=not pg_profile_enabled(),
|
||||
ms_threshold=ms_slower_then,
|
||||
)
|
||||
|
@ -197,12 +183,12 @@ class BarItems(FlowGraphic):
|
|||
# as is necessary for what's in "view". Not sure if this will
|
||||
# lead to any perf gains other then when zoomed in to less bars
|
||||
# in view.
|
||||
p.setPen(self.last_step_pen)
|
||||
p.setPen(self.last_bar_pen)
|
||||
if self._last_bar_lines:
|
||||
p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
|
||||
profiler('draw last bar')
|
||||
|
||||
p.setPen(self._pen)
|
||||
p.setPen(self.bars_pen)
|
||||
p.drawPath(self.path)
|
||||
profiler(f'draw history path: {self.path.capacity()}')
|
||||
|
||||
|
@ -210,40 +196,29 @@ class BarItems(FlowGraphic):
|
|||
self,
|
||||
path: QPainterPath,
|
||||
src_data: np.ndarray,
|
||||
render_data: np.ndarray,
|
||||
reset: bool,
|
||||
array_key: str,
|
||||
index_field: str,
|
||||
|
||||
) -> None:
|
||||
|
||||
# relevant fields
|
||||
fields: list[str] = [
|
||||
'index',
|
||||
'open',
|
||||
'high',
|
||||
'low',
|
||||
'close',
|
||||
index_field,
|
||||
]
|
||||
],
|
||||
|
||||
) -> None:
|
||||
|
||||
# relevant fields
|
||||
ohlc = src_data[fields]
|
||||
# last_row = ohlc[-1:]
|
||||
last_row = ohlc[-1:]
|
||||
|
||||
# individual values
|
||||
last_row = o, h, l, last, i = ohlc[-1]
|
||||
|
||||
# times = src_data['time']
|
||||
# if times[-1] - times[-2]:
|
||||
# breakpoint()
|
||||
|
||||
index = src_data[index_field]
|
||||
step_size = index[-1] - index[-2]
|
||||
last_row = i, o, h, l, last = ohlc[-1]
|
||||
|
||||
# generate new lines objects for updatable "current bar"
|
||||
bg: float = 0.16 * step_size
|
||||
self._last_bar_lines = bar_from_ohlc_row(
|
||||
last_row,
|
||||
bar_w=step_size,
|
||||
bar_gap=bg,
|
||||
)
|
||||
self._last_bar_lines = bar_from_ohlc_row(last_row)
|
||||
|
||||
# assert i == graphics.start_index - 1
|
||||
# assert i == last_index
|
||||
|
@ -258,16 +233,10 @@ class BarItems(FlowGraphic):
|
|||
if l != h: # noqa
|
||||
|
||||
if body is None:
|
||||
body = self._last_bar_lines[0] = QLineF(
|
||||
i + bg, l,
|
||||
i + step_size - bg, h,
|
||||
)
|
||||
body = self._last_bar_lines[0] = QLineF(i, l, i, h)
|
||||
else:
|
||||
# update body
|
||||
body.setLine(
|
||||
body.x1(), l,
|
||||
body.x2(), h,
|
||||
)
|
||||
body.setLine(i, l, i, h)
|
||||
|
||||
# XXX: pretty sure this is causing an issue where the
|
||||
# bar has a large upward move right before the next
|
||||
|
@ -278,4 +247,4 @@ class BarItems(FlowGraphic):
|
|||
# date / from some previous sample. It's weird though
|
||||
# because i've seen it do this to bars i - 3 back?
|
||||
|
||||
return ohlc[index_field], ohlc['close']
|
||||
return ohlc['index'], ohlc['close']
|
||||
|
|
|
@@ -22,9 +22,12 @@ from __future__ import annotations
from typing import (
    Optional, Generic,
    TypeVar, Callable,
    Literal,
)
import enum
import sys

# from pydantic import BaseModel, validator
from pydantic import BaseModel, validator
from pydantic.generics import GenericModel
from PyQt5.QtWidgets import (
    QWidget,

@@ -35,7 +38,6 @@ from ._forms import (
    # FontScaledDelegate,
    Edit,
)
from ..data.types import Struct


DataType = TypeVar('DataType')

@@ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]):
    options: dict[str, DataType]
    # value: DataType = None

    # @validator('value')  # , always=True)
    @validator('value')  # , always=True)
    def set_value_first(
        cls,


@@ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]):
    widget_factory = Edit


class AllocatorPane(Struct):
class AllocatorPane(BaseModel):

    account = Selection[str](
        options=dict.fromkeys(
@@ -18,27 +18,23 @@
Charting overlay helpers.

'''
from collections import defaultdict
from functools import partial
from typing import (
    Callable,
    Optional,
)
from typing import Callable, Optional

from pyqtgraph.Qt.QtCore import (
    # QObject,
    # Signal,
    Qt,
    # QEvent,
)

from pyqtgraph.graphicsItems.AxisItem import AxisItem
from pyqtgraph.graphicsItems.ViewBox import ViewBox
# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
from pyqtgraph.Qt.QtCore import (
    QObject,
    Signal,
    QEvent,
    Qt,
)
from pyqtgraph.Qt.QtWidgets import (
    # QGraphicsGridLayout,
    QGraphicsLinearLayout,
)
from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout

from ._interaction import ChartView

__all__ = ["PlotItemOverlay"]
@ -84,20 +80,25 @@ class ComposedGridLayout:
|
|||
``<axis_name>i`` in the layout.
|
||||
|
||||
The ``item: PlotItem`` passed to the constructor's grid layout is
|
||||
used verbatim as the "main plot" who's view box is given precedence
|
||||
for input handling. The main plot's axes are removed from its
|
||||
used verbatim as the "main plot" who's view box is give precedence
|
||||
for input handling. The main plot's axes are removed from it's
|
||||
layout and placed in the surrounding exterior layouts to allow for
|
||||
re-ordering if desired.
|
||||
|
||||
'''
|
||||
def __init__(
|
||||
self,
|
||||
pi: PlotItem,
|
||||
item: PlotItem,
|
||||
grid: QGraphicsGridLayout,
|
||||
reverse: bool = False, # insert items to the "center"
|
||||
|
||||
) -> None:
|
||||
self.items: list[PlotItem] = []
|
||||
# self.grid = grid
|
||||
self.reverse = reverse
|
||||
|
||||
self.pitems: list[PlotItem] = []
|
||||
self._pi2axes: dict[ # TODO: use a ``bidict`` here?
|
||||
# TODO: use a ``bidict`` here?
|
||||
self._pi2axes: dict[
|
||||
int,
|
||||
dict[str, AxisItem],
|
||||
] = {}
|
||||
|
@ -119,13 +120,12 @@ class ComposedGridLayout:
|
|||
|
||||
if name in ('top', 'bottom'):
|
||||
orient = Qt.Vertical
|
||||
|
||||
elif name in ('left', 'right'):
|
||||
orient = Qt.Horizontal
|
||||
|
||||
layout.setOrientation(orient)
|
||||
|
||||
self.insert_plotitem(0, pi)
|
||||
self.insert(0, item)
|
||||
|
||||
# insert surrounding linear layouts into the parent pi's layout
|
||||
# such that additional axes can be appended arbitrarily without
|
||||
|
@ -135,14 +135,13 @@ class ComposedGridLayout:
|
|||
# TODO: do we need this?
|
||||
# axis should have been removed during insert above
|
||||
index = _axes_layout_indices[name]
|
||||
axis = pi.layout.itemAt(*index)
|
||||
axis = item.layout.itemAt(*index)
|
||||
if axis and axis.isVisible():
|
||||
assert linlayout.itemAt(0) is axis
|
||||
|
||||
# XXX: see comment in ``.insert_plotitem()``...
|
||||
# pi.layout.removeItem(axis)
|
||||
pi.layout.addItem(linlayout, *index)
|
||||
layout = pi.layout.itemAt(*index)
|
||||
# item.layout.removeItem(axis)
|
||||
item.layout.addItem(linlayout, *index)
|
||||
layout = item.layout.itemAt(*index)
|
||||
assert layout is linlayout
|
||||
|
||||
def _register_item(
|
||||
|
@ -158,32 +157,27 @@ class ComposedGridLayout:
|
|||
self._pi2axes.setdefault(name, {})[index] = axis
|
||||
|
||||
# enter plot into list for index tracking
|
||||
self.pitems.insert(index, plotitem)
|
||||
self.items.insert(index, plotitem)
|
||||
|
||||
def insert_plotitem(
|
||||
def insert(
|
||||
self,
|
||||
index: int,
|
||||
plotitem: PlotItem,
|
||||
|
||||
) -> tuple[int, list[AxisItem]]:
|
||||
) -> (int, int):
|
||||
'''
|
||||
Place item at index by inserting all axes into the grid
|
||||
at list-order appropriate position.
|
||||
|
||||
'''
|
||||
if index < 0:
|
||||
raise ValueError(
|
||||
'`.insert_plotitem()` only supports an index >= 0'
|
||||
)
|
||||
|
||||
inserted_axes: list[AxisItem] = []
|
||||
raise ValueError('`insert()` only supports an index >= 0')
|
||||
|
||||
# add plot's axes in sequence to the embedded linear layouts
|
||||
# for each "side" thus avoiding graphics collisions.
|
||||
for name, axis_info in plotitem.axes.copy().items():
|
||||
linlayout, axes = self.sides[name]
|
||||
axis = axis_info['item']
|
||||
inserted_axes.append(axis)
|
||||
|
||||
if axis in axes:
|
||||
# TODO: re-order using ``.pop()`` ?
|
||||
|
@ -196,20 +190,19 @@ class ComposedGridLayout:
|
|||
if (
|
||||
not axis.isVisible()
|
||||
|
||||
# XXX: we never skip moving the axes for the *root*
|
||||
# XXX: we never skip moving the axes for the *first*
|
||||
# plotitem inserted (even if not shown) since we need to
|
||||
# move all the hidden axes into linear sub-layouts for
|
||||
# that "central" plot in the overlay. Also if we don't
|
||||
# do it there's weird geomoetry calc offsets that make
|
||||
# view coords slightly off somehow .. smh
|
||||
and not len(self.pitems) == 0
|
||||
and not len(self.items) == 0
|
||||
):
|
||||
continue
|
||||
|
||||
# XXX: Remove old axis?
|
||||
# No, turns out we don't need this?
|
||||
# DON'T UNLINK IT since we need the original ``ViewBox`` to
|
||||
# still drive it with events/handlers B)
|
||||
# XXX: Remove old axis? No, turns out we don't need this?
|
||||
# DON'T unlink it since we the original ``ViewBox``
|
||||
# to still drive it B)
|
||||
# popped = plotitem.removeAxis(name, unlink=False)
|
||||
# assert axis is popped
|
||||
|
||||
|
@ -225,9 +218,9 @@ class ComposedGridLayout:
|
|||
|
||||
self._register_item(index, plotitem)
|
||||
|
||||
return (index, inserted_axes)
|
||||
return index
|
||||
|
||||
def append_plotitem(
|
||||
def append(
|
||||
self,
|
||||
item: PlotItem,
|
||||
|
||||
|
@ -239,7 +232,7 @@ class ComposedGridLayout:
|
|||
'''
|
||||
# for left and bottom axes we have to first remove
|
||||
# items and re-insert to maintain a list-order.
|
||||
return self.insert_plotitem(len(self.pitems), item)
|
||||
return self.insert(len(self.items), item)
|
||||
|
||||
def get_axis(
|
||||
self,
|
||||
|
@ -252,20 +245,20 @@ class ComposedGridLayout:
|
|||
if axis for that name is not shown.
|
||||
|
||||
'''
|
||||
index = self.pitems.index(plot)
|
||||
index = self.items.index(plot)
|
||||
named = self._pi2axes[name]
|
||||
return named.get(index)
|
||||
|
||||
# def pop(
|
||||
# self,
|
||||
# item: PlotItem,
|
||||
def pop(
|
||||
self,
|
||||
item: PlotItem,
|
||||
|
||||
# ) -> PlotItem:
|
||||
# '''
|
||||
# Remove item and restack all axes in list-order.
|
||||
) -> PlotItem:
|
||||
'''
|
||||
Remove item and restack all axes in list-order.
|
||||
|
||||
# '''
|
||||
# raise NotImplementedError
|
||||
'''
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
# Unimplemented features TODO:
|
||||
|
@ -286,6 +279,194 @@ class ComposedGridLayout:
|
|||
# axis?
|
||||
|
||||
|
||||
# TODO: we might want to enabled some kind of manual flag to disable
|
||||
# this method wrapping during type creation? As example a user could
|
||||
# definitively decide **not** to enable broadcasting support by
|
||||
# setting something like ``ViewBox.disable_relays = True``?
|
||||
def mk_relay_method(
|
||||
|
||||
signame: str,
|
||||
slot: Callable[
|
||||
[ViewBox,
|
||||
'QEvent',
|
||||
Optional[AxisItem]],
|
||||
None,
|
||||
],
|
||||
|
||||
) -> Callable[
|
||||
[
|
||||
ViewBox,
|
||||
# lol, there isn't really a generic type thanks
|
||||
# to the rewrite of Qt's event system XD
|
||||
'QEvent',
|
||||
|
||||
'Optional[AxisItem]',
|
||||
'Optional[ViewBox]', # the ``relayed_from`` arg we provide
|
||||
],
|
||||
None,
|
||||
]:
|
||||
|
||||
def maybe_broadcast(
|
||||
vb: 'ViewBox',
|
||||
ev: 'QEvent',
|
||||
axis: 'Optional[int]' = None,
|
||||
relayed_from: 'ViewBox' = None,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
(soon to be) Decorator which makes an event handler
|
||||
"broadcastable" to overlayed ``GraphicsWidget``s.
|
||||
|
||||
Adds relay signals based on the decorated handler's name
|
||||
and conducts a signal broadcast of the relay signal if there
|
||||
are consumers registered.
|
||||
|
||||
'''
|
||||
# When no relay source has been set just bypass all
|
||||
# the broadcast machinery.
|
||||
if vb.event_relay_source is None:
|
||||
ev.accept()
|
||||
return slot(
|
||||
vb,
|
||||
ev,
|
||||
axis=axis,
|
||||
)
|
||||
|
||||
if relayed_from:
|
||||
assert axis is None
|
||||
|
||||
# this is a relayed event and should be ignored (so it does not
|
||||
# halt/short circuit the graphicscene loop). Further the
|
||||
# surrounding handler for this signal must be allowed to execute
|
||||
# and get processed by **this consumer**.
|
||||
# print(f'{vb.name} rx relayed from {relayed_from.name}')
|
||||
ev.ignore()
|
||||
|
||||
return slot(
|
||||
vb,
|
||||
ev,
|
||||
axis=axis,
|
||||
)
|
||||
|
||||
if axis is not None:
|
||||
# print(f'{vb.name} handling axis event:\n{str(ev)}')
|
||||
ev.accept()
|
||||
return slot(
|
||||
vb,
|
||||
ev,
|
||||
axis=axis,
|
||||
)
|
||||
|
||||
elif (
|
||||
relayed_from is None
|
||||
and vb.event_relay_source is vb # we are the broadcaster
|
||||
and axis is None
|
||||
):
|
||||
# Broadcast case: this is a source event which will be
|
||||
# relayed to attached consumers and accepted after all
|
||||
# consumers complete their own handling followed by this
|
||||
# routine's processing. Sequence is,
|
||||
# - pre-relay to all consumers *first* - ``.emit()`` blocks
|
||||
# until all downstream relay handlers have run.
|
||||
# - run the source handler for **this** event and accept
|
||||
# the event
|
||||
|
||||
# Access the "bound signal" that is created
|
||||
# on the widget type as part of instantiation.
|
||||
signal = getattr(vb, signame)
|
||||
# print(f'{vb.name} emitting {signame}')
|
||||
|
||||
# TODO/NOTE: we could also just bypass a "relay" signal
|
||||
# entirely and instead call the handlers manually in
|
||||
# a loop? This probably is a lot simpler and also doesn't
|
||||
# have any downside, and allows not touching target widget
|
||||
# internals.
|
||||
signal.emit(
|
||||
ev,
|
||||
axis,
|
||||
# passing this demarks a broadcasted/relayed event
|
||||
vb,
|
||||
)
|
||||
# accept event so no more relays are fired.
|
||||
ev.accept()
|
||||
|
||||
# call underlying wrapped method with an extra
|
||||
# ``relayed_from`` value to denote that this is a relayed
|
||||
# event handling case.
|
||||
return slot(
|
||||
vb,
|
||||
ev,
|
||||
axis=axis,
|
||||
)
|
||||
|
||||
return maybe_broadcast
|
||||
|
||||
|
||||
# XXX: :( can't define signals **after** class compile time
|
||||
# so this is not really useful.
|
||||
# def mk_relay_signal(
|
||||
# func,
|
||||
# name: str = None,
|
||||
|
||||
# ) -> Signal:
|
||||
# (
|
||||
# args,
|
||||
# varargs,
|
||||
# varkw,
|
||||
# defaults,
|
||||
# kwonlyargs,
|
||||
# kwonlydefaults,
|
||||
# annotations
|
||||
# ) = inspect.getfullargspec(func)
|
||||
|
||||
# # XXX: generate a relay signal with 1 extra
|
||||
# # argument for a ``relayed_from`` kwarg. Since
|
||||
# # ``'self'`` is already ignored by signals we just need
|
||||
# # to count the arguments since we're adding only 1 (and
|
||||
# # ``args`` will capture that).
|
||||
# numargs = len(args + list(defaults))
|
||||
# signal = Signal(*tuple(numargs * [object]))
|
||||
# signame = name or func.__name__ + 'Relay'
|
||||
# return signame, signal
|
||||
|
||||
|
||||
def enable_relays(
|
||||
widget: GraphicsWidget,
|
||||
handler_names: list[str],
|
||||
|
||||
) -> list[Signal]:
|
||||
'''
|
||||
Method override helper which enables relay of a particular
|
||||
``Signal`` from some chosen broadcaster widget to a set of
|
||||
consumer widgets which should operate their event handlers normally
|
||||
but instead of signals "relayed" from the broadcaster.
|
||||
|
||||
Mostly useful for overlaying widgets that handle user input
|
||||
that you want to overlay graphically. The target ``widget`` type must
|
||||
define ``QtCore.Signal``s each with a `'Relay'` suffix for each
|
||||
name provided in ``handler_names: list[str]``.
|
||||
|
||||
'''
|
||||
signals = []
|
||||
for name in handler_names:
|
||||
handler = getattr(widget, name)
|
||||
signame = name + 'Relay'
|
||||
# ensure the target widget defines a relay signal
|
||||
relay = getattr(widget, signame)
|
||||
widget.relays[signame] = name
|
||||
signals.append(relay)
|
||||
method = mk_relay_method(signame, handler)
|
||||
setattr(widget, name, method)
|
||||
|
||||
return signals
|
||||
|
||||
|
||||
enable_relays(
|
||||
ChartView,
|
||||
['wheelEvent', 'mouseDragEvent']
|
||||
)
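A hedged sketch (the class name is made up, not from the diff) of the class-level relay signals ``enable_relays()`` expects the target widget type to define, since Qt signals can only be declared at class-definition time:

from pyqtgraph.Qt.QtCore import Signal
from pyqtgraph.graphicsItems.ViewBox import ViewBox

class RelayingViewBox(ViewBox):
    # one ``<handler>Relay`` signal per relayed handler; each carries
    # (event, axis, relayed_from) as emitted by the broadcast wrapper.
    wheelEventRelay = Signal(object, object, object)
    mouseDragEventRelay = Signal(object, object, object)

    # populated by ``enable_relays()`` at import time
    relays: dict[str, str] = {}

    # set to the broadcasting ViewBox when this vb is added as an overlay
    event_relay_source = None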
|
||||
|
||||
|
||||
class PlotItemOverlay:
|
||||
'''
|
||||
A composite for managing overlaid ``PlotItem`` instances such that
|
||||
|
@ -301,191 +482,86 @@ class PlotItemOverlay:
|
|||
) -> None:
|
||||
|
||||
self.root_plotitem: PlotItem = root_plotitem
|
||||
self.relay_handlers: defaultdict[
|
||||
str,
|
||||
list[Callable],
|
||||
] = defaultdict(list)
|
||||
|
||||
# NOTE: required for scene layering/relaying; this guarantees
|
||||
# the "root" plot receives priority for interaction
|
||||
# events/signals.
|
||||
root_plotitem.vb.setZValue(10)
|
||||
vb = root_plotitem.vb
|
||||
vb.event_relay_source = vb # TODO: maybe change name?
|
||||
vb.setZValue(1000) # XXX: critical for scene layering/relaying
|
||||
|
||||
self.layout = ComposedGridLayout(root_plotitem)
|
||||
self.overlays: list[PlotItem] = []
|
||||
self.layout = ComposedGridLayout(
|
||||
root_plotitem,
|
||||
root_plotitem.layout,
|
||||
)
|
||||
self._relays: dict[str, Signal] = {}
|
||||
|
||||
@property
|
||||
def overlays(self) -> list[PlotItem]:
|
||||
return self.layout.pitems
|
||||
|
||||
def add_plotitem(
|
||||
self,
|
||||
plotitem: PlotItem,
|
||||
index: Optional[int] = None,
|
||||
|
||||
# event/signal names which will be broadcasted to all added
|
||||
# (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
|
||||
relay_events: list[str] = [],
|
||||
|
||||
# TODO: we could also put the ``ViewBox.XAxis``
|
||||
# style enum here?
|
||||
# (0,), # link x
|
||||
# (1,), # link y
|
||||
# (0, 1), # link both
|
||||
link_axes: tuple[int] = (),
|
||||
|
||||
) -> tuple[int, list[AxisItem]]:
|
||||
) -> None:
|
||||
|
||||
index = index or len(self.overlays)
|
||||
root = self.root_plotitem
|
||||
# layout: QGraphicsGridLayout = root.layout
|
||||
self.overlays.insert(index, plotitem)
|
||||
vb: ViewBox = plotitem.vb
|
||||
|
||||
# mark this consumer overlay as ready to expect relayed events
|
||||
# from the root plotitem.
|
||||
vb.event_relay_source = root.vb
|
||||
|
||||
# TODO: some sane way to allow menu event broadcast XD
|
||||
# vb.setMenuEnabled(False)
|
||||
|
||||
# wire up any relay signal(s) from the source plot to added
|
||||
# "overlays". We use a plain loop instead of mucking with
|
||||
# re-connecting signal/slots which tends to be more invasive and
|
||||
# harder to implement and provides no measurable performance
|
||||
# gain.
|
||||
if relay_events:
|
||||
for ev_name in relay_events:
|
||||
relayee_handler: Callable[
|
||||
[
|
||||
ViewBox,
|
||||
# lol, there isn't really a generic type thanks
|
||||
# to the rewrite of Qt's event system XD
|
||||
QEvent,
|
||||
# TODO: inside the `maybe_broadcast()` (soon to be) decorator
|
||||
# we need have checks that consumers have been attached to
|
||||
# these relay signals.
|
||||
if link_axes != (0, 1):
|
||||
|
||||
AxisItem | None,
|
||||
],
|
||||
None,
|
||||
] = getattr(vb, ev_name)
|
||||
|
||||
sub_handlers: list[Callable] = self.relay_handlers[ev_name]
|
||||
|
||||
# on the first registry of a relayed event we pop the
|
||||
# root's handler and override it to a custom broadcaster
|
||||
# routine.
|
||||
if not sub_handlers:
|
||||
|
||||
src_handler = getattr(
|
||||
root.vb,
|
||||
ev_name,
|
||||
)
|
||||
|
||||
def broadcast(
|
||||
ev: 'QEvent',
|
||||
|
||||
# TODO: drop this viewbox specific input and
|
||||
# allow a predicate to be passed in by user.
|
||||
axis: 'Optional[int]' = None,
|
||||
|
||||
*,
|
||||
|
||||
# these are bound in by the ``partial`` below
|
||||
# and ensure a unique broadcaster per event.
|
||||
ev_name: str = None,
|
||||
src_handler: Callable = None,
|
||||
relayed_from: 'ViewBox' = None,
|
||||
|
||||
# remaining inputs the source handler expects
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Broadcast signal or event: this is a source
|
||||
event which will be relayed to attached
|
||||
"relayee" plot item consumers.
|
||||
|
||||
The event is accepted halting any further
|
||||
handlers from being triggered.
|
||||
|
||||
Sequence is,
|
||||
- pre-relay to all consumers *first* - exactly
|
||||
like how a ``Signal.emit()`` blocks until all
|
||||
downstream relay handlers have run.
|
||||
- run the event's source handler event
|
||||
|
||||
'''
|
||||
ev.accept()
|
||||
|
||||
# broadcast first to relayees *first*. trigger
|
||||
# relay of event to all consumers **before**
|
||||
# processing/consumption in the source handler.
|
||||
relayed_handlers = self.relay_handlers[ev_name]
|
||||
|
||||
assert getattr(vb, ev_name).__name__ == ev_name
|
||||
|
||||
# TODO: generalize as an input predicate
|
||||
if axis is None:
|
||||
for handler in relayed_handlers:
|
||||
handler(
|
||||
ev,
|
||||
axis=axis,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# run "source" widget's handler last
|
||||
src_handler(
|
||||
ev,
|
||||
axis=axis,
|
||||
)
|
||||
|
||||
# dynamic handler override on the publisher plot
|
||||
setattr(
|
||||
root.vb,
|
||||
ev_name,
|
||||
partial(
|
||||
broadcast,
|
||||
ev_name=ev_name,
|
||||
src_handler=src_handler
|
||||
),
|
||||
)
|
||||
|
||||
else:
|
||||
assert getattr(root.vb, ev_name)
|
||||
assert relayee_handler not in sub_handlers
|
||||
|
||||
# append relayed-to widget's handler to relay table
|
||||
sub_handlers.append(relayee_handler)
|
||||
# wire up relay signals
|
||||
for relay_signal_name, handler_name in vb.relays.items():
|
||||
# print(handler_name)
|
||||
# XXX: Signal class attrs are bound after instantiation
|
||||
# of the defining type, so we need to access that bound
|
||||
# version here.
|
||||
signal = getattr(root.vb, relay_signal_name)
|
||||
handler = getattr(vb, handler_name)
|
||||
signal.connect(handler)
|
||||
|
||||
# link dim-axes to root if requested by user.
|
||||
# TODO: solve more-then-wanted scaled panning on click drag
|
||||
# which seems to be due to broadcast. So we probably need to
|
||||
# disable broadcast when axes are linked in a particular
|
||||
# dimension?
|
||||
for dim in link_axes:
|
||||
# link x and y axes to new view box such that the top level
|
||||
# viewbox propagates to the root (and whatever other
|
||||
# plotitem overlays that have been added).
|
||||
vb.linkView(dim, root.vb)
|
||||
|
||||
# => NOTE: in order to prevent "more-then-linear" scaled
|
||||
# panning moves on (for eg. click-drag) certain range change
|
||||
# signals (i.e. ``.sigXRangeChanged``), the user needs to be
|
||||
# careful that any broadcasted ``relay_events`` are are short
|
||||
# circuited in sub-handlers (aka relayee's) implementations. As
|
||||
# an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
|
||||
# overlayed implementations need to be sure they either don't
|
||||
# also link the x-axes (by not providing ``link_axes=(0,)``
|
||||
# above) or that the relayee ``.mouseDragEvent()`` handlers are
|
||||
# ready to "``return`` early" in the case that
|
||||
# ``.sigXRangeChanged`` is emitted as part of linked axes.
|
||||
# For more details on such signalling mechanics peek in
|
||||
# ``ViewBox.linkView()``.
|
||||
# make overlaid viewbox impossible to focus since the top
|
||||
# level should handle all input and relay to overlays.
|
||||
# NOTE: this was solved with the `setZValue()` above!
|
||||
|
||||
# make overlaid viewbox impossible to focus since the top level
|
||||
# should handle all input and relay to overlays. Note that the
|
||||
# "root" plot item gettingn interaction priority is configured
|
||||
# with the ``.setZValue()`` during init.
|
||||
# TODO: we will probably want to add a "focus" api such that
|
||||
# a new "top level" ``PlotItem`` can be selected dynamically
|
||||
# (and presumably the axes dynamically sorted to match).
|
||||
vb.setFlag(
|
||||
vb.GraphicsItemFlag.ItemIsFocusable,
|
||||
False
|
||||
)
|
||||
vb.setFocusPolicy(Qt.NoFocus)
|
||||
|
||||
# => TODO: add a "focus" api for switching the "top level"
|
||||
# ``PlotItem`` dynamically.
|
||||
|
||||
# append-compose into the layout all axes from this plot
|
||||
if index is None:
|
||||
insert_index, axes = self.layout.append_plotitem(plotitem)
|
||||
else:
|
||||
insert_index, axes = self.layout.insert_plotitem(index, plotitem)
|
||||
self.layout.insert(index, plotitem)
|
||||
|
||||
plotitem.setGeometry(root.vb.sceneBoundingRect())
|
||||
|
||||
|
@ -503,12 +579,24 @@ class PlotItemOverlay:
|
|||
root.vb.setFocus()
|
||||
assert root.vb.focusWidget()
|
||||
|
||||
vb.setZValue(100)
|
||||
# XXX: do we need this? Why would you build then destroy?
|
||||
def remove_plotitem(self, plotItem: PlotItem) -> None:
|
||||
'''
|
||||
Remove this ``PlotItem`` from the overlayed set making not shown
|
||||
and unable to accept input.
|
||||
|
||||
return (
|
||||
index,
|
||||
axes,
|
||||
)
|
||||
'''
|
||||
...
|
||||
|
||||
# TODO: i think this would be super hot B)
|
||||
def focus_item(self, plotitem: PlotItem) -> PlotItem:
|
||||
'''
|
||||
Apply focus to a contained PlotItem thus making it the "top level"
|
||||
item in the overlay able to accept peripheral's input from the user
|
||||
and responsible for zoom and panning control via its ``ViewBox``.
|
||||
|
||||
'''
|
||||
...
|
||||
|
||||
def get_axis(
|
||||
self,
|
||||
|
@ -542,9 +630,8 @@ class PlotItemOverlay:
|
|||
|
||||
return axes
|
||||
|
||||
# XXX: untested as of now.
|
||||
# TODO: need this as part of selecting a different root/source
|
||||
# plot to rewire interaction event broadcast dynamically.
|
||||
# TODO: i guess we need this if you want to detach existing plots
|
||||
# dynamically? XXX: untested as of now.
|
||||
def _disconnect_all(
|
||||
self,
|
||||
plotitem: PlotItem,
|
||||
|
@ -559,22 +646,3 @@ class PlotItemOverlay:
|
|||
disconnected.append(sig)
|
||||
|
||||
return disconnected
|
||||
|
||||
# XXX: do we need this? Why would you build then destroy?
|
||||
# def remove_plotitem(self, plotItem: PlotItem) -> None:
|
||||
# '''
|
||||
# Remove this ``PlotItem`` from the overlayed set making not shown
|
||||
# and unable to accept input.
|
||||
|
||||
# '''
|
||||
# ...
|
||||
|
||||
# TODO: i think this would be super hot B)
|
||||
# def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
|
||||
# '''
|
||||
# Apply focus to a contained PlotItem thus making it the "top level"
|
||||
# item in the overlay able to accept peripheral's input from the user
|
||||
# and responsible for zoom and panning control via its ``ViewBox``.
|
||||
|
||||
# '''
|
||||
# ...
|
||||
|
|
|
@ -0,0 +1,236 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Super fast ``QPainterPath`` generation related operator routines.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
# Optional,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
from numpy.lib import recfunctions as rfn
|
||||
from numba import njit, float64, int64 # , optional
|
||||
# import pyqtgraph as pg
|
||||
from PyQt5 import QtGui
|
||||
# from PyQt5.QtCore import QLineF, QPointF
|
||||
|
||||
from ..data._sharedmem import (
|
||||
ShmArray,
|
||||
)
|
||||
# from .._profile import pg_profile_enabled, ms_slower_then
|
||||
from ._compression import (
|
||||
ds_m4,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._flows import Renderer
|
||||
|
||||
|
||||
def xy_downsample(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
|
||||
x_spacer: float = 0.5,
|
||||
|
||||
) -> tuple[np.ndarray, np.ndarray]:
|
||||
|
||||
# downsample whenever more then 1 pixels per datum can be shown.
|
||||
# always refresh data bounds until we get diffing
|
||||
# working properly, see above..
|
||||
bins, x, y = ds_m4(
|
||||
x,
|
||||
y,
|
||||
uppx,
|
||||
)
|
||||
|
||||
# flatten output to 1d arrays suitable for path-graphics generation.
|
||||
x = np.broadcast_to(x[:, None], y.shape)
|
||||
x = (x + np.array(
|
||||
[-x_spacer, 0, 0, x_spacer]
|
||||
)).flatten()
|
||||
y = y.flatten()
|
||||
|
||||
return x, y
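To illustrate the flattening step above (toy numbers, not from the diff): each M4 bin contributes 4 y-values, so the per-bin x value is broadcast to 4 points and nudged by ``x_spacer`` to keep the path monotonic in x:

import numpy as np

x_bins = np.array([0., 1.])          # one x per downsampled bin
y_bins = np.array([
    [1.0, 0.5, 2.0, 1.5],            # 4 y-values per bin from ds_m4
    [1.5, 1.0, 3.0, 2.0],
])
x_spacer = 0.5
x = (
    np.broadcast_to(x_bins[:, None], y_bins.shape)
    + np.array([-x_spacer, 0, 0, x_spacer])
).flatten()
y = y_bins.flatten()
# x -> [-0.5, 0., 0., 0.5, 0.5, 1., 1., 1.5]
# y -> [ 1., 0.5, 2., 1.5, 1.5, 1., 3., 2. ]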
|
||||
|
||||
|
||||
@njit(
|
||||
# TODO: for now need to construct this manually for readonly arrays, see
|
||||
# https://github.com/numba/numba/issues/4511
|
||||
# ntypes.tuple((float64[:], float64[:], float64[:]))(
|
||||
# numba_ohlc_dtype[::1], # contiguous
|
||||
# int64,
|
||||
# optional(float64),
|
||||
# ),
|
||||
nogil=True
|
||||
)
|
||||
def path_arrays_from_ohlc(
|
||||
data: np.ndarray,
|
||||
start: int64,
|
||||
bar_gap: float64 = 0.43,
|
||||
|
||||
) -> np.ndarray:
|
||||
'''
|
||||
Generate an array of lines objects from input ohlc data.
|
||||
|
||||
'''
|
||||
size = int(data.shape[0] * 6)
|
||||
|
||||
x = np.zeros(
|
||||
# data,
|
||||
shape=size,
|
||||
dtype=float64,
|
||||
)
|
||||
y, c = x.copy(), x.copy()
|
||||
|
||||
# TODO: report bug for assert @
|
||||
# /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
|
||||
for i, q in enumerate(data[start:], start):
|
||||
|
||||
# TODO: ask numba why this doesn't work..
|
||||
# open, high, low, close, index = q[
|
||||
# ['open', 'high', 'low', 'close', 'index']]
|
||||
|
||||
open = q['open']
|
||||
high = q['high']
|
||||
low = q['low']
|
||||
close = q['close']
|
||||
index = float64(q['index'])
|
||||
|
||||
istart = i * 6
|
||||
istop = istart + 6
|
||||
|
||||
# x,y detail the 6 points which connect all vertexes of a ohlc bar
|
||||
x[istart:istop] = (
|
||||
index - bar_gap,
|
||||
index,
|
||||
index,
|
||||
index,
|
||||
index,
|
||||
index + bar_gap,
|
||||
)
|
||||
y[istart:istop] = (
|
||||
open,
|
||||
open,
|
||||
low,
|
||||
high,
|
||||
close,
|
||||
close,
|
||||
)
|
||||
|
||||
# specifies that the first edge is never connected to the
|
||||
# prior bars last edge thus providing a small "gap"/"space"
|
||||
# between bars determined by ``bar_gap``.
|
||||
c[istart:istop] = (1, 1, 1, 1, 1, 0)
|
||||
|
||||
return x, y, c
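Downstream these flat (x, y, c) arrays are presumably fed to pyqtgraph's array-to-path converter, where the ``connect`` array masks which consecutive vertices are joined (the trailing 0 in each 6-point group leaves the inter-bar gap). A hedged usage sketch with made-up sample data, not from the diff:

import numpy as np
import pyqtgraph as pg

ohlc = np.array(
    [
        (0., 10.0, 12.0, 9.5, 11.0),
        (1., 11.0, 13.0, 10.5, 12.5),
    ],
    dtype=[
        ('index', 'f8'), ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
    ],
)
x, y, c = path_arrays_from_ohlc(ohlc, start=0, bar_gap=0.43)

# join points according to the connect mask, one path for all bars
path = pg.functions.arrayToQPath(x, y, connect=c)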
|
||||
|
||||
|
||||
def gen_ohlc_qpath(
|
||||
r: Renderer,
|
||||
data: np.ndarray,
|
||||
array_key: str, # we ignore this
|
||||
vr: tuple[int, int],
|
||||
|
||||
start: int = 0, # XXX: do we need this?
|
||||
# 0.5 is no overlap between arms, 1.0 is full overlap
|
||||
w: float = 0.43,
|
||||
|
||||
) -> QtGui.QPainterPath:
|
||||
'''
|
||||
More or less direct proxy to ``path_arrays_from_ohlc()``
|
||||
but with closed in kwargs for line spacing.
|
||||
|
||||
'''
|
||||
x, y, c = path_arrays_from_ohlc(
|
||||
data,
|
||||
start,
|
||||
bar_gap=w,
|
||||
)
|
||||
return x, y, c
|
||||
|
||||
|
||||
def ohlc_to_line(
|
||||
ohlc_shm: ShmArray,
|
||||
data_field: str,
|
||||
fields: list[str] = ['open', 'high', 'low', 'close']
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
'''
|
||||
Convert an input struct-array holding OHLC samples into a pair of
|
||||
flattened x, y arrays with the same size (datums wise) as the source
|
||||
data.
|
||||
|
||||
'''
|
||||
y_out = ohlc_shm.ustruct(fields)
|
||||
first = ohlc_shm._first.value
|
||||
last = ohlc_shm._last.value
|
||||
|
||||
# write pushed data to flattened copy
|
||||
y_out[first:last] = rfn.structured_to_unstructured(
|
||||
ohlc_shm.array[fields]
|
||||
)
|
||||
|
||||
# generate an flat-interpolated x-domain
|
||||
x_out = (
|
||||
np.broadcast_to(
|
||||
ohlc_shm._array['index'][:, None],
|
||||
(
|
||||
ohlc_shm._array.size,
|
||||
# 4, # only ohlc
|
||||
y_out.shape[1],
|
||||
),
|
||||
) + np.array([-0.5, 0, 0, 0.5])
|
||||
)
|
||||
assert y_out.any()
|
||||
|
||||
return (
|
||||
x_out,
|
||||
y_out,
|
||||
)
|
||||
|
||||
|
||||
def to_step_format(
|
||||
shm: ShmArray,
|
||||
data_field: str,
|
||||
index_field: str = 'index',
|
||||
|
||||
) -> tuple[int, np.ndarray, np.ndarray]:
|
||||
'''
|
||||
Convert an input 1d shm array to a "step array" format
|
||||
for use by path graphics generation.
|
||||
|
||||
'''
|
||||
i = shm._array['index'].copy()
|
||||
out = shm._array[data_field].copy()
|
||||
|
||||
x_out = np.broadcast_to(
|
||||
i[:, None],
|
||||
(i.size, 2),
|
||||
) + np.array([-0.5, 0.5])
|
||||
|
||||
y_out = np.empty((len(out), 2), dtype=out.dtype)
|
||||
y_out[:] = out[:, np.newaxis]
|
||||
|
||||
# start y at origin level
|
||||
y_out[0, 0] = 0
|
||||
return x_out, y_out
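A tiny worked example (toy values, not from the diff) of the step layout produced above, where every sample is duplicated at index -0.5 and +0.5 so a flat segment spans each sample period:

import numpy as np

i = np.arange(3)                      # [0, 1, 2]
vals = np.array([5.0, 7.0, 6.0])

x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([-0.5, 0.5])
y_out = np.empty((len(vals), 2), dtype=vals.dtype)
y_out[:] = vals[:, np.newaxis]
y_out[0, 0] = 0                       # start the step curve at the origin level

# x_out -> [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]
# y_out -> [[0., 5.], [7., 7.], [6., 6.]]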
|
|
@@ -15,19 +15,13 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Customization of ``pyqtgraph`` core routines and various types normally
for speedups.

Generally, ours does not require "scientific precision" for pixel perfect
view transforms.
Customization of ``pyqtgraph`` core routines to speed up our use mostly
based on not requiring "scientific precision" for pixel perfect view
transforms.

"""
from typing import Optional

import pyqtgraph as pg

from ._axes import Axis


def invertQTransform(tr):
    """Return a QTransform that is the inverse of *tr*.
@ -52,236 +46,3 @@ def _do_overrides() -> None:
|
|||
"""
|
||||
# we don't care about potential fp issues inside Qt
|
||||
pg.functions.invertQTransform = invertQTransform
|
||||
pg.PlotItem = PlotItem
|
||||
|
||||
# enable "QPainterPathPrivate for faster arrayToQPath" from
|
||||
# https://github.com/pyqtgraph/pyqtgraph/pull/2324
|
||||
pg.setConfigOption('enableExperimental', True)
|
||||
|
||||
|
||||
# NOTE: the below customized type contains all our changes on a method
|
||||
# by method basis as per the diff:
|
||||
# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
|
||||
|
||||
class PlotItem(pg.PlotItem):
|
||||
'''
|
||||
Overrides for the core plot object mostly pertaining to overlayed
|
||||
multi-view management as it relates to multi-axis managment.
|
||||
|
||||
This object is the combination of a ``ViewBox`` and multiple
|
||||
``AxisItem``s and so far we've added additional functionality and
|
||||
APIs for:
|
||||
- removal of axes
|
||||
|
||||
---
|
||||
|
||||
From ``pyqtgraph`` super type docs:
|
||||
- Manage placement of ViewBox, AxisItems, and LabelItems
|
||||
- Create and manage a list of PlotDataItems displayed inside the
|
||||
ViewBox
|
||||
- Implement a context menu with commonly used display and analysis
|
||||
options
|
||||
|
||||
'''
|
||||
def __init__(
|
||||
self,
|
||||
parent=None,
|
||||
name=None,
|
||||
labels=None,
|
||||
title=None,
|
||||
viewBox=None,
|
||||
axisItems=None,
|
||||
default_axes=['left', 'bottom'],
|
||||
enableMenu=True,
|
||||
**kargs
|
||||
):
|
||||
super().__init__(
|
||||
parent=parent,
|
||||
name=name,
|
||||
labels=labels,
|
||||
title=title,
|
||||
viewBox=viewBox,
|
||||
axisItems=axisItems,
|
||||
# default_axes=default_axes,
|
||||
enableMenu=enableMenu,
|
||||
kargs=kargs,
|
||||
)
|
||||
self.name = name
|
||||
self.chart_widget = None
|
||||
# self.setAxisItems(
|
||||
# axisItems,
|
||||
# default_axes=default_axes,
|
||||
# )
|
||||
|
||||
# NOTE: this is an entirely new method not in upstream.
|
||||
def removeAxis(
|
||||
self,
|
||||
name: str,
|
||||
unlink: bool = True,
|
||||
|
||||
) -> Optional[pg.AxisItem]:
|
||||
"""
|
||||
Remove an axis from the contained axis items
|
||||
by ```name: str```.
|
||||
|
||||
This means the axis graphics object will be removed
|
||||
from the ``.layout: QGraphicsGridLayout`` as well as unlinked
|
||||
from the underlying associated ``ViewBox``.
|
||||
|
||||
If the ``unlink: bool`` is set to ``False`` then the axis will
stay linked to its view and will only be removed from the layout.
|
||||
|
||||
If no axis with ``name: str`` is found then this is a noop.
|
||||
|
||||
Return the axis instance that was removed.
|
||||
|
||||
"""
|
||||
entry = self.axes.pop(name, None)
|
||||
|
||||
if not entry:
|
||||
return
|
||||
|
||||
axis = entry['item']
|
||||
self.layout.removeItem(axis)
|
||||
axis.scene().removeItem(axis)
|
||||
if unlink:
|
||||
axis.unlinkFromView()
|
||||
|
||||
self.update()
|
||||
|
||||
return axis
|
||||
|
||||
# Why do we need to always have all axes created?
|
||||
#
|
||||
# I don't understand this at all.
|
||||
#
|
||||
# Everything seems to work if you just always apply the
|
||||
# set passed to this method **EXCEPT** for some super weird reason
|
||||
# the view box geometry still computes as though the space for the
|
||||
# `'bottom'` axis is always there **UNLESS** you always add that
|
||||
# axis but hide it?
|
||||
#
|
||||
# Why in tf would this be the case!?!?
|
||||
def setAxisItems(
|
||||
self,
|
||||
# XXX: yeah yeah, i know we can't use type annots like this yet.
|
||||
axisItems: Optional[dict[str, pg.AxisItem]] = None,
|
||||
add_to_layout: bool = True,
|
||||
default_axes: list[str] = ['left', 'bottom'],
|
||||
):
|
||||
"""
|
||||
Override axis item setting to only
|
||||
|
||||
"""
|
||||
axisItems = axisItems or {}
|
||||
|
||||
# XXX: wth is is this even saying?!?
|
||||
# Array containing visible axis items
|
||||
# Also containing potentially hidden axes, but they are not
|
||||
# touched so it does not matter
|
||||
# visibleAxes = ['left', 'bottom']
|
||||
# Note that it does not matter that this adds
|
||||
# some values to visibleAxes a second time
|
||||
|
||||
# XXX: uhhh wat^ ..?
|
||||
|
||||
visibleAxes = list(default_axes) + list(axisItems.keys())
|
||||
|
||||
# TODO: we should probably invert the loop here to not loop the
|
||||
# predefined "axis name set" and instead loop the `axisItems`
|
||||
# input and lookup indices from a predefined map.
|
||||
for name, pos in (
|
||||
('top', (1, 1)),
|
||||
('bottom', (3, 1)),
|
||||
('left', (2, 0)),
|
||||
('right', (2, 2))
|
||||
):
|
||||
if (
|
||||
name in self.axes and
|
||||
name in axisItems
|
||||
):
|
||||
# we already have an axis entry for this name
|
||||
# so remove the existing entry.
|
||||
self.removeAxis(name)
|
||||
|
||||
# elif name not in axisItems:
|
||||
# # this axis entry is not provided in this call
|
||||
# # so remove any old/existing entry.
|
||||
# self.removeAxis(name)
|
||||
|
||||
# Create new axis
|
||||
if name in axisItems:
|
||||
axis = axisItems[name]
|
||||
if axis.scene() is not None:
|
||||
if (
|
||||
name not in self.axes
|
||||
or axis != self.axes[name]["item"]
|
||||
):
|
||||
raise RuntimeError(
|
||||
"Can't add an axis to multiple plots. Shared axes"
|
||||
" can be achieved with multiple AxisItem instances"
|
||||
" and set[X/Y]Link.")
|
||||
|
||||
else:
|
||||
# Set up new axis
|
||||
|
||||
# XXX: ok but why do we want to add axes for all entries
|
||||
# if not desired by the user? The only reason I can see
|
||||
# for adding this is that without it there's some weird
|
||||
# ``ViewBox`` geometry bug.. where a gap for the
|
||||
# 'bottom' axis is somehow left in?
|
||||
# axis = pg.AxisItem(orientation=name, parent=self)
|
||||
axis = Axis(
|
||||
self,
|
||||
orientation=name,
|
||||
parent=self,
|
||||
)
|
||||
|
||||
axis.linkToView(self.vb)
|
||||
|
||||
# XXX: shouldn't you already know the ``pos`` from the name?
|
||||
# Oh right instead of a global map that would let you
|
||||
# easily look that up, it's redefined over and over and over
|
||||
# again in methods..
|
||||
self.axes[name] = {'item': axis, 'pos': pos}
|
||||
|
||||
# NOTE: in the overlay case the axis may be added to some
|
||||
# other layout and should not be added here.
|
||||
if add_to_layout:
|
||||
self.layout.addItem(axis, *pos)
|
||||
|
||||
# place axis above images at z=0, items that want to draw
|
||||
# over the axes should be placed at z>=1:
|
||||
axis.setZValue(0.5)
|
||||
axis.setFlag(
|
||||
axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
|
||||
)
|
||||
if name in visibleAxes:
|
||||
self.showAxis(name, True)
|
||||
else:
|
||||
# why do we need to insert all axes to ``.axes`` and
|
||||
# only hide the ones the user doesn't specify? It all
|
||||
# seems to work fine without doing this except for this
|
||||
# weird gap for the 'bottom' axis that always shows up
|
||||
# in the view box geometry??
|
||||
self.hideAxis(name)
|
||||
|
||||
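For reference, the upstream ``pyqtgraph`` hook this method overrides can be
exercised on its own; a hedged sketch (stock ``PlotItem``, no piker types)
of passing explicit ``AxisItem`` instances:

import pyqtgraph as pg

app = pg.mkQApp()
plotitem = pg.PlotItem()

# swap in a datetime-aware bottom axis and an extra right-hand axis;
# ``setAxisItems()`` wires each one into the grid layout and links it
# to the plot's ``ViewBox``.
plotitem.setAxisItems({
    'bottom': pg.DateAxisItem(orientation='bottom'),
    'right': pg.AxisItem(orientation='right'),
})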
def updateGrid(
|
||||
self,
|
||||
*args,
|
||||
):
|
||||
alpha = self.ctrl.gridAlphaSlider.value()
|
||||
x = alpha if self.ctrl.xGridCheck.isChecked() else False
|
||||
y = alpha if self.ctrl.yGridCheck.isChecked() else False
|
||||
for name, dim in (
|
||||
('top', x),
|
||||
('bottom', x),
|
||||
('left', y),
|
||||
('right', y)
|
||||
):
|
||||
if name in self.axes:
|
||||
self.getAxis(name).setGrid(dim)
|
||||
# self.getAxis('bottom').setGrid(x)
|
||||
# self.getAxis('left').setGrid(y)
|
||||
# self.getAxis('right').setGrid(y)
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,320 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
'''
|
||||
High level streaming graphics primitives.
|
||||
|
||||
This is an intermediate layer which associates real-time low latency
|
||||
graphics primitives with underlying stream/flow related data structures
|
||||
for fast incremental update.
|
||||
|
||||
'''
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import msgspec
|
||||
import numpy as np
|
||||
import pyqtgraph as pg
|
||||
from PyQt5.QtGui import QPainterPath
|
||||
|
||||
from ..data._formatters import (
|
||||
IncrementalFormatter,
|
||||
)
|
||||
from ..data._pathops import (
|
||||
xy_downsample,
|
||||
)
|
||||
from ..log import get_logger
|
||||
from .._profile import (
|
||||
Profiler,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ._dataviz import Viz
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
|
||||
class Renderer(msgspec.Struct):
|
||||
|
||||
viz: Viz
|
||||
fmtr: IncrementalFormatter
|
||||
|
||||
# output graphics rendering, the main object
|
||||
# processed in ``QGraphicsObject.paint()``
|
||||
path: QPainterPath | None = None
|
||||
fast_path: QPainterPath | None = None
|
||||
|
||||
# downsampling state
|
||||
_last_uppx: float = 0
|
||||
_in_ds: bool = False
|
||||
|
||||
def draw_path(
|
||||
self,
|
||||
x: np.ndarray,
|
||||
y: np.ndarray,
|
||||
connect: str | np.ndarray = 'all',
|
||||
path: QPainterPath | None = None,
|
||||
redraw: bool = False,
|
||||
|
||||
) -> QPainterPath:
|
||||
|
||||
path_was_none = path is None
|
||||
|
||||
if redraw and path:
|
||||
path.clear()
|
||||
|
||||
# TODO: avoid this?
|
||||
if self.fast_path:
|
||||
self.fast_path.clear()
|
||||
|
||||
path = pg.functions.arrayToQPath(
|
||||
x,
|
||||
y,
|
||||
connect=connect,
|
||||
finiteCheck=False,
|
||||
|
||||
# reserve mem allocs see:
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#reserve
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#capacity
|
||||
# - https://doc.qt.io/qt-5/qpainterpath.html#clear
|
||||
# XXX: right now this is based on ad-hoc checks on a
|
||||
# hidpi 3840x2160 4k monitor but we should optimize for
|
||||
# the target display(s) on the sys.
|
||||
# if no_path_yet:
|
||||
# graphics.path.reserve(int(500e3))
|
||||
# path=path, # path re-use / reserving
|
||||
)
|
||||
|
||||
# avoid mem allocs if possible
|
||||
if path_was_none:
|
||||
path.reserve(path.capacity())
|
||||
|
||||
return path
|
||||
|
||||
def render(
|
||||
self,
|
||||
|
||||
new_read,
|
||||
array_key: str,
|
||||
profiler: Profiler,
|
||||
uppx: float = 1,
|
||||
|
||||
# redraw and ds flags
|
||||
should_redraw: bool = False,
|
||||
new_sample_rate: bool = False,
|
||||
should_ds: bool = False,
|
||||
showing_src_data: bool = True,
|
||||
|
||||
do_append: bool = True,
|
||||
use_fpath: bool = True,
|
||||
|
||||
# only render datums "in view" of the ``ChartView``
|
||||
use_vr: bool = True,
|
||||
|
||||
) -> tuple[QPainterPath, bool]:
|
||||
'''
|
||||
Render the current graphics path(s)
|
||||
|
||||
There are (at least) 3 stages from source data to graphics data:
|
||||
- a data transform (which can be stored in additional shm)
|
||||
- a graphics transform which converts discrete basis data to
|
||||
a `float`-basis view-coords graphics basis. (eg. ``ohlc_flatten()``,
|
||||
``step_path_arrays_from_1d()``, etc.)
|
||||
|
||||
- blah blah blah (from notes)
|
||||
|
||||
'''
|
||||
# TODO: can the renderer just call ``Viz.read()`` directly?
|
||||
# unpack latest source data read
|
||||
fmtr = self.fmtr
|
||||
|
||||
(
|
||||
_,
|
||||
_,
|
||||
array,
|
||||
ivl,
|
||||
ivr,
|
||||
in_view,
|
||||
) = new_read
|
||||
|
||||
# xy-path data transform: convert source data to a format
|
||||
# able to be passed to a `QPainterPath` rendering routine.
|
||||
fmt_out = fmtr.format_to_1d(
|
||||
new_read,
|
||||
array_key,
|
||||
profiler,
|
||||
|
||||
slice_to_inview=use_vr,
|
||||
)
|
||||
|
||||
# no history in view case
|
||||
if not fmt_out:
|
||||
# XXX: this might be why the profiler only has exits?
|
||||
return
|
||||
|
||||
(
|
||||
x_1d,
|
||||
y_1d,
|
||||
connect,
|
||||
prepend_length,
|
||||
append_length,
|
||||
view_changed,
|
||||
# append_tres,
|
||||
|
||||
) = fmt_out
|
||||
|
||||
# redraw conditions
|
||||
if (
|
||||
prepend_length > 0
|
||||
or new_sample_rate
|
||||
or view_changed
|
||||
|
||||
# NOTE: comment this to try and make "append paths"
|
||||
# work below..
|
||||
or append_length > 0
|
||||
):
|
||||
should_redraw = True
|
||||
|
||||
path: QPainterPath = self.path
|
||||
fast_path: QPainterPath = self.fast_path
|
||||
reset: bool = False
|
||||
|
||||
self.viz.yrange = None
|
||||
|
||||
# redraw the entire source data if we have either of:
|
||||
# - no prior path graphic rendered or,
|
||||
# - we always intend to re-render the data only in view
|
||||
if (
|
||||
path is None
|
||||
or should_redraw
|
||||
):
|
||||
# print(f"{self.viz.name} -> REDRAWING BRUH")
|
||||
if new_sample_rate and showing_src_data:
|
||||
log.info(f'DE-downsampling -> {array_key}')
|
||||
self._in_ds = False
|
||||
|
||||
elif should_ds and uppx > 1:
|
||||
|
||||
ds_out = xy_downsample(
|
||||
x_1d,
|
||||
y_1d,
|
||||
uppx,
|
||||
)
|
||||
if ds_out is not None:
|
||||
x_1d, y_1d, ymn, ymx = ds_out
|
||||
self.viz.yrange = ymn, ymx
|
||||
# print(f'{self.viz.name} post ds: ymn, ymx: {ymn},{ymx}')
|
||||
|
||||
reset = True
|
||||
profiler(f'FULL PATH downsample redraw={should_ds}')
|
||||
self._in_ds = True
|
||||
|
||||
path = self.draw_path(
|
||||
x=x_1d,
|
||||
y=y_1d,
|
||||
connect=connect,
|
||||
path=path,
|
||||
redraw=True,
|
||||
)
|
||||
|
||||
profiler(
|
||||
'generated fresh path. '
|
||||
f'(should_redraw: {should_redraw} '
|
||||
f'should_ds: {should_ds} new_sample_rate: {new_sample_rate})'
|
||||
)
|
||||
|
||||
# TODO: get this piecewise prepend working - right now it's
|
||||
# giving heck on vwap...
|
||||
# elif prepend_length:
|
||||
|
||||
# prepend_path = pg.functions.arrayToQPath(
|
||||
# x[0:prepend_length],
|
||||
# y[0:prepend_length],
|
||||
# connect='all'
|
||||
# )
|
||||
|
||||
# # swap prepend path in "front"
|
||||
# old_path = graphics.path
|
||||
# graphics.path = prepend_path
|
||||
# # graphics.path.moveTo(new_x[0], new_y[0])
|
||||
# graphics.path.connectPath(old_path)
|
||||
|
||||
elif (
|
||||
append_length > 0
|
||||
and do_append
|
||||
):
|
||||
profiler(f'sliced append path {append_length}')
|
||||
# (
|
||||
# x_1d,
|
||||
# y_1d,
|
||||
# connect,
|
||||
# ) = append_tres
|
||||
|
||||
profiler(
|
||||
f'diffed array input, append_length={append_length}'
|
||||
)
|
||||
|
||||
# if should_ds and uppx > 1:
|
||||
# new_x, new_y = xy_downsample(
|
||||
# new_x,
|
||||
# new_y,
|
||||
# uppx,
|
||||
# )
|
||||
# profiler(f'fast path downsample redraw={should_ds}')
|
||||
|
||||
append_path = self.draw_path(
|
||||
x=x_1d,
|
||||
y=y_1d,
|
||||
connect=connect,
|
||||
path=fast_path,
|
||||
)
|
||||
profiler('generated append qpath')
|
||||
|
||||
if use_fpath:
|
||||
# an attempt at trying to make append-updates faster..
|
||||
if fast_path is None:
|
||||
fast_path = append_path
|
||||
# fast_path.reserve(int(6e3))
|
||||
else:
|
||||
# print(
|
||||
# f'{self.viz.name}: FAST PATH\n'
|
||||
# f"append_path br: {append_path.boundingRect()}\n"
|
||||
# f"path size: {size}\n"
|
||||
# f"append_path len: {append_path.length()}\n"
|
||||
# f"fast_path len: {fast_path.length()}\n"
|
||||
# )
|
||||
|
||||
fast_path.connectPath(append_path)
|
||||
size = fast_path.capacity()
|
||||
profiler(f'connected fast path w size: {size}')
|
||||
|
||||
# graphics.path.moveTo(new_x[0], new_y[0])
|
||||
# path.connectPath(append_path)
|
||||
|
||||
# XXX: lol this causes a hang..
|
||||
# graphics.path = graphics.path.simplified()
|
||||
else:
|
||||
size = path.capacity()
|
||||
profiler(f'connected history path w size: {size}')
|
||||
path.connectPath(append_path)
|
||||
|
||||
self.path = path
|
||||
self.fast_path = fast_path
|
||||
|
||||
return self.path, reset
|
|
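The two Qt calls ``Renderer`` leans on can be tried in isolation; a small
self-contained sketch (assuming only ``numpy`` + ``pyqtgraph``, with
synthetic data) of building a path from 1d arrays and then stitching an
"append" path onto it, mirroring the ``fast_path.connectPath(append_path)``
branch above:

import numpy as np
import pyqtgraph as pg

pg.mkQApp()

x = np.arange(100, dtype=float)
y = np.random.randn(100).cumsum()

# full history path
path = pg.functions.arrayToQPath(x, y, connect='all')

# later, only the newly appended datums get their own (small) path..
x_new = np.arange(100, 110, dtype=float)
y_new = y[-1] + np.random.randn(10).cumsum()
append_path = pg.functions.arrayToQPath(x_new, y_new, connect='all')

# ..which is connected onto the existing one instead of redrawing
# the entire history.
path.connectPath(append_path)
print(path.elementCount())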
@ -35,13 +35,9 @@ from collections import defaultdict
|
|||
from contextlib import asynccontextmanager
|
||||
from functools import partial
|
||||
from typing import (
|
||||
Optional,
|
||||
Callable,
|
||||
Awaitable,
|
||||
Sequence,
|
||||
Any,
|
||||
AsyncIterator,
|
||||
Iterator,
|
||||
Optional, Callable,
|
||||
Awaitable, Sequence,
|
||||
Any, AsyncIterator
|
||||
)
|
||||
import time
|
||||
# from pprint import pformat
|
||||
|
@ -123,7 +119,7 @@ class CompleterView(QTreeView):
|
|||
# TODO: size this based on DPI font
|
||||
self.setIndentation(_font.px_size)
|
||||
|
||||
self.setUniformRowHeights(True)
|
||||
# self.setUniformRowHeights(True)
|
||||
# self.setColumnWidth(0, 3)
|
||||
# self.setVerticalBarPolicy(Qt.ScrollBarAlwaysOff)
|
||||
# self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)
|
||||
|
@ -142,31 +138,15 @@ class CompleterView(QTreeView):
|
|||
model.setHorizontalHeaderLabels(labels)
|
||||
|
||||
self._font_size: int = 0 # pixels
|
||||
self._init: bool = False
|
||||
|
||||
async def on_pressed(
|
||||
self,
|
||||
idx: QModelIndex,
|
||||
) -> None:
|
||||
'''
|
||||
Mouse pressed on view handler.
|
||||
async def on_pressed(self, idx: QModelIndex) -> None:
|
||||
'''Mouse pressed on view handler.
|
||||
|
||||
'''
|
||||
search = self.parent()
|
||||
|
||||
await search.chart_current_item(
|
||||
clear_to_cache=True,
|
||||
)
|
||||
|
||||
# XXX: this causes Qt to hang and segfault..lovely
|
||||
# self.show_cache_entries(
|
||||
# only=True,
|
||||
# keep_current_item_selected=True,
|
||||
# )
|
||||
|
||||
await search.chart_current_item(clear_to_cache=False)
|
||||
search.focus()
|
||||
|
||||
|
||||
def set_font_size(self, size: int = 18):
|
||||
# print(size)
|
||||
if size < 0:
|
||||
|
@ -176,64 +156,56 @@ class CompleterView(QTreeView):
|
|||
|
||||
self.setStyleSheet(f"font: {size}px")
|
||||
|
||||
def resize_to_results(
|
||||
self,
|
||||
w: Optional[float] = 0,
|
||||
h: Optional[float] = None,
|
||||
# def resizeEvent(self, event: 'QEvent') -> None:
|
||||
# event.accept()
|
||||
# super().resizeEvent(event)
|
||||
|
||||
) -> None:
|
||||
def on_resize(self) -> None:
|
||||
'''
|
||||
Resize relay event from god.
|
||||
|
||||
'''
|
||||
self.resize_to_results()
|
||||
|
||||
def resize_to_results(self):
|
||||
model = self.model()
|
||||
cols = model.columnCount()
|
||||
cidx = self.selectionModel().currentIndex()
|
||||
rows = model.rowCount()
|
||||
self.expandAll()
|
||||
|
||||
# compute the approx height in pixels needed to include
|
||||
# all result rows in view.
|
||||
row_h = rows_h = self.rowHeight(cidx) * (rows + 1)
|
||||
for idx, item in self.iter_df_rows():
|
||||
row_h = self.rowHeight(idx)
|
||||
rows_h += row_h
|
||||
# print(f'row_h: {row_h}\nrows_h: {rows_h}')
|
||||
|
||||
# TODO: could we just break early here on detection
|
||||
# of ``rows_h >= h``?
|
||||
# rows = model.rowCount()
|
||||
|
||||
col_w_tot = 0
|
||||
for i in range(cols):
|
||||
# only slap in a row's height's worth
|
||||
# of padding once at startup.. no idea
|
||||
if (
|
||||
not self._init
|
||||
and row_h
|
||||
):
|
||||
col_w_tot = row_h
|
||||
self._init = True
|
||||
|
||||
self.resizeColumnToContents(i)
|
||||
col_w_tot += self.columnWidth(i)
|
||||
|
||||
# NOTE: if the height `h` set here is **too large** then the
|
||||
# resize event will perpetually trigger as the window causes
|
||||
# some kind of recompute of callbacks.. so we have to ensure
|
||||
# it's limited.
|
||||
if h:
|
||||
h: int = round(h)
|
||||
abs_mx = round(0.91 * h)
|
||||
self.setMaximumHeight(abs_mx)
|
||||
win = self.window()
|
||||
win_h = win.height()
|
||||
edit_h = self.parent().bar.height()
|
||||
sb_h = win.statusBar().height()
|
||||
|
||||
if rows_h <= abs_mx:
|
||||
# self.setMinimumHeight(rows_h)
|
||||
self.setMinimumHeight(rows_h)
|
||||
# self.setFixedHeight(rows_h)
|
||||
# TODO: probably make this more general / less hacky
|
||||
# we should figure out the exact number of rows to allow
|
||||
# inclusive of search bar and header "rows", in pixel terms.
|
||||
# Eventually when we have an "info" widget below the results we
|
||||
# will want space for it and likely terminating the results-view
|
||||
# space **exactly on a row** would be ideal.
|
||||
# if row_px > 0:
|
||||
# rows = ceil(window_h / row_px) - 4
|
||||
# else:
|
||||
# rows = 16
|
||||
# self.setFixedHeight(rows * row_px)
|
||||
# self.resize(self.width(), rows * row_px)
|
||||
|
||||
else:
|
||||
self.setMinimumHeight(abs_mx)
|
||||
# NOTE: if the height set here is **too large** then the resize
|
||||
# event will perpetually trigger as the window causes some kind
|
||||
# of recompute of callbacks.. so we have to ensure it's limited.
|
||||
h = win_h - (edit_h + 1.666*sb_h)
|
||||
assert h > 0
|
||||
self.setFixedHeight(round(h))
|
||||
|
||||
# dynamically size to width of longest result seen
|
||||
curr_w = self.width()
|
||||
if curr_w < col_w_tot:
|
||||
self.setMinimumWidth(col_w_tot)
|
||||
# size to width of longest result seen thus far
|
||||
# TODO: should we always dynamically scale to longest result?
|
||||
if self.width() < col_w_tot:
|
||||
self.setFixedWidth(col_w_tot)
|
||||
|
||||
self.update()
|
||||
|
||||
|
@ -302,7 +274,7 @@ class CompleterView(QTreeView):
|
|||
def select_first(self) -> QStandardItem:
|
||||
'''
|
||||
Select the first depth >= 2 entry from the completer tree and
|
||||
return its item.
|
||||
return its item.
|
||||
|
||||
'''
|
||||
# ensure we're **not** selecting the first level parent node and
|
||||
|
@ -359,23 +331,6 @@ class CompleterView(QTreeView):
|
|||
item = model.itemFromIndex(idx)
|
||||
yield idx, item
|
||||
|
||||
def iter_df_rows(
|
||||
self,
|
||||
iparent: QModelIndex = QModelIndex(),
|
||||
|
||||
) -> Iterator[tuple[QModelIndex, QStandardItem]]:
|
||||
|
||||
model = self.model()
|
||||
isections = model.rowCount(iparent)
|
||||
for i in range(isections):
|
||||
idx = model.index(i, 0, iparent)
|
||||
item = model.itemFromIndex(idx)
|
||||
yield idx, item
|
||||
|
||||
if model.hasChildren(idx):
|
||||
# recursively yield child items depth-first
|
||||
yield from self.iter_df_rows(idx)
|
||||
|
||||
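A freestanding sketch of the same depth-first model walk
``iter_df_rows()`` implements above, using only stock Qt types and a
tiny hypothetical model:

from PyQt5.QtCore import QModelIndex
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtWidgets import QApplication

app = QApplication([])

model = QStandardItemModel()
section = QStandardItem('cache')
section.appendRow(QStandardItem('btcusdt.binance'))
model.appendRow(section)

def iter_df_rows(model, iparent=QModelIndex()):
    # yield (index, item) pairs depth-first, recursing into children
    for i in range(model.rowCount(iparent)):
        idx = model.index(i, 0, iparent)
        yield idx, model.itemFromIndex(idx)
        if model.hasChildren(idx):
            yield from iter_df_rows(model, idx)

for idx, item in iter_df_rows(model):
    print(item.text())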
def find_section(
|
||||
self,
|
||||
section: str,
|
||||
|
@ -399,8 +354,7 @@ class CompleterView(QTreeView):
|
|||
status_field: str = None,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Clear all result-rows from under the depth = 1 section.
|
||||
'''Clear all result-rows from under the depth = 1 section.
|
||||
|
||||
'''
|
||||
idx = self.find_section(section)
|
||||
|
@ -421,6 +375,8 @@ class CompleterView(QTreeView):
|
|||
else:
|
||||
model.setItem(idx.row(), 1, QStandardItem())
|
||||
|
||||
self.resize_to_results()
|
||||
|
||||
return idx
|
||||
else:
|
||||
return None
|
||||
|
@ -430,26 +386,12 @@ class CompleterView(QTreeView):
|
|||
section: str,
|
||||
values: Sequence[str],
|
||||
clear_all: bool = False,
|
||||
reverse: bool = False,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Set result-rows for depth = 1 tree section ``section``.
|
||||
|
||||
'''
|
||||
if (
|
||||
values
|
||||
and not isinstance(values[0], str)
|
||||
):
|
||||
flattened: list[str] = []
|
||||
for val in values:
|
||||
flattened.extend(val)
|
||||
|
||||
values = flattened
|
||||
|
||||
if reverse:
|
||||
values = reversed(values)
|
||||
|
||||
model = self.model()
|
||||
if clear_all:
|
||||
# XXX: rewrite the model from scratch if caller requests it
|
||||
|
@ -502,22 +444,9 @@ class CompleterView(QTreeView):
|
|||
|
||||
self.show_matches()
|
||||
|
||||
def show_matches(
|
||||
self,
|
||||
wh: Optional[tuple[float, float]] = None,
|
||||
|
||||
) -> None:
|
||||
|
||||
if wh:
|
||||
self.resize_to_results(*wh)
|
||||
else:
|
||||
# case where it's just an update from results and *NOT*
|
||||
# a resize of some higher level parent-container widget.
|
||||
search = self.parent()
|
||||
w, h = search.space_dims()
|
||||
self.resize_to_results(w=w, h=h)
|
||||
|
||||
def show_matches(self) -> None:
|
||||
self.show()
|
||||
self.resize_to_results()
|
||||
|
||||
|
||||
class SearchBar(Edit):
|
||||
|
@ -537,15 +466,18 @@ class SearchBar(Edit):
|
|||
self.godwidget = godwidget
|
||||
super().__init__(parent, **kwargs)
|
||||
self.view: CompleterView = view
|
||||
godwidget._widgets[view.mode_name] = view
|
||||
|
||||
def show(self) -> None:
|
||||
super().show()
|
||||
self.view.show_matches()
|
||||
|
||||
def unfocus(self) -> None:
|
||||
self.parent().hide()
|
||||
self.clearFocus()
|
||||
|
||||
def hide(self) -> None:
|
||||
if self.view:
|
||||
self.view.hide()
|
||||
super().hide()
|
||||
|
||||
|
||||
class SearchWidget(QtWidgets.QWidget):
|
||||
|
@ -564,16 +496,15 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
parent=None,
|
||||
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
super().__init__(parent or godwidget)
|
||||
|
||||
# size it as we specify
|
||||
self.setSizePolicy(
|
||||
QtWidgets.QSizePolicy.Fixed,
|
||||
QtWidgets.QSizePolicy.Fixed,
|
||||
QtWidgets.QSizePolicy.Expanding,
|
||||
)
|
||||
|
||||
self.godwidget = godwidget
|
||||
godwidget.reg_for_resize(self)
|
||||
|
||||
self.vbox = QtWidgets.QVBoxLayout(self)
|
||||
self.vbox.setContentsMargins(0, 4, 4, 0)
|
||||
|
@ -623,53 +554,20 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)
|
||||
|
||||
def focus(self) -> None:
|
||||
self.show()
|
||||
self.bar.focus()
|
||||
|
||||
def show_cache_entries(
|
||||
self,
|
||||
only: bool = False,
|
||||
keep_current_item_selected: bool = False,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Clear the search results view and show only cached (aka recently
|
||||
loaded with active data) feeds in the results section.
|
||||
|
||||
'''
|
||||
godw = self.godwidget
|
||||
|
||||
# first entry in the cache is the current symbol(s)
|
||||
fqsns = set()
|
||||
for multi_fqsns in list(godw._chart_cache):
|
||||
for fqsn in set(multi_fqsns):
|
||||
fqsns.add(fqsn)
|
||||
|
||||
if keep_current_item_selected:
|
||||
sel = self.view.selectionModel()
|
||||
cidx = sel.currentIndex()
|
||||
|
||||
if self.view.model().rowCount(QModelIndex()) == 0:
|
||||
# fill cache list if nothing existing
|
||||
self.view.set_section_entries(
|
||||
'cache',
|
||||
list(fqsns),
|
||||
# remove all other completion results except for cache
|
||||
clear_all=only,
|
||||
reverse=True,
|
||||
list(reversed(self.godwidget._chart_cache)),
|
||||
clear_all=True,
|
||||
)
|
||||
|
||||
if (
|
||||
keep_current_item_selected
|
||||
and cidx.isValid()
|
||||
):
|
||||
# set current selection back to what it was before filling out
|
||||
# the view results.
|
||||
self.view.select_from_idx(cidx)
|
||||
else:
|
||||
self.view.select_first()
|
||||
self.bar.focus()
|
||||
self.show()
|
||||
|
||||
def get_current_item(self) -> tuple[QModelIndex, str, str] | None:
|
||||
'''
|
||||
Return the current completer tree selection as
|
||||
def get_current_item(self) -> Optional[tuple[str, str]]:
|
||||
'''Return the current completer tree selection as
|
||||
a tuple ``(parent: str, child: str)`` if valid, else ``None``.
|
||||
|
||||
'''
|
||||
|
@ -695,11 +593,7 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
if provider == 'cache':
|
||||
symbol, _, provider = symbol.rpartition('.')
|
||||
|
||||
return (
|
||||
cidx,
|
||||
provider,
|
||||
symbol,
|
||||
)
|
||||
return provider, symbol
|
||||
|
||||
else:
|
||||
return None
|
||||
|
@ -709,8 +603,7 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
clear_to_cache: bool = True,
|
||||
|
||||
) -> Optional[str]:
|
||||
'''
|
||||
Attempt to load and switch the current selected
|
||||
'''Attempt to load and switch the current selected
|
||||
completion result to the affiliated chart app.
|
||||
|
||||
Return any loaded symbol.
|
||||
|
@ -720,16 +613,15 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
if value is None:
|
||||
return None
|
||||
|
||||
cidx, provider, symbol = value
|
||||
godw = self.godwidget
|
||||
provider, symbol = value
|
||||
chart = self.godwidget
|
||||
|
||||
fqsn = f'{symbol}.{provider}'
|
||||
log.info(f'Requesting symbol: {fqsn}')
|
||||
log.info(f'Requesting symbol: {symbol}.{provider}')
|
||||
|
||||
# assert provider in symbol
|
||||
await godw.load_symbols(
|
||||
fqsns=[fqsn],
|
||||
loglevel='info',
|
||||
await chart.load_symbol(
|
||||
provider,
|
||||
symbol,
|
||||
'info',
|
||||
)
|
||||
|
||||
# fully qualified symbol name (SNS i guess is what we're
|
||||
|
@ -743,48 +635,18 @@ class SearchWidget(QtWidgets.QWidget):
|
|||
# Re-order the symbol cache on the chart to display in
|
||||
# LIFO order. this is normally only done internally by
|
||||
# the chart on new symbols being loaded into memory
|
||||
godw.set_chart_symbols(
|
||||
(fqsn,), (
|
||||
godw.hist_linked,
|
||||
godw.rt_linked,
|
||||
)
|
||||
)
|
||||
self.show_cache_entries(
|
||||
only=True,
|
||||
chart.set_chart_symbol(fqsn, chart.linkedsplits)
|
||||
|
||||
self.view.set_section_entries(
|
||||
'cache',
|
||||
values=list(reversed(chart._chart_cache)),
|
||||
|
||||
# remove all other completion results except for cache
|
||||
clear_all=True,
|
||||
)
|
||||
|
||||
self.bar.focus()
|
||||
return fqsn
|
||||
|
||||
def space_dims(self) -> tuple[float, float]:
|
||||
'''
|
||||
Compute and return the "available space dimensions" for this
|
||||
search widget in terms of px space for results by returning the
|
||||
pair of width and height.
|
||||
|
||||
'''
|
||||
# XXX: dun need dis rite?
|
||||
# win = self.window()
|
||||
# win_h = win.height()
|
||||
# sb_h = win.statusBar().height()
|
||||
godw = self.godwidget
|
||||
hl = godw.hist_linked
|
||||
edit_h = self.bar.height()
|
||||
h = hl.height() - edit_h
|
||||
w = hl.width()
|
||||
return w, h
|
||||
|
||||
def on_resize(self) -> None:
|
||||
'''
|
||||
Resize relay event from god, resize all child widgets.
|
||||
|
||||
Right now this is just view to contents and/or the fast chart
|
||||
height.
|
||||
|
||||
'''
|
||||
w, h = self.space_dims()
|
||||
self.bar.view.show_matches(wh=(w, h))
|
||||
|
||||
|
||||
_search_active: trio.Event = trio.Event()
|
||||
_search_enabled: bool = False
|
||||
|
@ -820,10 +682,9 @@ async def pack_matches(
|
|||
with trio.CancelScope() as cs:
|
||||
task_status.started(cs)
|
||||
# ensure ^ status is updated
|
||||
results = list(await search(pattern))
|
||||
results = await search(pattern)
|
||||
|
||||
# XXX: don't cache the cache results xD
|
||||
if provider != 'cache':
|
||||
if provider != 'cache': # XXX: don't cache the cache results xD
|
||||
matches[(provider, pattern)] = results
|
||||
|
||||
# print(f'results from {provider}: {results}')
|
||||
|
@ -851,11 +712,10 @@ async def fill_results(
|
|||
max_pause_time: float = 6/16 + 0.001,
|
||||
|
||||
) -> None:
|
||||
'''
|
||||
Task to search through providers and fill in possible
|
||||
"""Task to search through providers and fill in possible
|
||||
completion results.
|
||||
|
||||
'''
|
||||
"""
|
||||
global _search_active, _search_enabled, _searcher_cache
|
||||
|
||||
bar = search.bar
|
||||
|
@ -869,10 +729,6 @@ async def fill_results(
|
|||
matches = defaultdict(list)
|
||||
has_results: defaultdict[str, set[str]] = defaultdict(set)
|
||||
|
||||
# show cached feed list at startup
|
||||
search.show_cache_entries()
|
||||
search.on_resize()
|
||||
|
||||
while True:
|
||||
await _search_active.wait()
|
||||
period = None
|
||||
|
@ -886,7 +742,7 @@ async def fill_results(
|
|||
pattern = await recv_chan.receive()
|
||||
|
||||
period = time.time() - wait_start
|
||||
log.debug(f'{pattern} after {period}')
|
||||
print(f'{pattern} after {period}')
|
||||
|
||||
# during fast multiple key inputs, wait until a pause
|
||||
# (in typing) to initiate search
|
||||
|
@ -924,9 +780,8 @@ async def fill_results(
|
|||
# it hasn't already been searched with the current
|
||||
# input pattern (in which case just look up the old
|
||||
# results).
|
||||
if (
|
||||
period >= pause
|
||||
and provider not in already_has_results
|
||||
if (period >= pause) and (
|
||||
provider not in already_has_results
|
||||
):
|
||||
|
||||
# TODO: it may make more sense TO NOT search the
|
||||
|
@ -934,9 +789,7 @@ async def fill_results(
|
|||
# cpu-bound.
|
||||
if provider != 'cache':
|
||||
view.clear_section(
|
||||
provider,
|
||||
status_field='-> searchin..',
|
||||
)
|
||||
provider, status_field='-> searchin..')
|
||||
|
||||
await n.start(
|
||||
pack_matches,
|
||||
|
@ -957,20 +810,11 @@ async def fill_results(
|
|||
# re-searching its ``dict`` since it's easier
|
||||
# but it also causes it to be slower than cached
|
||||
# results from other providers on occasion.
|
||||
if (
|
||||
results
|
||||
):
|
||||
if provider != 'cache':
|
||||
if results and provider != 'cache':
|
||||
view.set_section_entries(
|
||||
section=provider,
|
||||
values=results,
|
||||
)
|
||||
else:
|
||||
# if provider == 'cache':
|
||||
# for the cache just show what we got
|
||||
# that matches
|
||||
search.show_cache_entries()
|
||||
|
||||
else:
|
||||
view.clear_section(provider)
|
||||
|
||||
|
@ -992,11 +836,13 @@ async def handle_keyboard_input(
|
|||
global _search_active, _search_enabled
|
||||
|
||||
# startup
|
||||
searchw = searchbar.parent()
|
||||
godwidget = searchw.godwidget
|
||||
view = searchbar.view
|
||||
view.set_font_size(searchbar.dpi_font.px_size)
|
||||
send, recv = trio.open_memory_channel(616)
|
||||
bar = searchbar
|
||||
search = searchbar.parent()
|
||||
godwidget = search.godwidget
|
||||
view = bar.view
|
||||
view.set_font_size(bar.dpi_font.px_size)
|
||||
|
||||
send, recv = trio.open_memory_channel(16)
|
||||
|
||||
async with trio.open_nursery() as n:
|
||||
|
||||
|
@ -1006,15 +852,11 @@ async def handle_keyboard_input(
|
|||
n.start_soon(
|
||||
partial(
|
||||
fill_results,
|
||||
searchw,
|
||||
search,
|
||||
recv,
|
||||
)
|
||||
)
|
||||
|
||||
searchbar.focus()
|
||||
searchw.show_cache_entries()
|
||||
await trio.sleep(0)
|
||||
|
||||
async for kbmsg in recv_chan:
|
||||
event, etype, key, mods, txt = kbmsg.to_tuple()
|
||||
|
||||
|
@ -1024,29 +866,19 @@ async def handle_keyboard_input(
|
|||
if mods == Qt.ControlModifier:
|
||||
ctl = True
|
||||
|
||||
if key in (
|
||||
Qt.Key_Enter,
|
||||
Qt.Key_Return
|
||||
):
|
||||
if key in (Qt.Key_Enter, Qt.Key_Return):
|
||||
|
||||
await search.chart_current_item(clear_to_cache=True)
|
||||
_search_enabled = False
|
||||
await searchw.chart_current_item(clear_to_cache=True)
|
||||
continue
|
||||
|
||||
# XXX: causes hang and segfault..
|
||||
# searchw.show_cache_entries(
|
||||
# only=True,
|
||||
# keep_current_item_selected=True,
|
||||
# )
|
||||
|
||||
view.show_matches()
|
||||
searchw.focus()
|
||||
|
||||
elif (
|
||||
not ctl
|
||||
and not searchbar.text()
|
||||
):
|
||||
# TODO: really should factor this somewhere..bc
|
||||
# we're doin it in another spot as well..
|
||||
searchw.show_cache_entries(only=True)
|
||||
elif not ctl and not bar.text():
|
||||
# if nothing in search text show the cache
|
||||
view.set_section_entries(
|
||||
'cache',
|
||||
list(reversed(godwidget._chart_cache)),
|
||||
clear_all=True,
|
||||
)
|
||||
continue
|
||||
|
||||
# cancel and close
|
||||
|
@ -1055,7 +887,7 @@ async def handle_keyboard_input(
|
|||
Qt.Key_Space, # i feel like this is the "native" one
|
||||
Qt.Key_Alt,
|
||||
}:
|
||||
searchbar.unfocus()
|
||||
search.bar.unfocus()
|
||||
|
||||
# kill the search and focus back on main chart
|
||||
if godwidget:
|
||||
|
@ -1063,95 +895,68 @@ async def handle_keyboard_input(
|
|||
|
||||
continue
|
||||
|
||||
if (
|
||||
ctl
|
||||
and key in {Qt.Key_L}
|
||||
):
|
||||
if ctl and key in {
|
||||
Qt.Key_L,
|
||||
}:
|
||||
# like url (link) highlight in a web browser
|
||||
searchbar.focus()
|
||||
bar.focus()
|
||||
|
||||
# selection navigation controls
|
||||
elif (
|
||||
ctl
|
||||
and key in {Qt.Key_D}
|
||||
):
|
||||
elif ctl and key in {
|
||||
Qt.Key_D,
|
||||
}:
|
||||
view.next_section(direction='down')
|
||||
_search_enabled = False
|
||||
|
||||
elif (
|
||||
ctl
|
||||
and key in {Qt.Key_U}
|
||||
):
|
||||
elif ctl and key in {
|
||||
Qt.Key_U,
|
||||
}:
|
||||
view.next_section(direction='up')
|
||||
_search_enabled = False
|
||||
|
||||
# selection navigation controls
|
||||
elif (
|
||||
ctl and (
|
||||
key in {
|
||||
elif (ctl and key in {
|
||||
|
||||
Qt.Key_K,
|
||||
Qt.Key_J,
|
||||
}
|
||||
|
||||
or key in {
|
||||
}) or key in {
|
||||
|
||||
Qt.Key_Up,
|
||||
Qt.Key_Down,
|
||||
}
|
||||
)
|
||||
):
|
||||
_search_enabled = False
|
||||
|
||||
if key in {
|
||||
Qt.Key_K,
|
||||
Qt.Key_Up
|
||||
}:
|
||||
_search_enabled = False
|
||||
if key in {Qt.Key_K, Qt.Key_Up}:
|
||||
item = view.select_previous()
|
||||
|
||||
elif key in {
|
||||
Qt.Key_J,
|
||||
Qt.Key_Down,
|
||||
}:
|
||||
elif key in {Qt.Key_J, Qt.Key_Down}:
|
||||
item = view.select_next()
|
||||
|
||||
if item:
|
||||
parent_item = item.parent()
|
||||
|
||||
# if we're in the cache section and thus the next
|
||||
# selection is a cache item, switch and show it
|
||||
# immediately since it should be very fast.
|
||||
if (
|
||||
parent_item
|
||||
and parent_item.text() == 'cache'
|
||||
):
|
||||
await searchw.chart_current_item(clear_to_cache=False)
|
||||
if parent_item and parent_item.text() == 'cache':
|
||||
|
||||
# if it's a cache item, switch and show it immediately
|
||||
await search.chart_current_item(clear_to_cache=False)
|
||||
|
||||
# ACTUAL SEARCH BLOCK #
|
||||
# where we fuzzy complete and fill out sections.
|
||||
elif not ctl:
|
||||
# relay to completer task
|
||||
_search_enabled = True
|
||||
send.send_nowait(searchw.bar.text())
|
||||
send.send_nowait(search.bar.text())
|
||||
_search_active.set()
|
||||
|
||||
|
||||
async def search_simple_dict(
|
||||
text: str,
|
||||
source: dict,
|
||||
|
||||
) -> dict[str, Any]:
|
||||
|
||||
tokens = []
|
||||
for key in source:
|
||||
if not isinstance(key, str):
|
||||
tokens.extend(key)
|
||||
else:
|
||||
tokens.append(key)
|
||||
|
||||
# search routine can be specified as a function such
|
||||
# as in the case of the current app's local symbol cache
|
||||
matches = fuzzy.extractBests(
|
||||
text,
|
||||
tokens,
|
||||
source.keys(),
|
||||
score_cutoff=90,
|
||||
)
|
||||
|
||||
|
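A hedged sketch of the fuzzy-completion call used above, assuming the
``fuzzy`` alias refers to ``fuzzywuzzy.process`` (or the drop-in
``rapidfuzz`` equivalent) and using a made-up symbol cache:

from fuzzywuzzy import process as fuzzy

source = {
    'btcusdt.binance': {},
    'ethusdt.binance': {},
    'mnq.globex.ib': {},
}
# returns (choice, score) pairs at or above the cutoff
matches = fuzzy.extractBests(
    'btcusdt',
    list(source.keys()),
    score_cutoff=90,
)
print(matches)  # -> [('btcusdt.binance', ...)] or similar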
|
|
@ -240,12 +240,12 @@ def hcolor(name: str) -> str:
|
|||
'gunmetal': '#91A3B0',
|
||||
'battleship': '#848482',
|
||||
|
||||
# default ohlc-bars/curve gray
|
||||
'bracket': '#666666', # like the logo
|
||||
|
||||
# bluish
|
||||
'charcoal': '#36454F',
|
||||
|
||||
# default bars
|
||||
'bracket': '#666666', # like the logo
|
||||
|
||||
# work well for filled polygons which want a 'bracket' feel
|
||||
# going light to dark
|
||||
'davies': '#555555',
|
||||
|
|
|
@ -21,29 +21,15 @@ Qt main window singletons and stuff.
|
|||
import os
|
||||
import signal
|
||||
import time
|
||||
from typing import (
|
||||
Callable,
|
||||
Optional,
|
||||
Union,
|
||||
)
|
||||
from typing import Callable, Optional, Union
|
||||
import uuid
|
||||
|
||||
from pyqtgraph import QtGui
|
||||
from PyQt5 import QtCore
|
||||
from PyQt5.QtWidgets import (
|
||||
QWidget,
|
||||
QMainWindow,
|
||||
QApplication,
|
||||
QLabel,
|
||||
QStatusBar,
|
||||
)
|
||||
from PyQt5.QtWidgets import QLabel, QStatusBar
|
||||
|
||||
from PyQt5.QtGui import (
|
||||
QScreen,
|
||||
QCloseEvent,
|
||||
)
|
||||
from ..log import get_logger
|
||||
from ._style import _font_small, hcolor
|
||||
from ._chart import GodWidget
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
@ -162,13 +148,12 @@ class MultiStatus:
|
|||
self.bar.clearMessage()
|
||||
|
||||
|
||||
class MainWindow(QMainWindow):
|
||||
class MainWindow(QtGui.QMainWindow):
|
||||
|
||||
# XXX: for tiling wms this should scale
|
||||
# with the alloted window size.
|
||||
# TODO: detect for tiling and if untrue set some size?
|
||||
# size = (300, 500)
|
||||
godwidget: GodWidget
|
||||
size = (300, 500)
|
||||
|
||||
title = 'piker chart (ur symbol is loading bby)'
|
||||
|
||||
|
@ -177,20 +162,17 @@ class MainWindow(QMainWindow):
|
|||
# self.setMinimumSize(*self.size)
|
||||
self.setWindowTitle(self.title)
|
||||
|
||||
# set by runtime after `trio` is engaged.
|
||||
self.godwidget: Optional[GodWidget] = None
|
||||
|
||||
self._status_bar: QStatusBar = None
|
||||
self._status_label: QLabel = None
|
||||
self._size: Optional[tuple[int, int]] = None
|
||||
|
||||
@property
|
||||
def mode_label(self) -> QLabel:
|
||||
def mode_label(self) -> QtGui.QLabel:
|
||||
|
||||
# init mode label
|
||||
if not self._status_label:
|
||||
|
||||
self._status_label = label = QLabel()
|
||||
self._status_label = label = QtGui.QLabel()
|
||||
label.setStyleSheet(
|
||||
f"""QLabel {{
|
||||
color : {hcolor('gunmetal')};
|
||||
|
@ -212,7 +194,8 @@ class MainWindow(QMainWindow):
|
|||
|
||||
def closeEvent(
|
||||
self,
|
||||
event: QCloseEvent,
|
||||
|
||||
event: QtGui.QCloseEvent,
|
||||
|
||||
) -> None:
|
||||
'''Cancel the root actor asap.
|
||||
|
@ -252,8 +235,8 @@ class MainWindow(QMainWindow):
|
|||
def on_focus_change(
|
||||
self,
|
||||
|
||||
last: QWidget,
|
||||
current: QWidget,
|
||||
last: QtGui.QWidget,
|
||||
current: QtGui.QWidget,
|
||||
|
||||
) -> None:
|
||||
|
||||
|
@ -264,12 +247,11 @@ class MainWindow(QMainWindow):
|
|||
name = getattr(current, 'mode_name', '')
|
||||
self.set_mode_name(name)
|
||||
|
||||
def current_screen(self) -> QScreen:
|
||||
'''
|
||||
Get a frickin screen (if we can, gawd).
|
||||
def current_screen(self) -> QtGui.QScreen:
|
||||
"""Get a frickin screen (if we can, gawd).
|
||||
|
||||
'''
|
||||
app = QApplication.instance()
|
||||
"""
|
||||
app = QtGui.QApplication.instance()
|
||||
|
||||
for _ in range(3):
|
||||
screen = app.screenAt(self.pos())
|
||||
|
@ -302,7 +284,7 @@ class MainWindow(QMainWindow):
|
|||
'''
|
||||
# https://stackoverflow.com/a/18975846
|
||||
if not size and not self._size:
|
||||
# app = QApplication.instance()
|
||||
app = QtGui.QApplication.instance()
|
||||
geo = self.current_screen().geometry()
|
||||
h, w = geo.height(), geo.width()
|
||||
# use approx 1/3 of the area of the screen by default
|
||||
|
@ -310,36 +292,9 @@ class MainWindow(QMainWindow):
|
|||
|
||||
self.resize(*size or self._size)
|
||||
|
||||
def resizeEvent(self, event: QtCore.QEvent) -> None:
|
||||
if (
|
||||
# event.spontaneous()
|
||||
event.oldSize().height == event.size().height
|
||||
):
|
||||
event.ignore()
|
||||
return
|
||||
|
||||
# XXX: uncomment for debugging..
|
||||
# attrs = {}
|
||||
# for key in dir(event):
|
||||
# if key == '__dir__':
|
||||
# continue
|
||||
# attr = getattr(event, key)
|
||||
# try:
|
||||
# attrs[key] = attr()
|
||||
# except TypeError:
|
||||
# attrs[key] = attr
|
||||
|
||||
# from pprint import pformat
|
||||
# print(
|
||||
# f'{pformat(attrs)}\n'
|
||||
# f'WINDOW RESIZE: {self.size()}\n\n'
|
||||
# )
|
||||
self.godwidget.on_win_resize(event)
|
||||
event.accept()
|
||||
|
||||
|
||||
# singleton app per actor
|
||||
_qt_win: QMainWindow = None
|
||||
_qt_win: QtGui.QMainWindow = None
|
||||
|
||||
|
||||
def main_window() -> MainWindow:
|
||||
|
|
|
@ -46,10 +46,8 @@ def _kivy_import_hack():
|
|||
@click.argument('name', nargs=1, required=True)
|
||||
@click.pass_obj
|
||||
def monitor(config, rate, name, dhost, test, tl):
|
||||
'''
|
||||
Start a real-time watchlist UI
|
||||
|
||||
'''
|
||||
"""Start a real-time watchlist UI
|
||||
"""
|
||||
# global opts
|
||||
brokermod = config['brokermods'][0]
|
||||
loglevel = config['loglevel']
|
||||
|
@ -72,12 +70,8 @@ def monitor(config, rate, name, dhost, test, tl):
|
|||
) as portal:
|
||||
# run app "main"
|
||||
await _async_main(
|
||||
name,
|
||||
portal,
|
||||
tickers,
|
||||
brokermod,
|
||||
rate,
|
||||
test=test,
|
||||
name, portal, tickers,
|
||||
brokermod, rate, test=test,
|
||||
)
|
||||
|
||||
tractor.run(
|
||||
|
@ -128,7 +122,7 @@ def optschain(config, symbol, date, rate, test):
|
|||
@cli.command()
|
||||
@click.option(
|
||||
'--profile',
|
||||
# '-p',
|
||||
'-p',
|
||||
default=None,
|
||||
help='Enable pyqtgraph profiling'
|
||||
)
|
||||
|
@ -137,14 +131,9 @@ def optschain(config, symbol, date, rate, test):
|
|||
is_flag=True,
|
||||
help='Enable tractor debug mode'
|
||||
)
|
||||
@click.argument('symbols', nargs=-1, required=True)
|
||||
@click.argument('symbol', required=True)
|
||||
@click.pass_obj
|
||||
def chart(
|
||||
config,
|
||||
symbols: list[str],
|
||||
profile,
|
||||
pdb: bool,
|
||||
):
|
||||
def chart(config, symbol, profile, pdb):
|
||||
'''
|
||||
Start a real-time charting UI
|
||||
|
||||
|
@ -155,10 +144,8 @@ def chart(
|
|||
_profile._pg_profile = True
|
||||
_profile.ms_slower_then = float(profile)
|
||||
|
||||
# Qt UI entrypoint
|
||||
from ._app import _main
|
||||
|
||||
for symbol in symbols:
|
||||
if '.' not in symbol:
|
||||
click.echo(click.style(
|
||||
f'symbol: {symbol} must have a {symbol}.<provider> suffix',
|
||||
|
@ -166,16 +153,15 @@ def chart(
|
|||
))
|
||||
return
|
||||
|
||||
|
||||
# global opts
|
||||
brokernames = config['brokers']
|
||||
brokermods = config['brokermods']
|
||||
assert brokermods
|
||||
tractorloglevel = config['tractorloglevel']
|
||||
pikerloglevel = config['loglevel']
|
||||
|
||||
_main(
|
||||
syms=symbols,
|
||||
brokermods=brokermods,
|
||||
sym=symbol,
|
||||
brokernames=brokernames,
|
||||
piker_loglevel=pikerloglevel,
|
||||
tractor_kwargs={
|
||||
'debug_mode': pdb,
|
||||
|
@ -184,6 +170,5 @@ def chart(
|
|||
'enable_modules': [
|
||||
'piker.clearing._client'
|
||||
],
|
||||
'registry_addr': config.get('registry_addr'),
|
||||
},
|
||||
)
|
||||
|
|
File diff suppressed because it is too large
|
@ -0,0 +1,3 @@
|
|||
"""
|
||||
Super hawt Qt UI components
|
||||
"""
|
|
@ -0,0 +1,67 @@
|
|||
import sys
|
||||
|
||||
from PySide2.QtCharts import QtCharts
|
||||
from PySide2.QtWidgets import QApplication, QMainWindow
|
||||
from PySide2.QtCore import Qt, QPointF
|
||||
from PySide2 import QtGui
|
||||
import qdarkstyle
|
||||
|
||||
data = ((1, 7380, 7520, 7380, 7510, 7324),
|
||||
(2, 7520, 7580, 7410, 7440, 7372),
|
||||
(3, 7440, 7650, 7310, 7520, 7434),
|
||||
(4, 7450, 7640, 7450, 7550, 7480),
|
||||
(5, 7510, 7590, 7460, 7490, 7502),
|
||||
(6, 7500, 7590, 7480, 7560, 7512),
|
||||
(7, 7560, 7830, 7540, 7800, 7584))
|
||||
|
||||
|
||||
app = QApplication([])
|
||||
# set dark stylesheet
|
||||
# import pdb; pdb.set_trace()
|
||||
app.setStyleSheet(qdarkstyle.load_stylesheet_pyside())
|
||||
|
||||
series = QtCharts.QCandlestickSeries()
|
||||
series.setDecreasingColor(Qt.darkRed)
|
||||
series.setIncreasingColor(Qt.darkGreen)
|
||||
|
||||
ma5 = QtCharts.QLineSeries() # 5-days average data line
|
||||
tm = [] # stores str type data
|
||||
|
||||
# in a loop, series and ma5 append corresponding data
|
||||
for num, o, h, l, c, m in data:
|
||||
candle = QtCharts.QCandlestickSet(o, h, l, c)
|
||||
series.append(candle)
|
||||
ma5.append(QPointF(num, m))
|
||||
tm.append(str(num))
|
||||
|
||||
pen = candle.pen()
|
||||
# import pdb; pdb.set_trace()
|
||||
|
||||
chart = QtCharts.QChart()
|
||||
|
||||
# import pdb; pdb.set_trace()
|
||||
series.setBodyOutlineVisible(False)
|
||||
series.setCapsVisible(False)
|
||||
# brush = QtGui.QBrush()
|
||||
# brush.setColor(Qt.green)
|
||||
# series.setBrush(brush)
|
||||
chart.addSeries(series) # candle
|
||||
chart.addSeries(ma5) # ma5 line
|
||||
|
||||
chart.setAnimationOptions(QtCharts.QChart.SeriesAnimations)
|
||||
chart.createDefaultAxes()
|
||||
chart.legend().hide()
|
||||
|
||||
chart.axisX(series).setCategories(tm)
|
||||
chart.axisX(ma5).setVisible(False)
|
||||
|
||||
view = QtCharts.QChartView(chart)
|
||||
view.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeDark)
|
||||
view.setRubberBand(QtCharts.QChartView.HorizontalRubberBand)
|
||||
# chartview.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeBlueCerulean)
|
||||
|
||||
ui = QMainWindow()
|
||||
# ui.setGeometry(50, 50, 500, 300)
|
||||
ui.setCentralWidget(view)
|
||||
ui.show()
|
||||
sys.exit(app.exec_())
|
|
@ -1,3 +0,0 @@
|
|||
#[pytest]
|
||||
#trio_mode=True
|
||||
#log_cli=1
|
|
@ -1,12 +1,13 @@
|
|||
# we require a pinned dev branch to get some edge features that
|
||||
# are often untested in tractor's CI and/or being tested by us
|
||||
# first before committing as core features in tractor's base.
|
||||
-e git+https://github.com/goodboy/tractor.git@piker_pin#egg=tractor
|
||||
-e git+https://github.com/goodboy/tractor.git@master#egg=tractor
|
||||
|
||||
# `pyqtgraph` peeps keep breaking, fixing, improving so might as well
|
||||
# pin this to a dev branch that we have more control over especially
|
||||
# as more graphics stuff gets hashed out.
|
||||
-e git+https://github.com/pikers/pyqtgraph.git@master#egg=pyqtgraph
|
||||
-e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
|
||||
|
||||
|
||||
# our async client for ``marketstore`` (the tsdb)
|
||||
-e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
|
||||
|
@ -17,7 +18,4 @@
|
|||
|
||||
|
||||
# ``asyncvnc`` for sending interactions to ib-gw inside docker
|
||||
-e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
|
||||
|
||||
# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
|
||||
-e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed
|
||||
-e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc
|
||||
|
|
Some files were not shown because too many files have changed in this diff